diff --git a/.dir-locals.el b/.dir-locals.el
new file mode 100644
index 000000000..ce4f7f218
--- /dev/null
+++ b/.dir-locals.el
@@ -0,0 +1 @@
+((nix-mode . ((apheleia-formatter . alejandra))))
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 000000000..b8d1c9f09
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,16 @@
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+indent_style = tab
+indent_size = 4
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+[*.md]
+trim_trailing_whitespace = false
+insert_final_newline = false
+
+[*.yml]
+indent_style = space
diff --git a/.envrc b/.envrc
new file mode 100644
index 000000000..3550a30f2
--- /dev/null
+++ b/.envrc
@@ -0,0 +1 @@
+use flake
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 000000000..4e337a382
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,36 @@
+name: "Rulewerk Tests"
+on:
+  pull_request:
+    branches:
+      - main
+      - master
+  push:
+    branches:
+      - main
+      - master
+jobs:
+  unit-tests:
+    name: "Rulewerk Unit Tests"
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: DeterminateSystems/nix-installer-action@v12
+      - uses: DeterminateSystems/magic-nix-cache-action@v7
+      - env:
+          BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }}
+        run: echo "BRANCH_NAME=${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV
+      - run: echo "PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV
+      - run: nix-shell --pure --run "mvn clean test jacoco:report && mvn coveralls:report -D repoToken=${{ secrets.COVERALLS_TOKEN }} -D serviceBuildUrl=https://github.com/${{ github.repository }}/commit/${{ github.sha }}/checks -D branch=$BRANCH_NAME -D pullRequest=$PR_NUMBER"
+  integration-tests:
+    name: "Rulewerk Integration Tests"
+    needs: unit-tests
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: DeterminateSystems/nix-installer-action@v12
+      - uses: DeterminateSystems/magic-nix-cache-action@v7
+      - env:
+          BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }}
+        run: echo "BRANCH_NAME=${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV
+      - run: echo "PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV
+      - run: nix-shell --pure --run "mvn clean verify -Dit.test=!org.semanticweb.rulewerk.integrationtests.vlogissues.*IT -DfailIfNoTests=false"
diff --git a/.github/workflows/update-mvn2nix-lock.yaml b/.github/workflows/update-mvn2nix-lock.yaml
new file mode 100644
index 000000000..27626f101
--- /dev/null
+++ b/.github/workflows/update-mvn2nix-lock.yaml
@@ -0,0 +1,23 @@
+name: "Update mvn2nix-lock.json"
+on:
+  push:
+    paths:
+      - "**/pom.xml"
+
+jobs:
+  update-lock:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - uses: DeterminateSystems/nix-installer-action@v12
+      - uses: DeterminateSystems/magic-nix-cache-action@v7
+      - name: Update mvn2nix-lock
+        run: "nix run .#mvn2nix"
+
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@v6.0.5
+        with:
+          commit-message: Update mvn2nix-lock.json
+          title: Update mvn2nix-lock.json
+          branch: update-mvn2nix-lock
diff --git a/.github/workflows/vlog-tests.yml b/.github/workflows/vlog-tests.yml
new file mode 100644
index 000000000..91a730352
--- /dev/null
+++ b/.github/workflows/vlog-tests.yml
@@ -0,0 +1,23 @@
+name: "Rulewerk VLog Integration Tests"
+on:
+  pull_request:
+    branches:
+      - main
+      - master
+  push:
+    branches:
+      - main
+      - master
+jobs:
+  vlog-integration-tests:
+    name: "VLog Integration Tests"
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: DeterminateSystems/nix-installer-action@v12
+      - uses: DeterminateSystems/magic-nix-cache-action@v7
+      - env:
+          BRANCH_NAME_OR_REF: ${{ github.head_ref || github.ref }}
+        run: echo "BRANCH_NAME=${BRANCH_NAME_OR_REF#refs/heads/}" >> $GITHUB_ENV
+      - run: echo "PR_NUMBER=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")" >> $GITHUB_ENV
+      - run: nix-shell --pure --run "mvn clean verify -Dit.test=org.semanticweb.rulewerk.integrationtests.vlogissues.*IT -DfailIfNoTests=false"
diff --git a/.gitignore b/.gitignore
index 65533d6e9..9252042ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -29,10 +29,15 @@ nbactions.xml
 out/
 target/
 
+# local jvlog
+/rulewerk-vlog/lib/
+/build-vlog/
+/local_builds/
+
 # Don't apply the above to src/ where Java requires
 # subdirectories named according to package names.
 # We do not want to forbid things like "dumpfiles" in
-# package names.
+# package names.
 !src/
 
 # Use as directory for local testing code
@@ -46,3 +51,15 @@
 *.tmp
 .DS_Store
 Thumbs.db
+
+# Output of tests and examples
+*.log
+rulewerk-core/src/test/data/output/*
+rulewerk-examples/src/main/data/output/*
+rulewerk-examples/src/main/data/logs/*
+rulewerk-rdf/src/main/data/output/*
+rulewerk-vlog/src/test/data/output/*
+/build-vlog/vlog/
+/TAGS
+/vlog/result*
+/.direnv/
diff --git a/.travis.yml b/.travis.yml
index f89be3cb2..3259c75de 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,30 +1,40 @@
-language: java
-jdk:
-  - openjdk8
-# - oraclejdk8
-# - oraclejdk9
-
-addons:
-  apt:
-    sources:
-      - ubuntu-toolchain-r-test
-    packages:
-      - gcc-5
-      - g++-5
-
-before_install:
-  - sudo apt-get install gcc-5 -y
-  # - eval “CC=gcc-5 && CXX=g++-5”
-  ## Uncomment line below to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar
-  # - sh ./build-vlog-library.sh
-
-after_success:
-  - mvn clean cobertura:cobertura coveralls:cobertura
-
-dist: trusty
-sudo: false
-
-cache:
-  directories:
-    - ./local_builds
-    - $HOME/.m2
+language: java
+os: linux
+jobs:
+  include:
+    - os: linux
+      dist: bionic
+      jdk: openjdk11
+      after_success:
+        - mvn clean test jacoco:report coveralls:report
+
+    - os: linux
+      dist: xenial
+      addons:
+        apt:
+          sources:
+            - ubuntu-toolchain-r-test
+          packages:
+            - gcc-6
+            - g++-6
+            - libstdc++6
+      env: CC=gcc-6 CXX=g++-6
+      jdk: openjdk8
+
+    - os: osx
+      osx_image: xcode10.2
+  allow_failures:
+    - dist: trusty
+
+## Uncomment section below and the packages above to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar
+before_install:
+#  # explicitly avoid bash as travis screws with .bashrc,
+#  # cf. https://travis-ci.community/t/travis-functions-no-such-file-or-directory/2286/12
+#  - "[ -x /bin/dash ] && /bin/dash ./build-vlog-library.sh || /bin/sh ./build-vlog-library.sh"
+
+install: mvn install $OPTIONS -DskipTests=true
+
+cache:
+  directories:
+    - ./local_builds
+    - $HOME/.m2
diff --git a/README.md b/README.md
index 604e36a4a..551c6e2e2 100644
--- a/README.md
+++ b/README.md
@@ -1,21 +1,70 @@
-VLog4J
+Rulewerk
 ======
-[![Build Status](https://travis-ci.org/mkroetzsch/vlog4j.png?branch=master)](https://travis-ci.org/mkroetzsch/vlog4j)
-[![Coverage Status](https://coveralls.io/repos/github/mkroetzsch/vlog4j/badge.svg?branch=master)](https://coveralls.io/github/mkroetzsch/vlog4j?branch=master)
+[![Rulewerk Test Status](https://github.com/knowsys/rulewerk/workflows/Rulewerk%20Tests/badge.svg?branch=master)](https://github.com/knowsys/rulewerk/actions?query=workflow:Rulewerk+Tests)
+[![Coverage Status](https://coveralls.io/repos/github/knowsys/rulewerk/badge.svg?branch=master)](https://coveralls.io/github/knowsys/rulewerk?branch=master)
+[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.semanticweb.rulewerk/rulewerk-parent/badge.svg)](http://search.maven.org/#search|ga|1|g%3A%22org.semanticweb.rulewerk%22)
 
-A Java library based on the [VLog rule engine](https://github.com/karmaresearch/vlog)
+Rulewerk is a Java library based on the [VLog rule engine](https://github.com/karmaresearch/vlog).
+
+***Note:*** The recent [Nemo rule engine](https://github.com/knowsys/nemo) also implements a Rulewerk-like Datalog dialect and might be a good choice for some projects using Rulewerk. Currently there is no Rulewerk integration of Nemo yet, but many Rulewerk programs will work in Nemo (or can easily be adapted), and Nemo tends to have more features (e.g., arithmetic built-ins and datatype support).
 
 Installation
 ------------
 
-To build vlog4j from source, you need to install Maven and perform the following steps:
+The current release of Rulewerk is version [0.9.0](https://github.com/knowsys/rulewerk/releases/tag/v0.9.0). The easiest way of using the library is with Maven. Maven users must add the following dependency to the dependencies in their pom.xml file:
+
+```
+<dependency>
+	<groupId>org.semanticweb.rulewerk</groupId>
+	<artifactId>rulewerk-core</artifactId>
+	<version>0.9.0</version>
+</dependency>
+```
+
+Prior to version `0.6.0`, the *rulewerk* project was named *vlog4j*. Older versions released under the name *vlog4j* use the groupId `org.semanticweb.vlog4j` and artifact ids such as `vlog4j-core`; the latest version released under that name is `0.5.0`.
+
+You need to use Java 1.8 or above. Available source modules include:
+
+* **rulewerk-core**: essential data models for rules and facts, and essential reasoner functionality
+* **rulewerk-parser**: support for processing knowledge bases in [Rulewerk syntax](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar)
+* **rulewerk-graal**: support for converting rules, facts, and queries from [Graal](http://graphik-team.github.io/graal/) API objects and [DLGP](http://graphik-team.github.io/graal/doc/dlgp) files
+* **rulewerk-rdf**: support for reading from RDF files in Java (not required for loading RDF directly during reasoning)
+* **rulewerk-owlapi**: support for converting rules from an OWL ontology, loaded with the OWL API
+* **rulewerk-client**: stand-alone application that builds a [command-line client](https://github.com/knowsys/rulewerk/wiki/Standalone-client) for Rulewerk
+* **rulewerk-commands**: support for running commands, as done by the client
+* **rulewerk-vlog**: support for using [VLog](https://github.com/karmaresearch/vlog) as a reasoning backend for Rulewerk
+
+The test module **rulewerk-integrationtests** contains integration tests that verify the correctness of the backend reasoners on various complex reasoning problems.
+
+The released **rulewerk-vlog** packages use [`vlog-java`](https://search.maven.org/search?q=a:vlog-java), which packages system-dependent [VLog](https://github.com/karmaresearch/vlog) binaries for Linux, macOS, and Windows, and should work out of the box with current versions of these systems (for Linux, you will need at least libstdc++-v3.4.22; for macOS, you will need at least macOS 10.14). In case of problems, or if you are using the current development version, you can compile your own binaries as follows:
+* (Optional) It is recommended to increase the version of `vlog-java` (in `rulewerk-vlog/pom.xml`) before executing the next steps.
+* Delete any previous local builds (the `local_builds` directory), if present.
+* Run [build-vlog-library.sh](https://github.com/knowsys/rulewerk/blob/master/build-vlog-library.sh) or execute the commands in this file manually. This will compile a local jar file on your system, copy it to ```rulewerk-vlog/lib/jvlog-local.jar```, and install the new jar in your local Maven repository in place of the distributed version of `vlog-java`.
+* Run ```mvn install``` to test if the setup works.
+
-* In the directory ```./vlog-core/lib``` copy the jar to ```jvlog-local.jar``` (the current default is a Linux library there)
-* Run ```mvn initialize```
-* Run ```mvn install```
-* If this fails, you can run the script build-vlog-library.sh to compile and install this jar from the latest online sources using your local compiler
 
 Documentation
 -------------
-* [JavaDoc](https://mkroetzsch.github.io/vlog4j/) is available online and through the Maven packages.
+* The module **rulewerk-examples** includes short example programs that demonstrate various features and use cases
+* The GitHub project **[Rulewerk Example](https://github.com/knowsys/rulewerk-example)** shows how to use Rulewerk in your own Maven projects and can be used as a skeleton for new projects
+* [JavaDoc](https://knowsys.github.io/rulewerk/) is available online and through the Maven packages.
+* A Rulewerk [Wiki](https://github.com/knowsys/rulewerk/wiki) is available online, with detailed information about Rulewerk usage, the supported rule language ([examples](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-by-examples) and [grammar](https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar)), and related publications.
+* You can contact developers and other users about usage or development on our [support channel](https://matrix.to/#/#rulewerk-support:tu-dresden.de).
+
+Development
+-----------
+
+* Pull requests are welcome.
+* We largely follow the [Java Programming Style Guidelines published by Petroware](https://petroware.no/javastyle.html). The main exception is that the names of private members do not usually end in underscores in our code.
+
+* The master branch may require a development version of [VLog](https://github.com/karmaresearch/vlog).
+Use the script `build-vlog-library.sh` [as shown here](#anchor-build-vlog) to create and install it on your machine.
+This will compile and install the `vlog-java` dependency with the current code of the [VLog](https://github.com/karmaresearch/vlog) master branch.
+
+* Users of Eclipse should install the [JavaCC Eclipse Plug-in](https://marketplace.eclipse.org/content/javacc-eclipse-plug) to generate the parser sources. After [installing](https://marketplace.eclipse.org/content/javacc-eclipse-plug/help) the plugin, right-click on the file `JavaCCParser.jj` in `org.semanticweb.rulewerk.parser.javacc`, and select "compile with javacc". This step needs to be repeated when the file changes.
+* To build the standalone client jar, run `mvn install -Pclient`. This generates `standalone-rulewerk-client-[VERSION].jar` in `rulewerk-client/target`.
+* The CI setup is [documented here](https://github.com/knowsys/rulewerk/wiki/CI-Setup).
diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
new file mode 100644
index 000000000..762738bf5
--- /dev/null
+++ b/RELEASE-NOTES.md
@@ -0,0 +1,168 @@
+Rulewerk Release Notes
+======================
+
+Rulewerk v0.9.0
+---------------
+
+Bugfixes:
+* The parser now accepts empty prefixes, as allowed by, e.g., RDF Turtle. (#206)
+* [rulewerk-owlapi] Axioms of type DisjointClasses and DisjointObjectProperties are no longer ignored, and are now translated into corresponding rules. (#209)
+* String constants in long notation (`"foo"^^<http://www.w3.org/2001/XMLSchema#string>`) and short notation (`"foo"`) are now interchangeable.
+* Several reasoning errors in VLog (the backend) have been discovered and fixed in the version used now.
+
+Rulewerk v0.8.0
+---------------
+
+Bugfixes:
+* Encoding of RDF strings was corrected to make sure VLog succeeds in joining on strings
+* Fixed handling of Trident databases that are not a direct child of the current working directory
+* Fixed encoding of language-tagged strings used in Rulewerk facts, which had caused an exception
+* Several reasoning errors in VLog (the backend) have been discovered and fixed in the version used now
+
+Rulewerk v0.7.0
+---------------
+
+New features:
+* New interactive Rulewerk shell for rule reasoning from the command-line client
+* Significant speedup in iterating over query results
+* Support for using data from a Trident database, the recommended data source for large
+  RDF graphs in VLog
+* More features to control how Rulewerk imports RDF data using the rulewerk-rdf module
+* New class `LiteralQueryResultPrinter` for pretty-printing query results
+
+Other improvements:
+* Improved serialization of knowledge bases (using namespaces)
+* Simple (non-IRI, namespace-less) predicate names can now include `-` and `_`
+* Nulls in input data (aka "blank nodes") are now properly skolemized for VLog
+* InMemoryGraphAnalysisExample now counts proper triangles, using negation to avoid "triangles" where
+  two or more edges are the same
+
+Breaking changes:
+* The `RdfModelConverter` class from the rdf package is no longer static (and has more options)
+* The `Serializer` class in the core package has been replaced by a new implementation
+  with a completely different interface.
+* The methods `getSerialization` that were present in most syntax objects have been removed. Use `toString()` instead for simple serializations (see the sketch below), or invoke a custom Serializer.
+* The `DataSource` interface requires a new method to be implemented.
+* `@import`, `@import-relative`, and `@source` now treat relative paths as relative to the file they occur in, as opposed to the global working directory.
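+
+A minimal migration sketch for the removed `getSerialization` methods (the
+`RuleParser.parsePositiveLiteral` call is quoted from the v0.6.0 notes below;
+the import path and exception handling are assumptions, not confirmed here):
+
+```java
+import org.semanticweb.rulewerk.parser.RuleParser; // assumed package layout
+
+public class SerializationExample {
+	public static void main(String[] args) throws Exception {
+		// Before v0.7.0: parsed syntax objects offered getSerialization();
+		// since v0.7.0, plain toString() yields a simple serialization.
+		System.out.println(RuleParser.parsePositiveLiteral("p(a, b)").toString());
+	}
+}
+```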
+
+Rulewerk v0.6.0
+---------------
+
+Breaking changes:
+* VLog4j is now called Rulewerk. Consequently, the groupId, artifact IDs, and package names
+  of the project have changed.
+* In the examples package, `ExamplesUtils.getQueryAnswerCount(queryString, reasoner)` no
+  longer exists. It can be replaced by
+  `reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount()`
+  (see the sketch below).
+* The `FileDataSource` constructor and those of child classes (`CsvFileDataSource`, `RdfFileDataSource`)
+  now take the String path to a file instead of a `File` object.
+* The VLog backend has been moved to a new `rulewerk-vlog` module,
+  changing several import paths. `Reasoner.getInstance()` is
+  gone. Furthermore, `InMemoryDataSource` has become an abstract class;
+  use `VLogInMemoryDataSource` where applicable.
+
+New features:
+* Counting query answers is more efficient now, using `Reasoner.countQueryAnswers()`
+* All inferred facts can be serialized to a file using `Reasoner.writeInferences()`
+* All inferred facts can be obtained as a Stream using `Reasoner.getInferences()`
+* `Reasoner.getCorrectness()` returns the correctness result of the last reasoning task
+* Knowledge bases can be serialized to a file using `KnowledgeBase.writeKnowledgeBase()`
+* Rules files may import other rules files using `@import` and
+  `@import-relative`, where the latter resolves relative IRIs using
+  the current base IRI, unless the imported file explicitly specifies
+  a different one.
+* Named nulls of the form `_:name` are now allowed during parsing (but
+  may not occur in rule bodies). They are renamed to ensure that they
+  are distinct on a per-file level.
+* The parser allows custom directives to be implemented, and a certain
+  set of delimiters allows for custom literal expressions.
+
+Other improvements:
+* Prefix declarations are now kept as part of the knowledge base and
+  are used to abbreviate names when exporting inferences.
+
+Bugfixes:
+* Several reasoning errors in VLog (the backend) have been discovered and fixed in the version used now
+
+VLog4j v0.5.0
+-------------
+
+Breaking changes:
+* The data model for rules has been refined and changed:
+  * Instead of Constant, specific types of constants are used to capture abstract and data values
+  * Instead of Variable, ExistentialVariable and UniversalVariable now indicate quantification
+  * Blank was renamed to NamedNull to avoid confusion with RDF blank nodes
+  * Methods to access terms now use Java Streams and are unified across syntactic objects
+* Data source declarations now use brackets to denote arity, e.g., `@source predicate[2]: load-csv()`
+
+New features:
+* New module vlog4j-client provides a stand-alone command-line client jar for VLog4j
+* A wiki for VLog4j use and related publications has been created: https://github.com/knowsys/vlog4j/wiki
+* The parser behaviour for data source declarations and certain datatype literals can be customised.
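+
+A short sketch of the query-counting replacement described in the v0.6.0
+notes above (`Reasoner.countQueryAnswers`, `RuleParser.parsePositiveLiteral`,
+and `getCount` are quoted from the notes; imports and the initialisation of
+the `reasoner` are assumed and elided):
+
+```java
+// Sketch only: assumes an initialised and loaded Reasoner `reasoner`,
+// e.g., from the rulewerk-vlog backend, and RuleParser from rulewerk-parser.
+static void printAnswerCount(Reasoner reasoner, String queryString) throws Exception {
+	// Replacement for the removed ExamplesUtils.getQueryAnswerCount:
+	System.out.println(reasoner.countQueryAnswers(
+			RuleParser.parsePositiveLiteral(queryString)).getCount());
+}
+```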
+
+Other improvements:
+* The data model is better aligned with the syntax supported by the parser
+* The String representation of Statement objects (rules, facts, data source declarations) is parseable
+* The OWL API dependency has been upgraded from 4.5.1 to the latest version (5.1.11)
+* The SLF4J dependency has been upgraded from 1.7.10 to the latest version (1.7.28)
+* The Cobertura test coverage tool has been replaced by JaCoCo
+
+Bugfixes:
+* Acyclicity checks work again without calling reason() first (issue #128)
+* In vlog4j-owlapi, class expressions of type ObjectMaxCardinality are not allowed in superclasses (issue #104)
+* In vlog4j-owlapi, class expressions of type ObjectOneOf are only allowed as subclasses in axioms of type subClassOf (issue #20)
+* When parsing syntactic fragments such as Facts or Literals, the parser now enforces that all input is consumed.
+
+VLog4j v0.4.0
+-------------
+
+Breaking changes:
+* The Reasoner interface has changed (knowledge base and related methods moved to KnowledgeBase)
+* The EdbIdbSeparation is obsolete and no longer exists
+* IRIs loaded from RDF inputs no longer include the surrounding < > in their string identifier
+* A new interface Fact has replaced the overly general PositiveLiteral in many places
+
+New features:
+* New custom syntax for rules, facts, and data sources to create knowledge bases from files or strings in Java
+* Input predicates can now be used with multiple sources and in rule heads (no more EDB-IDB distinction)
+* New InMemoryDataSource for efficient in-memory fact loading
+* New KnowledgeBase class separates facts, data sources, and rules from the actual Reasoner
+* Modifications to the knowledge base are taken into account by the reasoner
+* New and updated example programs to illustrate the use of the syntax
+
+Other improvements:
+* Query results now indicate their guaranteed correctness (for example, answers can be incomplete when a timeout is set)
+* Faster and more memory-efficient loading of facts
+* Better error reporting; improved use of exceptions
+* Better logging, especially on the INFO level
+* Better code structure and testing
+
+Bugfixes:
+* Several reasoning errors in VLog (the backend) have been discovered and fixed in the version used now
+
+
+VLog4j v0.3.0
+-------------
+
+New features:
+* Support for Graal data structures (conversion from Graal model objects to VLog model objects)
+* Stratified negation: rule bodies are conjunctions of positive or negated literals
+* SPARQL-based data sources: load remote data from SPARQL endpoints
+* Acyclicity and cyclicity checks: JA, RJA, MFA, RMFA, and RFC, as well as a generic method that checks whether a given set of rules and fact predicates is acyclic, cyclic, or undetermined
+
+VLog4j v0.2.0
+-------------
+
+New features:
+* Support for File data sources in N-Triples format (.nt file extension)
+* Support for gzipped data source files (.csv.gz, .nt.gz)
+
+VLog4j v0.1.0
+-------------
+
+Initial release.
+
+New features:
+* Essential data models for rules and facts, and essential reasoner functionality
+* Support for reading from RDF files
+* Support for converting rules from an OWL ontology, loaded with the OWL API
diff --git a/build-vlog-library.sh b/build-vlog-library.sh
old mode 100644
new mode 100755
index 848ae7efa..ff059801f
--- a/build-vlog-library.sh
+++ b/build-vlog-library.sh
@@ -1,13 +1,12 @@
 #!/bin/sh
-# Script to build unreleased snapshots of karmaresearch/vlog into vlog4j-base jar on Unix-like systems
+# Script to build unreleased snapshots of karmaresearch/vlog into rulewerk-base jar on Unix-like systems
 if [ -f "./local_builds/jvlog.jar" ]
 then
     echo "Using cached VLog JAR."
 else
     echo "Building new VLog JAR."
-    export CC=gcc-5 && export CXX=g++-5
-    mkdir local_builds
+    mkdir -p local_builds
     rm -rf build-vlog
    mkdir build-vlog
     cd build-vlog
@@ -16,12 +15,18 @@
     # git pull
     mkdir build
     cd build
-    cmake -DJAVA=1 -DSPARQL=1 ..
+    if [ "x${CI}" = "xtrue" ]
+    then
+        # disable warnings when running in CI to keep travis logs short
+        cmake -DJAVA=1 -DSPARQL=1 -DCMAKE_CXX_FLAGS=-w ..
+    else
+        cmake -DJAVA=1 -DSPARQL=1 ..
+    fi
     make
     cp jvlog.jar ../../../local_builds/jvlog.jar
     cd ../../..
 fi
-mkdir local_builds/jvlog.jar vlog4j-core/lib
-cp local_builds/jvlog.jar vlog4j-core/lib/jvlog-local.jar
+mkdir -p rulewerk-vlog/lib
+cp local_builds/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar
 mvn initialize -Pdevelopment
diff --git a/vlog4j-core/LICENSE.txt b/coverage/LICENSE.txt
similarity index 100%
rename from vlog4j-core/LICENSE.txt
rename to coverage/LICENSE.txt
diff --git a/coverage/pom.xml b/coverage/pom.xml
new file mode 100644
index 000000000..b14d4bdeb
--- /dev/null
+++ b/coverage/pom.xml
@@ -0,0 +1,87 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>org.semanticweb.rulewerk</groupId>
+		<artifactId>rulewerk-parent</artifactId>
+		<version>0.10.0-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>coverage</artifactId>
+
+	<name>coverage</name>
+
+	<dependencies>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-core</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-vlog</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-rdf</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-owlapi</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-graal</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-parser</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-commands</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-client</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.eluder.coveralls</groupId>
+				<artifactId>coveralls-maven-plugin</artifactId>
+			</plugin>
+			<plugin>
+				<groupId>org.jacoco</groupId>
+				<artifactId>jacoco-maven-plugin</artifactId>
+				<executions>
+					<execution>
+						<id>aggregate-reports-ut</id>
+						<phase>test</phase>
+						<goals>
+							<goal>report-aggregate</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 000000000..a73ab65f8
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,124 @@
+{ + "nodes": { + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1673956053, + "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "locked": { + "lastModified": 1644229661, + "narHash": "sha256-1YdnJAsNy69bpcjuoKdOYQX0YxZBiCYZo4Twxerqv7k=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "3cecb5b042f7f209c56ffd8371b2711a290ec797", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "gitignore": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1660459072, + "narHash":
"sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", + "owner": "hercules-ci", + "repo": "gitignore.nix", + "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "gitignore.nix", + "type": "github" + } + }, + "mvn2nix": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ], + "utils": [ + "utils", + "flake-utils" + ] + }, + "locked": { + "lastModified": 1629170129, + "narHash": "sha256-v/HvYqzkPaGin1ujo+Fi59wXC9vWxW3lYVSwElORRi8=", + "owner": "fzakaria", + "repo": "mvn2nix", + "rev": "ea21cfe97069feee55fa307ca9b125616c1fa84f", + "type": "github" + }, + "original": { + "owner": "fzakaria", + "repo": "mvn2nix", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1689048911, + "narHash": "sha256-pODI2CkjWbSLo5nPMZoLtkRNJU/Nr3VSITXZqqmNtIk=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "8163a64662b43848802092d52015ef60777d6129", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-23.05", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-compat": "flake-compat", + "gitignore": "gitignore", + "mvn2nix": "mvn2nix", + "nixpkgs": "nixpkgs", + "utils": "utils" + } + }, + "utils": { + "inputs": { + "flake-utils": "flake-utils" + }, + "locked": { + "lastModified": 1657226504, + "narHash": "sha256-GIYNjuq4mJlFgqKsZ+YrgzWm0IpA4axA3MCrdKYj7gs=", + "owner": "gytis-ivaskevicius", + "repo": "flake-utils-plus", + "rev": "2bf0f91643c2e5ae38c1b26893ac2927ac9bd82a", + "type": "github" + }, + "original": { + "owner": "gytis-ivaskevicius", + "repo": "flake-utils-plus", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 000000000..536884352 --- /dev/null +++ b/flake.nix @@ -0,0 +1,131 @@ +{ + description = "Rulewerk, a java toolkit for reasoning with existential rules"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05"; + utils.url = "github:gytis-ivaskevicius/flake-utils-plus"; + gitignore = { + url = "github:hercules-ci/gitignore.nix"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + mvn2nix = { + url = "github:fzakaria/mvn2nix"; + inputs = { + nixpkgs.follows = "nixpkgs"; + utils.follows = "utils/flake-utils"; + }; + }; + flake-compat = { + url = "github:edolstra/flake-compat"; + flake = false; + }; + }; + + outputs = { + self, + nixpkgs, + utils, + gitignore, + mvn2nix, + ... + } @ inputs: let + # this selects the JDK version used from a package set. + getJdk = pkgs: pkgs.jdk8_headless; + in + utils.lib.mkFlake rec { + inherit self inputs; + + overlays = rec { + mvn2nix = inputs.mvn2nix.overlay; + rulewerk = import ./nix { + inherit getJdk; + inherit (gitignore.lib) gitignoreSource; + }; + default = inputs.nixpkgs.lib.composeManyExtensions [mvn2nix rulewerk]; + }; + + sharedOverlays = [ + mvn2nix.overlay + self.overlays.default + ]; + + outputsBuilder = channels: let + pkgs = channels.nixpkgs; + jdk = getJdk pkgs; + maven = pkgs.maven.override {inherit jdk;}; + in rec { + formatter = pkgs.alejandra; + + packages = rec { + inherit (pkgs) kognac trident vlog rulewerk; + default = rulewerk; + }; + + apps = rec { + rulewerk = utils.lib.mkApp {drv = packages.rulewerk;}; + default = rulewerk; + mvn2nix = utils.lib.mkApp { + drv = pkgs.writeShellScriptBin "mvn2nix" '' + ${maven}/bin/mvn clean + + # skip the tests here, since we are only interested in + # building the dependency graph for rulewerk. Tests will + # run later as part of the rulewerk derivation. 
Also make sure + # to invoke all plugins that are executed during build and test, + # since we need to have them in the repository as well. + + MAVEN_OPTS="-DskipTests=true -DskipIT=true" \ + ${pkgs.mvn2nix}/bin/mvn2nix \ + --jdk ${jdk} \ + --goals \ + initialize \ + package \ + verify \ + jacoco:report \ + coveralls:help \ + io.github.zlika:reproducible-build-maven-plugin:help \ + org.apache.maven.plugins:maven-install-plugin:help \ + org.apache.maven.plugins:maven-shade-plugin:help \ + --verbose \ + | ${pkgs.jq}/bin/jq -S '{dependencies: .dependencies | with_entries(select(.key|startswith("org.semanticweb.rulewerk") == false))}' \ + > mvn2nix-lock.json + + # the `jq` invocation above serves two purposes: + # (i) `-S` sorts the output, so that we don't get spurious + # changes where just the order of dependencies in the lock + # file is different, but nothing relevant is changed, and + # (ii) remove lock entries for `org.semanticweb.rulewerk`, + # since those are either jars built as part of rulewerk, or + # the `vlog-java` jar. In either case, we always want to use + # our local version of that jar instead of something from + # maven central. Setting up the local `vlog-java` jar happens + # as part of the rulewerk derivation. + ''; + }; + }; + + devShells.default = pkgs.mkShell { + buildInputs = [ + jdk + maven + pkgs.kognac + pkgs.trident + pkgs.sparsehash + pkgs.curl + pkgs.lz4 + pkgs.zlib + # rulewerk-debug is like rulewerk, but with debug symbols + # in both VLog and rulewerk enabled + pkgs.rulewerk-debug + ]; + # rulewerk/rulewerk-debug include a wrapper around `mvn` + # that automatically provides the local repository with the + # dependencies in it, make sure this is at the front of the path, + # i.e., before the upstream `mvn`. + shellHook = '' + export "PATH=${pkgs.rulewerk-debug}/bin:$PATH" + ''; + }; + }; + }; +} diff --git a/mvn2nix-lock.json b/mvn2nix-lock.json new file mode 100644 index 000000000..c38fd13d0 --- /dev/null +++ b/mvn2nix-lock.json @@ -0,0 +1,4804 @@ +{ + "dependencies": { + "antlr:antlr:pom:2.7.2": { + "layout": "antlr/antlr/2.7.2/antlr-2.7.2.pom", + "sha256": "5e9abd6c993c7d2859fb759bfb77355a6c8184aa5d200aa740e345da5f4c58fe", + "url": "https://repo.maven.apache.org/maven2/antlr/antlr/2.7.2/antlr-2.7.2.pom" + }, + "avalon-framework:avalon-framework:pom:4.1.3": { + "layout": "avalon-framework/avalon-framework/4.1.3/avalon-framework-4.1.3.pom", + "sha256": "c6c971b146ec8e596e660e64d5517aae02e34c3cce240de44bb92ccd98f046bf", + "url": "https://repo.maven.apache.org/maven2/avalon-framework/avalon-framework/4.1.3/avalon-framework-4.1.3.pom" + }, + "backport-util-concurrent:backport-util-concurrent:jar:3.1": { + "layout": "backport-util-concurrent/backport-util-concurrent/3.1/backport-util-concurrent-3.1.jar", + "sha256": "f5759b7fcdfc83a525a036deedcbd32e5b536b625ebc282426f16ca137eb5902", + "url": "https://repo.maven.apache.org/maven2/backport-util-concurrent/backport-util-concurrent/3.1/backport-util-concurrent-3.1.jar" + }, + "backport-util-concurrent:backport-util-concurrent:pom:3.1": { + "layout": "backport-util-concurrent/backport-util-concurrent/3.1/backport-util-concurrent-3.1.pom", + "sha256": "770471090ca40a17b9e436ee2ec00819be42042da6f4085ece1d37916dc08ff9", + "url": "https://repo.maven.apache.org/maven2/backport-util-concurrent/backport-util-concurrent/3.1/backport-util-concurrent-3.1.pom" + }, + "classworlds:classworlds:jar:1.1": { + "layout": "classworlds/classworlds/1.1/classworlds-1.1.jar", + "sha256": 
"4e3e0ad158ec60917e0de544c550f31cd65d5a97c3af1c1968bf427e4a9df2e4", + "url": "https://repo.maven.apache.org/maven2/classworlds/classworlds/1.1/classworlds-1.1.jar" + }, + "classworlds:classworlds:jar:1.1-alpha-2": { + "layout": "classworlds/classworlds/1.1-alpha-2/classworlds-1.1-alpha-2.jar", + "sha256": "2bf4e59f3acd106fea6145a9a88fe8956509f8b9c0fdd11eb96fee757269e3f3", + "url": "https://repo.maven.apache.org/maven2/classworlds/classworlds/1.1-alpha-2/classworlds-1.1-alpha-2.jar" + }, + "classworlds:classworlds:pom:1.1": { + "layout": "classworlds/classworlds/1.1/classworlds-1.1.pom", + "sha256": "25a1efc00bcd1f029fd20c44df843b8b12d1fa17485235470764f011d2f5cb29", + "url": "https://repo.maven.apache.org/maven2/classworlds/classworlds/1.1/classworlds-1.1.pom" + }, + "classworlds:classworlds:pom:1.1-alpha-2": { + "layout": "classworlds/classworlds/1.1-alpha-2/classworlds-1.1-alpha-2.pom", + "sha256": "0cc647963b74ad1d7a37c9868e9e5a8f474e49297e1863582253a08a4c719cb1", + "url": "https://repo.maven.apache.org/maven2/classworlds/classworlds/1.1-alpha-2/classworlds-1.1-alpha-2.pom" + }, + "com.fasterxml.jackson.core:jackson-annotations:jar:2.8.3": { + "layout": "com/fasterxml/jackson/core/jackson-annotations/2.8.3/jackson-annotations-2.8.3.jar", + "sha256": "4aee65071dfd3bbba3476a971ebff8b0d77402f84dd8a0c5f7580d27a7ce611d", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.8.3/jackson-annotations-2.8.3.jar" + }, + "com.fasterxml.jackson.core:jackson-annotations:jar:2.9.0": { + "layout": "com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0.jar", + "sha256": "45d32ac61ef8a744b464c54c2b3414be571016dd46bfc2bec226761cf7ae457a", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0.jar" + }, + "com.fasterxml.jackson.core:jackson-annotations:pom:2.8.3": { + "layout": "com/fasterxml/jackson/core/jackson-annotations/2.8.3/jackson-annotations-2.8.3.pom", + "sha256": "6442e94d8d1c7386dbb49fa3195ebe9adc44ec2f4dc5094489bb24a092644db9", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.8.3/jackson-annotations-2.8.3.pom" + }, + "com.fasterxml.jackson.core:jackson-annotations:pom:2.9.0": { + "layout": "com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0.pom", + "sha256": "edeb9d9bfca726b441bfae09b69d94b5db99cd5a9da5f4a061b90e72b640905c", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-annotations/2.9.0/jackson-annotations-2.9.0.pom" + }, + "com.fasterxml.jackson.core:jackson-core:jar:2.8.3": { + "layout": "com/fasterxml/jackson/core/jackson-core/2.8.3/jackson-core-2.8.3.jar", + "sha256": "b0e1727f9dbbc0e2d3534456c1d6420b362109cd47c41fabab7997bbb7194d0c", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-core/2.8.3/jackson-core-2.8.3.jar" + }, + "com.fasterxml.jackson.core:jackson-core:jar:2.8.8": { + "layout": "com/fasterxml/jackson/core/jackson-core/2.8.8/jackson-core-2.8.8.jar", + "sha256": "d9bde8c72c22202bf17b05c7811db4964ff8e843d97c00a9bfb048c0fe7a726b", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-core/2.8.8/jackson-core-2.8.8.jar" + }, + "com.fasterxml.jackson.core:jackson-core:pom:2.8.3": { + "layout": "com/fasterxml/jackson/core/jackson-core/2.8.3/jackson-core-2.8.3.pom", + "sha256": "975706e32437f6b63c01dde7bdacc58c8fe9208b8a7475f8c4ccc6bbc6468d00", + "url": 
"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-core/2.8.3/jackson-core-2.8.3.pom" + }, + "com.fasterxml.jackson.core:jackson-core:pom:2.8.8": { + "layout": "com/fasterxml/jackson/core/jackson-core/2.8.8/jackson-core-2.8.8.pom", + "sha256": "cec2f39fd2a4fe7d6e6d0ce5f002393aa2c66ea38e6720e0d436ea1c3e19c9dd", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-core/2.8.8/jackson-core-2.8.8.pom" + }, + "com.fasterxml.jackson.core:jackson-core:pom:2.9.0": { + "layout": "com/fasterxml/jackson/core/jackson-core/2.9.0/jackson-core-2.9.0.pom", + "sha256": "e48ad689b9dcaab93aa4bc6d735649c2ea25143a3cf997846394d96f0dbf802a", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-core/2.9.0/jackson-core-2.9.0.pom" + }, + "com.fasterxml.jackson.core:jackson-core:pom:2.9.5": { + "layout": "com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.pom", + "sha256": "5cff6ebf24676803daa08fb33dd7e8d122a22c25984f2766dd9d34f0f49a215a", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.pom" + }, + "com.fasterxml.jackson.core:jackson-databind:jar:2.8.3": { + "layout": "com/fasterxml/jackson/core/jackson-databind/2.8.3/jackson-databind-2.8.3.jar", + "sha256": "3e207264b4c1d8235ad048929f34f0a868ebcd7154e0eaeed6c4f7dda3a2b8c5", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.8.3/jackson-databind-2.8.3.jar" + }, + "com.fasterxml.jackson.core:jackson-databind:jar:2.9.5": { + "layout": "com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar", + "sha256": "0fb4e079c118e752cc94c15ad22e6782b0dfc5dc09145f4813fb39d82e686047", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.jar" + }, + "com.fasterxml.jackson.core:jackson-databind:pom:2.8.3": { + "layout": "com/fasterxml/jackson/core/jackson-databind/2.8.3/jackson-databind-2.8.3.pom", + "sha256": "fe15a34559ed39d0b17d55775dc3b6305e22737e59145a22aed2755092fa6131", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.8.3/jackson-databind-2.8.3.pom" + }, + "com.fasterxml.jackson.core:jackson-databind:pom:2.9.0": { + "layout": "com/fasterxml/jackson/core/jackson-databind/2.9.0/jackson-databind-2.9.0.pom", + "sha256": "e9895089ab8acf63204e8d053b552f4256068d3f1d0ec47e57149a45ec39e1c8", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.9.0/jackson-databind-2.9.0.pom" + }, + "com.fasterxml.jackson.core:jackson-databind:pom:2.9.5": { + "layout": "com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.pom", + "sha256": "f336981b0303400af3caa58a1a2779d04feeb2967f2b4fcefa8c764d2fc4f2f0", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/core/jackson-databind/2.9.5/jackson-databind-2.9.5.pom" + }, + "com.fasterxml.jackson:jackson-base:pom:2.9.5": { + "layout": "com/fasterxml/jackson/jackson-base/2.9.5/jackson-base-2.9.5.pom", + "sha256": "1af5780f7e8de45a0ed2d470d2040fbc7148f0857448ddf574c7a76444a1a0fb", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/jackson-base/2.9.5/jackson-base-2.9.5.pom" + }, + "com.fasterxml.jackson:jackson-bom:pom:2.8.8": { + "layout": "com/fasterxml/jackson/jackson-bom/2.8.8/jackson-bom-2.8.8.pom", + "sha256": "5ad047d670cd3e009a90ecf3ba034b486c7172873efc96a2d97f2d8b7045205b", + "url": 
"https://repo.maven.apache.org/maven2/com/fasterxml/jackson/jackson-bom/2.8.8/jackson-bom-2.8.8.pom" + }, + "com.fasterxml.jackson:jackson-bom:pom:2.9.0": { + "layout": "com/fasterxml/jackson/jackson-bom/2.9.0/jackson-bom-2.9.0.pom", + "sha256": "0e8117802e82c29fea7d9488b4dc0a8e429c700a38c698195ff2a278cee95ffc", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/jackson-bom/2.9.0/jackson-bom-2.9.0.pom" + }, + "com.fasterxml.jackson:jackson-bom:pom:2.9.5": { + "layout": "com/fasterxml/jackson/jackson-bom/2.9.5/jackson-bom-2.9.5.pom", + "sha256": "895aa2ba19bc6677bb164c4fcf43eaf9c192b18c85d604f9ec7a23e6776ac499", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/jackson-bom/2.9.5/jackson-bom-2.9.5.pom" + }, + "com.fasterxml.jackson:jackson-parent:pom:2.8": { + "layout": "com/fasterxml/jackson/jackson-parent/2.8/jackson-parent-2.8.pom", + "sha256": "3b51994c1a3a29c2c89728226c0be14b69888a1bb0ef311f8d65904cdfbd9358", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/jackson-parent/2.8/jackson-parent-2.8.pom" + }, + "com.fasterxml.jackson:jackson-parent:pom:2.9.0": { + "layout": "com/fasterxml/jackson/jackson-parent/2.9.0/jackson-parent-2.9.0.pom", + "sha256": "000508b61dd124387e58199fb9e2113c1b5d8b386c5bf7a891b2de6041eb1d52", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/jackson-parent/2.9.0/jackson-parent-2.9.0.pom" + }, + "com.fasterxml.jackson:jackson-parent:pom:2.9.1": { + "layout": "com/fasterxml/jackson/jackson-parent/2.9.1/jackson-parent-2.9.1.pom", + "sha256": "7c04f029d280fbb8274e75021dc90f39b2c622fe3499daf07fc35081c2d9d9ff", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/jackson/jackson-parent/2.9.1/jackson-parent-2.9.1.pom" + }, + "com.fasterxml:oss-parent:pom:27": { + "layout": "com/fasterxml/oss-parent/27/oss-parent-27.pom", + "sha256": "b9b8f388fd628057b1249756468b86726c8fd5816ce14d313cb40003a509beeb", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/oss-parent/27/oss-parent-27.pom" + }, + "com.fasterxml:oss-parent:pom:28": { + "layout": "com/fasterxml/oss-parent/28/oss-parent-28.pom", + "sha256": "c4c369e36b8820ad26d2e6c70ff44ad99efafe19399a5d1178e9ea82f4d2fd38", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/oss-parent/28/oss-parent-28.pom" + }, + "com.fasterxml:oss-parent:pom:30": { + "layout": "com/fasterxml/oss-parent/30/oss-parent-30.pom", + "sha256": "d0e25466520981ff57ecadbdc94034d1d16903b92e950be9f9d42441c594f9f0", + "url": "https://repo.maven.apache.org/maven2/com/fasterxml/oss-parent/30/oss-parent-30.pom" + }, + "com.github.ben-manes.caffeine:caffeine:jar:2.6.1": { + "layout": "com/github/ben-manes/caffeine/caffeine/2.6.1/caffeine-2.6.1.jar", + "sha256": "e45e7995bd0f9924e105691127d9b092f37ef314fab8364a2302a4dfb416d1ca", + "url": "https://repo.maven.apache.org/maven2/com/github/ben-manes/caffeine/caffeine/2.6.1/caffeine-2.6.1.jar" + }, + "com.github.ben-manes.caffeine:caffeine:pom:2.6.1": { + "layout": "com/github/ben-manes/caffeine/caffeine/2.6.1/caffeine-2.6.1.pom", + "sha256": "3bf6b1c5b574d6c4accb1cc768ac9254d0195709ec508b0bfb1e863b90abe1c2", + "url": "https://repo.maven.apache.org/maven2/com/github/ben-manes/caffeine/caffeine/2.6.1/caffeine-2.6.1.pom" + }, + "com.github.jsonld-java:jsonld-java-parent:pom:0.11.1": { + "layout": "com/github/jsonld-java/jsonld-java-parent/0.11.1/jsonld-java-parent-0.11.1.pom", + "sha256": "d5aeda848376452ea44a036c0294e28b18eec324335a0e755fd28a13232b65ac", + "url": 
"https://repo.maven.apache.org/maven2/com/github/jsonld-java/jsonld-java-parent/0.11.1/jsonld-java-parent-0.11.1.pom" + }, + "com.github.jsonld-java:jsonld-java-parent:pom:0.12.0": { + "layout": "com/github/jsonld-java/jsonld-java-parent/0.12.0/jsonld-java-parent-0.12.0.pom", + "sha256": "c5f8122e5529818e257ee7b143729def351ed8c0ee53f659833b1290d7aab614", + "url": "https://repo.maven.apache.org/maven2/com/github/jsonld-java/jsonld-java-parent/0.12.0/jsonld-java-parent-0.12.0.pom" + }, + "com.github.jsonld-java:jsonld-java:jar:0.12.0": { + "layout": "com/github/jsonld-java/jsonld-java/0.12.0/jsonld-java-0.12.0.jar", + "sha256": "57e60ed820590a43670898222ef4b6c711732b5060d9b060726b3b1e6d08181c", + "url": "https://repo.maven.apache.org/maven2/com/github/jsonld-java/jsonld-java/0.12.0/jsonld-java-0.12.0.jar" + }, + "com.github.jsonld-java:jsonld-java:pom:0.11.1": { + "layout": "com/github/jsonld-java/jsonld-java/0.11.1/jsonld-java-0.11.1.pom", + "sha256": "8e6186dc18f81e78a2a683ad2828e5f0a09d6d6e79289fba2470e89444298de0", + "url": "https://repo.maven.apache.org/maven2/com/github/jsonld-java/jsonld-java/0.11.1/jsonld-java-0.11.1.pom" + }, + "com.github.jsonld-java:jsonld-java:pom:0.12.0": { + "layout": "com/github/jsonld-java/jsonld-java/0.12.0/jsonld-java-0.12.0.pom", + "sha256": "95090e908d6270d55a8da83d19982ee8403b79f58a3fff173c59acc6dc66f8a3", + "url": "https://repo.maven.apache.org/maven2/com/github/jsonld-java/jsonld-java/0.12.0/jsonld-java-0.12.0.pom" + }, + "com.github.vsonnier:hppcrt-parent:pom:0.7.5": { + "layout": "com/github/vsonnier/hppcrt-parent/0.7.5/hppcrt-parent-0.7.5.pom", + "sha256": "8e6469ff08e3c05bb76336409943f5b33308b9c535d524cf5e690ba2cc41d4bf", + "url": "https://repo.maven.apache.org/maven2/com/github/vsonnier/hppcrt-parent/0.7.5/hppcrt-parent-0.7.5.pom" + }, + "com.github.vsonnier:hppcrt:jar:0.7.5": { + "layout": "com/github/vsonnier/hppcrt/0.7.5/hppcrt-0.7.5.jar", + "sha256": "f3cfbd511d0bd5a29b72bcca97fcaa81889cf6698f045c5f658744829c8784b9", + "url": "https://repo.maven.apache.org/maven2/com/github/vsonnier/hppcrt/0.7.5/hppcrt-0.7.5.jar" + }, + "com.github.vsonnier:hppcrt:pom:0.7.5": { + "layout": "com/github/vsonnier/hppcrt/0.7.5/hppcrt-0.7.5.pom", + "sha256": "5609281e38341b7bf5e63595490328e2978f3e412ab94254572b56d4c864a623", + "url": "https://repo.maven.apache.org/maven2/com/github/vsonnier/hppcrt/0.7.5/hppcrt-0.7.5.pom" + }, + "com.google.code.findbugs:jsr305:jar:3.0.2": { + "layout": "com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar", + "sha256": "766ad2a0783f2687962c8ad74ceecc38a28b9f72a2d085ee438b7813e928d0c7", + "url": "https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar" + }, + "com.google.code.findbugs:jsr305:pom:1.3.9": { + "layout": "com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.pom", + "sha256": "feab9191311c3d7aeef2b66d6064afc80d3d1d52d980fb07ae43c78c987ba93a", + "url": "https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.pom" + }, + "com.google.code.findbugs:jsr305:pom:2.0.1": { + "layout": "com/google/code/findbugs/jsr305/2.0.1/jsr305-2.0.1.pom", + "sha256": "02c12c3c2ae12dd475219ff691c82a4d9ea21f44bc594a181295bf6d43dcfbb0", + "url": "https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/2.0.1/jsr305-2.0.1.pom" + }, + "com.google.code.findbugs:jsr305:pom:2.0.2": { + "layout": "com/google/code/findbugs/jsr305/2.0.2/jsr305-2.0.2.pom", + "sha256": "8bc2c4f67a6396a7333dece2d1f991ca7d0aea48b29592265e2239be91972579", + "url": 
"https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/2.0.2/jsr305-2.0.2.pom" + }, + "com.google.code.findbugs:jsr305:pom:2.0.3": { + "layout": "com/google/code/findbugs/jsr305/2.0.3/jsr305-2.0.3.pom", + "sha256": "1125d1e25c85654622f762c0c1f66ee4c435d381e2dfb4a86b8baa48cf230308", + "url": "https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/2.0.3/jsr305-2.0.3.pom" + }, + "com.google.code.findbugs:jsr305:pom:3.0.0": { + "layout": "com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.pom", + "sha256": "7c8a0859a3833eee5c53b0bceb2898ef56a88c7dd9a7e4f6bc5f0ff9770c95ec", + "url": "https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.pom" + }, + "com.google.code.findbugs:jsr305:pom:3.0.1": { + "layout": "com/google/code/findbugs/jsr305/3.0.1/jsr305-3.0.1.pom", + "sha256": "4170a761dc5bfd3981a8e6f7aab9e2acdce82d34fd5aa9bb11e3c090d25314ce", + "url": "https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.1/jsr305-3.0.1.pom" + }, + "com.google.code.findbugs:jsr305:pom:3.0.2": { + "layout": "com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.pom", + "sha256": "19889dbdf1b254b2601a5ee645b8147a974644882297684c798afe5d63d78dfe", + "url": "https://repo.maven.apache.org/maven2/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.pom" + }, + "com.google.errorprone:error_prone_annotations:jar:2.0.18": { + "layout": "com/google/errorprone/error_prone_annotations/2.0.18/error_prone_annotations-2.0.18.jar", + "sha256": "cb4cfad870bf563a07199f3ebea5763f0dec440fcda0b318640b1feaa788656b", + "url": "https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.0.18/error_prone_annotations-2.0.18.jar" + }, + "com.google.errorprone:error_prone_annotations:pom:2.0.18": { + "layout": "com/google/errorprone/error_prone_annotations/2.0.18/error_prone_annotations-2.0.18.pom", + "sha256": "9144127192d6f612c2366825dceaeb23b0d53130b83e0bf1ffe107d1470a8487", + "url": "https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_annotations/2.0.18/error_prone_annotations-2.0.18.pom" + }, + "com.google.errorprone:error_prone_parent:pom:2.0.18": { + "layout": "com/google/errorprone/error_prone_parent/2.0.18/error_prone_parent-2.0.18.pom", + "sha256": "cf149955279b07d4f11e817985c1164a69e930d73db7441b43a6ef53bbd286c4", + "url": "https://repo.maven.apache.org/maven2/com/google/errorprone/error_prone_parent/2.0.18/error_prone_parent-2.0.18.pom" + }, + "com.google.guava:guava-parent:pom:18.0": { + "layout": "com/google/guava/guava-parent/18.0/guava-parent-18.0.pom", + "sha256": "a4accc8895e757f6a33f087e4fd0b661c5638ffe5e0728f298efe7d80551b166", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/18.0/guava-parent-18.0.pom" + }, + "com.google.guava:guava-parent:pom:19.0": { + "layout": "com/google/guava/guava-parent/19.0/guava-parent-19.0.pom", + "sha256": "3b76ff43ecdf10f74da36e4aae2b470c9d063e053c5ca026f5512ae94f1c3e10", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/19.0/guava-parent-19.0.pom" + }, + "com.google.guava:guava-parent:pom:19.0-rc1": { + "layout": "com/google/guava/guava-parent/19.0-rc1/guava-parent-19.0-rc1.pom", + "sha256": "da69afed87089e5a5b41c895ed816b564a061e1dd9e40b233758d494efd3bf63", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/19.0-rc1/guava-parent-19.0-rc1.pom" + }, + "com.google.guava:guava-parent:pom:19.0-rc2": { + "layout": "com/google/guava/guava-parent/19.0-rc2/guava-parent-19.0-rc2.pom", + "sha256": 
"692ca50bced4e29168b964cf1d3b44904c20403698841eedaad06f137fa285b6", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/19.0-rc2/guava-parent-19.0-rc2.pom" + }, + "com.google.guava:guava-parent:pom:19.0-rc3": { + "layout": "com/google/guava/guava-parent/19.0-rc3/guava-parent-19.0-rc3.pom", + "sha256": "1616d9006c5442e1da5f42c2c30e5b7203cff65b8468e6f9cce15e020d7e33e2", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/19.0-rc3/guava-parent-19.0-rc3.pom" + }, + "com.google.guava:guava-parent:pom:20.0": { + "layout": "com/google/guava/guava-parent/20.0/guava-parent-20.0.pom", + "sha256": "f1226fd07fc72af8d6232bdfa70bf31d883a1a01cbc547f23a74e9066c692df1", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/20.0/guava-parent-20.0.pom" + }, + "com.google.guava:guava-parent:pom:20.0-rc1": { + "layout": "com/google/guava/guava-parent/20.0-rc1/guava-parent-20.0-rc1.pom", + "sha256": "eed2db5e37d000a259f004a9e404c7469a7d0107e03969a3afffdde5b8359946", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/20.0-rc1/guava-parent-20.0-rc1.pom" + }, + "com.google.guava:guava-parent:pom:21.0": { + "layout": "com/google/guava/guava-parent/21.0/guava-parent-21.0.pom", + "sha256": "acdc6d6f07b346caeb174932788bccc63c035cc5c2b100958c21ffe0f6e19988", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/21.0/guava-parent-21.0.pom" + }, + "com.google.guava:guava-parent:pom:21.0-rc1": { + "layout": "com/google/guava/guava-parent/21.0-rc1/guava-parent-21.0-rc1.pom", + "sha256": "2756d15dd0a4394a319948aabdccca0236dde2de781d258e4442aa241add604e", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/21.0-rc1/guava-parent-21.0-rc1.pom" + }, + "com.google.guava:guava-parent:pom:21.0-rc2": { + "layout": "com/google/guava/guava-parent/21.0-rc2/guava-parent-21.0-rc2.pom", + "sha256": "7c9abf41f3679038181c3c3ca9aca2fad1344aac3f0bd74cd929c5a49cc7d6d8", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/21.0-rc2/guava-parent-21.0-rc2.pom" + }, + "com.google.guava:guava-parent:pom:22.0": { + "layout": "com/google/guava/guava-parent/22.0/guava-parent-22.0.pom", + "sha256": "1eaf9182e1977c1c50a70edbfbf70536398c68990bfaafc9f0e9899041201539", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/22.0/guava-parent-22.0.pom" + }, + "com.google.guava:guava-parent:pom:22.0-rc1": { + "layout": "com/google/guava/guava-parent/22.0-rc1/guava-parent-22.0-rc1.pom", + "sha256": "8172aa643a0a0084f43824554450993a1a9e9ce5f48a2ae827d05f6980cdd5a3", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/22.0-rc1/guava-parent-22.0-rc1.pom" + }, + "com.google.guava:guava-parent:pom:22.0-rc1-android": { + "layout": "com/google/guava/guava-parent/22.0-rc1-android/guava-parent-22.0-rc1-android.pom", + "sha256": "e1f70ce4e4d871e403be19984e2ba7d6bdc406ab8ea9f5373a429f11efac4cfc", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava-parent/22.0-rc1-android/guava-parent-22.0-rc1-android.pom" + }, + "com.google.guava:guava:jar:22.0": { + "layout": "com/google/guava/guava/22.0/guava-22.0.jar", + "sha256": "1158e94c7de4da480873f0b4ab4a1da14c0d23d4b1902cc94a58a6f0f9ab579e", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/22.0/guava-22.0.jar" + }, + "com.google.guava:guava:pom:18.0": { + "layout": "com/google/guava/guava/18.0/guava-18.0.pom", + "sha256": 
"e743d61d76f76b5dc060d6f7914fdd41c4418b3529062556920116a716719836", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/18.0/guava-18.0.pom" + }, + "com.google.guava:guava:pom:19.0": { + "layout": "com/google/guava/guava/19.0/guava-19.0.pom", + "sha256": "addc064da7f1077fdf4dc90e5aaa887cd790850d1b74fe2e6e25d5f78551ac63", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/19.0/guava-19.0.pom" + }, + "com.google.guava:guava:pom:19.0-rc1": { + "layout": "com/google/guava/guava/19.0-rc1/guava-19.0-rc1.pom", + "sha256": "a988866858763cef2b02f582395abbde420388f76d62ff4e80ad193bf4f525d5", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/19.0-rc1/guava-19.0-rc1.pom" + }, + "com.google.guava:guava:pom:19.0-rc2": { + "layout": "com/google/guava/guava/19.0-rc2/guava-19.0-rc2.pom", + "sha256": "bbd8af7499125736785a16656b3be207b1af76414d5d8cea4e17714d4daf0b40", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/19.0-rc2/guava-19.0-rc2.pom" + }, + "com.google.guava:guava:pom:19.0-rc3": { + "layout": "com/google/guava/guava/19.0-rc3/guava-19.0-rc3.pom", + "sha256": "ad47ca81e17be00ed01e8bcc40f1a949e308f89eff19399be777cdda5d305456", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/19.0-rc3/guava-19.0-rc3.pom" + }, + "com.google.guava:guava:pom:20.0": { + "layout": "com/google/guava/guava/20.0/guava-20.0.pom", + "sha256": "363cc83767b760d7a564d5301e09467e6d48fc1c1c1664b1e18c50815ce19076", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/20.0/guava-20.0.pom" + }, + "com.google.guava:guava:pom:20.0-rc1": { + "layout": "com/google/guava/guava/20.0-rc1/guava-20.0-rc1.pom", + "sha256": "b6f856478630d0d640f1d56093c6c60f55ec5eff513e3cb4e9412e6f5dd6fcbc", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/20.0-rc1/guava-20.0-rc1.pom" + }, + "com.google.guava:guava:pom:21.0": { + "layout": "com/google/guava/guava/21.0/guava-21.0.pom", + "sha256": "c0b016a85c784e01ca51e9d64bf8fe80ce402b206fea897b03258d5e6d0ff80d", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/21.0/guava-21.0.pom" + }, + "com.google.guava:guava:pom:21.0-rc1": { + "layout": "com/google/guava/guava/21.0-rc1/guava-21.0-rc1.pom", + "sha256": "86cef61a830225c50749d0b3b1628d8f1ed75bd4fbaa9581b58a09b1ef43edde", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/21.0-rc1/guava-21.0-rc1.pom" + }, + "com.google.guava:guava:pom:21.0-rc2": { + "layout": "com/google/guava/guava/21.0-rc2/guava-21.0-rc2.pom", + "sha256": "1683943853033d608ed5d3ce358d2d527686ccc4c4be156784dc3b9faa12a5c3", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/21.0-rc2/guava-21.0-rc2.pom" + }, + "com.google.guava:guava:pom:22.0": { + "layout": "com/google/guava/guava/22.0/guava-22.0.pom", + "sha256": "bfadb3b40f65dd6de1666d6b29f8bb54031396c76eeef4146cf9f28255f8bf33", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/22.0/guava-22.0.pom" + }, + "com.google.guava:guava:pom:22.0-rc1": { + "layout": "com/google/guava/guava/22.0-rc1/guava-22.0-rc1.pom", + "sha256": "ffda4f7a80ff1aa2b29551b66223ac4a31365164729c38c72091c4dc4761653b", + "url": "https://repo.maven.apache.org/maven2/com/google/guava/guava/22.0-rc1/guava-22.0-rc1.pom" + }, + "com.google.guava:guava:pom:22.0-rc1-android": { + "layout": "com/google/guava/guava/22.0-rc1-android/guava-22.0-rc1-android.pom", + "sha256": "51b10855eeb66693659652239c4d98cfa99f0ddfdd784174c25a94aa76b3b3da", + "url": 
"https://repo.maven.apache.org/maven2/com/google/guava/guava/22.0-rc1-android/guava-22.0-rc1-android.pom" + }, + "com.google.j2objc:j2objc-annotations:jar:1.1": { + "layout": "com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar", + "sha256": "2994a7eb78f2710bd3d3bfb639b2c94e219cedac0d4d084d516e78c16dddecf6", + "url": "https://repo.maven.apache.org/maven2/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.jar" + }, + "com.google.j2objc:j2objc-annotations:pom:1.1": { + "layout": "com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.pom", + "sha256": "f0c98c571e93a7cb4dd18df0fa308f0963e7a0620ac2d4244e61e709d03ad6be", + "url": "https://repo.maven.apache.org/maven2/com/google/j2objc/j2objc-annotations/1.1/j2objc-annotations-1.1.pom" + }, + "com.googlecode.javaewah:JavaEWAH:jar:0.7.9": { + "layout": "com/googlecode/javaewah/JavaEWAH/0.7.9/JavaEWAH-0.7.9.jar", + "sha256": "fc499deb9153610f735f75817f1c177978d27a95a18e03d7d3849cfcb35abfc4", + "url": "https://repo.maven.apache.org/maven2/com/googlecode/javaewah/JavaEWAH/0.7.9/JavaEWAH-0.7.9.jar" + }, + "com.googlecode.javaewah:JavaEWAH:pom:0.7.9": { + "layout": "com/googlecode/javaewah/JavaEWAH/0.7.9/JavaEWAH-0.7.9.pom", + "sha256": "ab05da5b081f27f64f00b9695962bef0865c3968f8d3e6c32824d319cceea0b5", + "url": "https://repo.maven.apache.org/maven2/com/googlecode/javaewah/JavaEWAH/0.7.9/JavaEWAH-0.7.9.pom" + }, + "com.jcraft:jsch:jar:0.1.53": { + "layout": "com/jcraft/jsch/0.1.53/jsch-0.1.53.jar", + "sha256": "f00d5cb29d70a98ef6bf2000edc89b415ae6f59d25e33caf5578b20d0d400932", + "url": "https://repo.maven.apache.org/maven2/com/jcraft/jsch/0.1.53/jsch-0.1.53.jar" + }, + "com.jcraft:jsch:pom:0.1.53": { + "layout": "com/jcraft/jsch/0.1.53/jsch-0.1.53.pom", + "sha256": "540369663e20b9b7933630bed08025c66e7a395845ea5be61ac1295432a75d49", + "url": "https://repo.maven.apache.org/maven2/com/jcraft/jsch/0.1.53/jsch-0.1.53.pom" + }, + "com.sun.activation:all:pom:1.2.0": { + "layout": "com/sun/activation/all/1.2.0/all-1.2.0.pom", + "sha256": "1d8518e3ac7532a104e4f7be77def37c982e530723c6bdb3d67708cce2b0c2c4", + "url": "https://repo.maven.apache.org/maven2/com/sun/activation/all/1.2.0/all-1.2.0.pom" + }, + "com.thoughtworks.qdox:qdox:jar:2.0-M7": { + "layout": "com/thoughtworks/qdox/qdox/2.0-M7/qdox-2.0-M7.jar", + "sha256": "d5d596e64c49e91c4463e45673b8fb7f379d6ae01221ac0b555ad2b53cd5411c", + "url": "https://repo.maven.apache.org/maven2/com/thoughtworks/qdox/qdox/2.0-M7/qdox-2.0-M7.jar" + }, + "com.thoughtworks.qdox:qdox:jar:2.0-M9": { + "layout": "com/thoughtworks/qdox/qdox/2.0-M9/qdox-2.0-M9.jar", + "sha256": "ee2f7fa60b6ef3151f1bb0a242e0bacb832ff29f3ee8fd3da61d26d8608bc1bc", + "url": "https://repo.maven.apache.org/maven2/com/thoughtworks/qdox/qdox/2.0-M9/qdox-2.0-M9.jar" + }, + "com.thoughtworks.qdox:qdox:pom:2.0-M7": { + "layout": "com/thoughtworks/qdox/qdox/2.0-M7/qdox-2.0-M7.pom", + "sha256": "9cadf9ea7765f8b581e6aeac6a30606b3c486dc242c77b41888abe0fb7cbbeca", + "url": "https://repo.maven.apache.org/maven2/com/thoughtworks/qdox/qdox/2.0-M7/qdox-2.0-M7.pom" + }, + "com.thoughtworks.qdox:qdox:pom:2.0-M9": { + "layout": "com/thoughtworks/qdox/qdox/2.0-M9/qdox-2.0-M9.pom", + "sha256": "8fb39a1e86ee1dca54680df79512ff2939eba17660f62e142b2b9df699b3b6a3", + "url": "https://repo.maven.apache.org/maven2/com/thoughtworks/qdox/qdox/2.0-M9/qdox-2.0-M9.pom" + }, + "commons-beanutils:commons-beanutils:jar:1.7.0": { + "layout": "commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar", + "sha256": 
"24bcaa20ccbdc7c856ce0c0aea144566943403e2e9f27bd9779cda1d76823ef4", + "url": "https://repo.maven.apache.org/maven2/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar" + }, + "commons-beanutils:commons-beanutils:pom:1.6": { + "layout": "commons-beanutils/commons-beanutils/1.6/commons-beanutils-1.6.pom", + "sha256": "f1309fdb6c64284485bc39188e55d0a30ddbb9311e4d4b6ee08bb038fa4b556d", + "url": "https://repo.maven.apache.org/maven2/commons-beanutils/commons-beanutils/1.6/commons-beanutils-1.6.pom" + }, + "commons-beanutils:commons-beanutils:pom:1.7.0": { + "layout": "commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.pom", + "sha256": "b6aca6465a28b027686f025d57702f90ad0d128e14d1cfceca0bd871f0084ad9", + "url": "https://repo.maven.apache.org/maven2/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.pom" + }, + "commons-chain:commons-chain:jar:1.1": { + "layout": "commons-chain/commons-chain/1.1/commons-chain-1.1.jar", + "sha256": "e408f72da5ed4c5db6ae19e8c3b7ee36259c36c05f7a77f15509a014bfe7bcaa", + "url": "https://repo.maven.apache.org/maven2/commons-chain/commons-chain/1.1/commons-chain-1.1.jar" + }, + "commons-chain:commons-chain:pom:1.1": { + "layout": "commons-chain/commons-chain/1.1/commons-chain-1.1.pom", + "sha256": "cf0c15c4e843507d95be11114039794494d6fc6259118581a90e03b2db5f5acb", + "url": "https://repo.maven.apache.org/maven2/commons-chain/commons-chain/1.1/commons-chain-1.1.pom" + }, + "commons-cli:commons-cli:jar:1.0": { + "layout": "commons-cli/commons-cli/1.0/commons-cli-1.0.jar", + "sha256": "43f24850b7b7b7d79c5fa652418518fbdf427e602b1edabe6f11b85fb93eb013", + "url": "https://repo.maven.apache.org/maven2/commons-cli/commons-cli/1.0/commons-cli-1.0.jar" + }, + "commons-cli:commons-cli:pom:1.0": { + "layout": "commons-cli/commons-cli/1.0/commons-cli-1.0.pom", + "sha256": "97ee40f4e80ca5ecc20162f4e97ee1adfeac1b45ba88b923d5a521e487c9c407", + "url": "https://repo.maven.apache.org/maven2/commons-cli/commons-cli/1.0/commons-cli-1.0.pom" + }, + "commons-codec:commons-codec:jar:1.10": { + "layout": "commons-codec/commons-codec/1.10/commons-codec-1.10.jar", + "sha256": "4241dfa94e711d435f29a4604a3e2de5c4aa3c165e23bd066be6fc1fc4309569", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.10/commons-codec-1.10.jar" + }, + "commons-codec:commons-codec:jar:1.11": { + "layout": "commons-codec/commons-codec/1.11/commons-codec-1.11.jar", + "sha256": "e599d5318e97aa48f42136a2927e6dfa4e8881dff0e6c8e3109ddbbff51d7b7d", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.jar" + }, + "commons-codec:commons-codec:jar:1.3": { + "layout": "commons-codec/commons-codec/1.3/commons-codec-1.3.jar", + "sha256": "1bafd2ece2e88db4cdf835a7f8f0de65fab5b1147977a5dcc59b7c1b8c6f5080", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.3/commons-codec-1.3.jar" + }, + "commons-codec:commons-codec:jar:1.9": { + "layout": "commons-codec/commons-codec/1.9/commons-codec-1.9.jar", + "sha256": "ad19d2601c3abf0b946b5c3a4113e226a8c1e3305e395b90013b78dd94a723ce", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.9/commons-codec-1.9.jar" + }, + "commons-codec:commons-codec:pom:1.10": { + "layout": "commons-codec/commons-codec/1.10/commons-codec-1.10.pom", + "sha256": "bdb8db7012d112a6e3ea8fdb7c510b300d99eff0819d27dddba9c43397ea4cfb", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.10/commons-codec-1.10.pom" + }, + 
"commons-codec:commons-codec:pom:1.11": { + "layout": "commons-codec/commons-codec/1.11/commons-codec-1.11.pom", + "sha256": "c1e7140d1dea8fdf3528bc1e3c5444ac0b541297311f45f9806c213ec3ee9a10", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.11/commons-codec-1.11.pom" + }, + "commons-codec:commons-codec:pom:1.3": { + "layout": "commons-codec/commons-codec/1.3/commons-codec-1.3.pom", + "sha256": "d157e34244e884dd91fa01921ca84372e11f7bb08fedb5d456c0670c28054636", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.3/commons-codec-1.3.pom" + }, + "commons-codec:commons-codec:pom:1.9": { + "layout": "commons-codec/commons-codec/1.9/commons-codec-1.9.pom", + "sha256": "e5efcf039cd909688c201dc5479b144fd6f01f0e40252b7fc5e7d2e1b5c07990", + "url": "https://repo.maven.apache.org/maven2/commons-codec/commons-codec/1.9/commons-codec-1.9.pom" + }, + "commons-collections:commons-collections:jar:2.0": { + "layout": "commons-collections/commons-collections/2.0/commons-collections-2.0.jar", + "sha256": "b5d8a9f671a4e6698d553d0ec98d33ba70358e9b2180c845c88fc7176ddfbb1e", + "url": "https://repo.maven.apache.org/maven2/commons-collections/commons-collections/2.0/commons-collections-2.0.jar" + }, + "commons-collections:commons-collections:jar:3.2.1": { + "layout": "commons-collections/commons-collections/3.2.1/commons-collections-3.2.1.jar", + "sha256": "87363a4c94eaabeefd8b930cb059f66b64c9f7d632862f23de3012da7660047b", + "url": "https://repo.maven.apache.org/maven2/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1.jar" + }, + "commons-collections:commons-collections:pom:2.0": { + "layout": "commons-collections/commons-collections/2.0/commons-collections-2.0.pom", + "sha256": "dafa5cd143542dca7ec092ef4c670a3fd285dfc02b00e790cbe25687bef513cc", + "url": "https://repo.maven.apache.org/maven2/commons-collections/commons-collections/2.0/commons-collections-2.0.pom" + }, + "commons-collections:commons-collections:pom:2.1": { + "layout": "commons-collections/commons-collections/2.1/commons-collections-2.1.pom", + "sha256": "f8a93d50bfaf6fc0720eee8fde6e8fde20da33238ba296e9b1b7cba50ca6d772", + "url": "https://repo.maven.apache.org/maven2/commons-collections/commons-collections/2.1/commons-collections-2.1.pom" + }, + "commons-collections:commons-collections:pom:3.1": { + "layout": "commons-collections/commons-collections/3.1/commons-collections-3.1.pom", + "sha256": "59c9e5fc75e5790e56976c166a89f2cbdad99f76c49b92f74a1749689af726a2", + "url": "https://repo.maven.apache.org/maven2/commons-collections/commons-collections/3.1/commons-collections-3.1.pom" + }, + "commons-collections:commons-collections:pom:3.2": { + "layout": "commons-collections/commons-collections/3.2/commons-collections-3.2.pom", + "sha256": "6da6d5e61be60d77a7eea6c7d0b8ac3cc35ca73cef3cbff97d5982006553786d", + "url": "https://repo.maven.apache.org/maven2/commons-collections/commons-collections/3.2/commons-collections-3.2.pom" + }, + "commons-collections:commons-collections:pom:3.2.1": { + "layout": "commons-collections/commons-collections/3.2.1/commons-collections-3.2.1.pom", + "sha256": "1f9626cbaa584ed5d86021866e4e367e26fe5efc248382652be68beeb43e7416", + "url": "https://repo.maven.apache.org/maven2/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1.pom" + }, + "commons-digester:commons-digester:jar:1.6": { + "layout": "commons-digester/commons-digester/1.6/commons-digester-1.6.jar", + "sha256": 
"4040c63d0dba10ce048ec02bcaf67092f58ac5662ae1ba7b050f77cf86265249", + "url": "https://repo.maven.apache.org/maven2/commons-digester/commons-digester/1.6/commons-digester-1.6.jar" + }, + "commons-digester:commons-digester:jar:1.8": { + "layout": "commons-digester/commons-digester/1.8/commons-digester-1.8.jar", + "sha256": "05662373044f3dff112567b7bb5dfa1174e91e074c0c727b4412788013f49d56", + "url": "https://repo.maven.apache.org/maven2/commons-digester/commons-digester/1.8/commons-digester-1.8.jar" + }, + "commons-digester:commons-digester:pom:1.6": { + "layout": "commons-digester/commons-digester/1.6/commons-digester-1.6.pom", + "sha256": "9ef0db04ffe98d03eb9a921337364be7d123d58d66dcaff3eac763f0b0c63d48", + "url": "https://repo.maven.apache.org/maven2/commons-digester/commons-digester/1.6/commons-digester-1.6.pom" + }, + "commons-digester:commons-digester:pom:1.8": { + "layout": "commons-digester/commons-digester/1.8/commons-digester-1.8.pom", + "sha256": "c10144f223d7ab697ccea7da0e753b75603ea7fbc4e35570068e6c477068e9b5", + "url": "https://repo.maven.apache.org/maven2/commons-digester/commons-digester/1.8/commons-digester-1.8.pom" + }, + "commons-io:commons-io:jar:2.4": { + "layout": "commons-io/commons-io/2.4/commons-io-2.4.jar", + "sha256": "cc6a41dc3eaacc9e440a6bd0d2890b20d36b4ee408fe2d67122f328bb6e01581", + "url": "https://repo.maven.apache.org/maven2/commons-io/commons-io/2.4/commons-io-2.4.jar" + }, + "commons-io:commons-io:jar:2.5": { + "layout": "commons-io/commons-io/2.5/commons-io-2.5.jar", + "sha256": "a10418348d234968600ccb1d988efcbbd08716e1d96936ccc1880e7d22513474", + "url": "https://repo.maven.apache.org/maven2/commons-io/commons-io/2.5/commons-io-2.5.jar" + }, + "commons-io:commons-io:jar:2.6": { + "layout": "commons-io/commons-io/2.6/commons-io-2.6.jar", + "sha256": "f877d304660ac2a142f3865badfc971dec7ed73c747c7f8d5d2f5139ca736513", + "url": "https://repo.maven.apache.org/maven2/commons-io/commons-io/2.6/commons-io-2.6.jar" + }, + "commons-io:commons-io:pom:2.4": { + "layout": "commons-io/commons-io/2.4/commons-io-2.4.pom", + "sha256": "b2b5dd46cf998fa626eb6f8a1c114f6167c8d392694164e62533e5898e9b31f2", + "url": "https://repo.maven.apache.org/maven2/commons-io/commons-io/2.4/commons-io-2.4.pom" + }, + "commons-io:commons-io:pom:2.5": { + "layout": "commons-io/commons-io/2.5/commons-io-2.5.pom", + "sha256": "28ebb2998bc7d7acb25078526971640892000f3413586ff42d611f1043bfec30", + "url": "https://repo.maven.apache.org/maven2/commons-io/commons-io/2.5/commons-io-2.5.pom" + }, + "commons-io:commons-io:pom:2.6": { + "layout": "commons-io/commons-io/2.6/commons-io-2.6.pom", + "sha256": "0c23863893a2291f5a7afdbd8d15923b3948afd87e563fa341cdcf6eae338a60", + "url": "https://repo.maven.apache.org/maven2/commons-io/commons-io/2.6/commons-io-2.6.pom" + }, + "commons-lang:commons-lang:jar:2.1": { + "layout": "commons-lang/commons-lang/2.1/commons-lang-2.1.jar", + "sha256": "2ded7343dc8e57decd5e6302337139be020fdd885a2935925e8d575975e480b9", + "url": "https://repo.maven.apache.org/maven2/commons-lang/commons-lang/2.1/commons-lang-2.1.jar" + }, + "commons-lang:commons-lang:jar:2.4": { + "layout": "commons-lang/commons-lang/2.4/commons-lang-2.4.jar", + "sha256": "2c73b940c91250bc98346926270f13a6a10bb6e29d2c9316a70d134e382c873e", + "url": "https://repo.maven.apache.org/maven2/commons-lang/commons-lang/2.4/commons-lang-2.4.jar" + }, + "commons-lang:commons-lang:pom:2.1": { + "layout": "commons-lang/commons-lang/2.1/commons-lang-2.1.pom", + "sha256": 
"f1a709cd489f23498a0b6b3dfbfc0d21d4f15904791446dec7f8a58a7da5bd6a", + "url": "https://repo.maven.apache.org/maven2/commons-lang/commons-lang/2.1/commons-lang-2.1.pom" + }, + "commons-lang:commons-lang:pom:2.4": { + "layout": "commons-lang/commons-lang/2.4/commons-lang-2.4.pom", + "sha256": "90306278d39ed5a50dafa468adee4d272b635d54c4f7295e293e42bbdb8ad666", + "url": "https://repo.maven.apache.org/maven2/commons-lang/commons-lang/2.4/commons-lang-2.4.pom" + }, + "commons-logging:commons-logging-api:jar:1.0.4": { + "layout": "commons-logging/commons-logging-api/1.0.4/commons-logging-api-1.0.4.jar", + "sha256": "e168814e138fd3c00ba5e6dd4db0cf64896dfaa0f3a890d0d66652088fd01816", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging-api/1.0.4/commons-logging-api-1.0.4.jar" + }, + "commons-logging:commons-logging-api:pom:1.0.4": { + "layout": "commons-logging/commons-logging-api/1.0.4/commons-logging-api-1.0.4.pom", + "sha256": "7f1c4c51afbd97e0d60efe3cdffc4bb73128748518a1f4263d211d2a4120f45f", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging-api/1.0.4/commons-logging-api-1.0.4.pom" + }, + "commons-logging:commons-logging:jar:1.0.4": { + "layout": "commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar", + "sha256": "e94af49749384c11f5aa50e8d0f5fe679be771295b52030338d32843c980351e", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.jar" + }, + "commons-logging:commons-logging:jar:1.2": { + "layout": "commons-logging/commons-logging/1.2/commons-logging-1.2.jar", + "sha256": "daddea1ea0be0f56978ab3006b8ac92834afeefbd9b7e4e6316fca57df0fa636", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.jar" + }, + "commons-logging:commons-logging:pom:1.0": { + "layout": "commons-logging/commons-logging/1.0/commons-logging-1.0.pom", + "sha256": "52b3caa59ca5e8f6279421ecb517a0b24571d666ea86bb145462076760026a6f", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.0/commons-logging-1.0.pom" + }, + "commons-logging:commons-logging:pom:1.0.3": { + "layout": "commons-logging/commons-logging/1.0.3/commons-logging-1.0.3.pom", + "sha256": "8c23c6e92f1df7f58b455cd2caa009dcc87a2fe64976e6ce461522e635aea41e", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.0.3/commons-logging-1.0.3.pom" + }, + "commons-logging:commons-logging:pom:1.0.4": { + "layout": "commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.pom", + "sha256": "65d310509352b5425118225ee600a01f83ba72142d035014b5d164bc04b2d284", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.0.4/commons-logging-1.0.4.pom" + }, + "commons-logging:commons-logging:pom:1.1": { + "layout": "commons-logging/commons-logging/1.1/commons-logging-1.1.pom", + "sha256": "1f68425fce1007c3343343a27c27057f1427970682cb6d33e493c111721f7cb6", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.1/commons-logging-1.1.pom" + }, + "commons-logging:commons-logging:pom:1.1.1": { + "layout": "commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.pom", + "sha256": "d0f2e16d054e8bb97add9ca26525eb2346f692809fcd2a28787da8ceb3c35ee8", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.pom" + }, + "commons-logging:commons-logging:pom:1.2": { + "layout": "commons-logging/commons-logging/1.2/commons-logging-1.2.pom", + "sha256": 
"c91ab5aa570d86f6fd07cc158ec6bc2c50080402972ee9179fe24100739fbb20", + "url": "https://repo.maven.apache.org/maven2/commons-logging/commons-logging/1.2/commons-logging-1.2.pom" + }, + "commons-validator:commons-validator:jar:1.2.0": { + "layout": "commons-validator/commons-validator/1.2.0/commons-validator-1.2.0.jar", + "sha256": "ad7565ec5ce34d53083777ad93d1ff08cdb37142f579f435131b1ab7f3796cdb", + "url": "https://repo.maven.apache.org/maven2/commons-validator/commons-validator/1.2.0/commons-validator-1.2.0.jar" + }, + "commons-validator:commons-validator:jar:1.3.1": { + "layout": "commons-validator/commons-validator/1.3.1/commons-validator-1.3.1.jar", + "sha256": "d3680636c84e5cea6bfe43f338f8bab03d6a3e18cc663fc8b671684ef66c0c8d", + "url": "https://repo.maven.apache.org/maven2/commons-validator/commons-validator/1.3.1/commons-validator-1.3.1.jar" + }, + "commons-validator:commons-validator:pom:1.2.0": { + "layout": "commons-validator/commons-validator/1.2.0/commons-validator-1.2.0.pom", + "sha256": "a6159f9c0a2fdbe66c1117ac73d0e513f7d0a9eac78fb013ee5b9d0d25ac24f8", + "url": "https://repo.maven.apache.org/maven2/commons-validator/commons-validator/1.2.0/commons-validator-1.2.0.pom" + }, + "commons-validator:commons-validator:pom:1.3.1": { + "layout": "commons-validator/commons-validator/1.3.1/commons-validator-1.3.1.pom", + "sha256": "7ea241ea8821d6236125d8d6388e51d9d8e9a558cee8444f016dfdadb6044ccc", + "url": "https://repo.maven.apache.org/maven2/commons-validator/commons-validator/1.3.1/commons-validator-1.3.1.pom" + }, + "dom4j:dom4j:pom:1.1": { + "layout": "dom4j/dom4j/1.1/dom4j-1.1.pom", + "sha256": "03c18c93b1df85cbce3a21a17d9b27e399e27478c1421071f5348c58bd0ab61f", + "url": "https://repo.maven.apache.org/maven2/dom4j/dom4j/1.1/dom4j-1.1.pom" + }, + "edu.ucla.cs.compilers:jtb:jar:1.3.2": { + "layout": "edu/ucla/cs/compilers/jtb/1.3.2/jtb-1.3.2.jar", + "sha256": "b99a43b99f2fce94115cb397e7f5fb55aa704532b1b1de9f2292573b5a1f4f0b", + "url": "https://repo.maven.apache.org/maven2/edu/ucla/cs/compilers/jtb/1.3.2/jtb-1.3.2.jar" + }, + "edu.ucla.cs.compilers:jtb:pom:1.3.2": { + "layout": "edu/ucla/cs/compilers/jtb/1.3.2/jtb-1.3.2.pom", + "sha256": "2d3535ff60f0f957404f0becb8f7451465699dccd69b22b55b70130a937670d3", + "url": "https://repo.maven.apache.org/maven2/edu/ucla/cs/compilers/jtb/1.3.2/jtb-1.3.2.pom" + }, + "fr.lirmm.graphik:dlgp2-parser:jar:2.1.1": { + "layout": "fr/lirmm/graphik/dlgp2-parser/2.1.1/dlgp2-parser-2.1.1.jar", + "sha256": "1853a87b7629c629fbed6ef3eccf65d7d605f5a889f54cb1cd92249a522c1982", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/dlgp2-parser/2.1.1/dlgp2-parser-2.1.1.jar" + }, + "fr.lirmm.graphik:dlgp2-parser:pom:2.1.1": { + "layout": "fr/lirmm/graphik/dlgp2-parser/2.1.1/dlgp2-parser-2.1.1.pom", + "sha256": "8167b8d41b87b4d57d4957865cd808bef6421597082099a2048797e07959b40b", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/dlgp2-parser/2.1.1/dlgp2-parser-2.1.1.pom" + }, + "fr.lirmm.graphik:graal-api:jar:1.3.1": { + "layout": "fr/lirmm/graphik/graal-api/1.3.1/graal-api-1.3.1.jar", + "sha256": "cf28dd3897ef35c453cdf95808c212ab111e2329ec9ccae048ef1a0c33cc30ae", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-api/1.3.1/graal-api-1.3.1.jar" + }, + "fr.lirmm.graphik:graal-api:pom:1.3.1": { + "layout": "fr/lirmm/graphik/graal-api/1.3.1/graal-api-1.3.1.pom", + "sha256": "c5f02658bed3355261bcda2d12363c466706e4581647c84eb9146cf0a0c6a9ca", + "url": 
"https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-api/1.3.1/graal-api-1.3.1.pom" + }, + "fr.lirmm.graphik:graal-core:jar:1.3.1": { + "layout": "fr/lirmm/graphik/graal-core/1.3.1/graal-core-1.3.1.jar", + "sha256": "5d6f4bc892d29a8bd10159faa054adf4c5f3893cb8c892e277a51853d923627b", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-core/1.3.1/graal-core-1.3.1.jar" + }, + "fr.lirmm.graphik:graal-core:pom:1.3.1": { + "layout": "fr/lirmm/graphik/graal-core/1.3.1/graal-core-1.3.1.pom", + "sha256": "393bc67be5f1bd70893d36d8fe20450f47ccf5f6a0e266008cfa77b4db0c015f", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-core/1.3.1/graal-core-1.3.1.pom" + }, + "fr.lirmm.graphik:graal-io-dlgp:jar:1.3.1": { + "layout": "fr/lirmm/graphik/graal-io-dlgp/1.3.1/graal-io-dlgp-1.3.1.jar", + "sha256": "b5da8822aaf47c5bebeaf084b7bf6f1cedbd2f376db53efd3acff3551f70629a", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-io-dlgp/1.3.1/graal-io-dlgp-1.3.1.jar" + }, + "fr.lirmm.graphik:graal-io-dlgp:pom:1.3.1": { + "layout": "fr/lirmm/graphik/graal-io-dlgp/1.3.1/graal-io-dlgp-1.3.1.pom", + "sha256": "47d220f9d7afc992bd6836c7a5df42bcb55c39269efedd051a19be04abe798f1", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-io-dlgp/1.3.1/graal-io-dlgp-1.3.1.pom" + }, + "fr.lirmm.graphik:graal-io:pom:1.3.1": { + "layout": "fr/lirmm/graphik/graal-io/1.3.1/graal-io-1.3.1.pom", + "sha256": "5c179084e8b65d9dd7d4a393e66bdb4ebe3c580c3c5ec10fb06b85848e1569e4", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-io/1.3.1/graal-io-1.3.1.pom" + }, + "fr.lirmm.graphik:graal-util:jar:1.3.1": { + "layout": "fr/lirmm/graphik/graal-util/1.3.1/graal-util-1.3.1.jar", + "sha256": "adfa0f6633b840f5e9550ab6f8fa878e6d5feedc588878f3066bffe252921f64", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-util/1.3.1/graal-util-1.3.1.jar" + }, + "fr.lirmm.graphik:graal-util:pom:1.3.1": { + "layout": "fr/lirmm/graphik/graal-util/1.3.1/graal-util-1.3.1.pom", + "sha256": "5fd4832c060d35b8361899f9e9160ad2571a32b0678f5ea4be23acfb070b4152", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal-util/1.3.1/graal-util-1.3.1.pom" + }, + "fr.lirmm.graphik:graal:pom:1.3.1": { + "layout": "fr/lirmm/graphik/graal/1.3.1/graal-1.3.1.pom", + "sha256": "39e88ee90d0ee9656d42f739eb9997c63b5540c2ad0426bb46e710af4c580b94", + "url": "https://repo.maven.apache.org/maven2/fr/lirmm/graphik/graal/1.3.1/graal-1.3.1.pom" + }, + "info.picocli:picocli:jar:4.0.4": { + "layout": "info/picocli/picocli/4.0.4/picocli-4.0.4.jar", + "sha256": "8e532fb4a2f118a87c77b4e12a3565550f2dd7ec34d865c837e2a23728a45a48", + "url": "https://repo.maven.apache.org/maven2/info/picocli/picocli/4.0.4/picocli-4.0.4.jar" + }, + "info.picocli:picocli:pom:4.0.4": { + "layout": "info/picocli/picocli/4.0.4/picocli-4.0.4.pom", + "sha256": "41888a6813384fa4b0f74f421aae955c309779e92d4b432048484ef7e336ff8e", + "url": "https://repo.maven.apache.org/maven2/info/picocli/picocli/4.0.4/picocli-4.0.4.pom" + }, + "io.github.zlika:reproducible-build-maven-plugin:jar:0.16": { + "layout": "io/github/zlika/reproducible-build-maven-plugin/0.16/reproducible-build-maven-plugin-0.16.jar", + "sha256": "c184b5f5681550eff826fb2454d6538fb1238b0b49db15cf9d2c994bae36d3a9", + "url": "https://repo.maven.apache.org/maven2/io/github/zlika/reproducible-build-maven-plugin/0.16/reproducible-build-maven-plugin-0.16.jar" + }, + "io.github.zlika:reproducible-build-maven-plugin:pom:0.16": { + "layout": 
"io/github/zlika/reproducible-build-maven-plugin/0.16/reproducible-build-maven-plugin-0.16.pom", + "sha256": "1522a0a1b7b3f297b4c2fa73df2f4fafc70d2b26d25b50f0f6b38f0d1dae21b2", + "url": "https://repo.maven.apache.org/maven2/io/github/zlika/reproducible-build-maven-plugin/0.16/reproducible-build-maven-plugin-0.16.pom" + }, + "javax.activation:javax.activation-api:jar:1.2.0": { + "layout": "javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0.jar", + "sha256": "43fdef0b5b6ceb31b0424b208b930c74ab58fac2ceeb7b3f6fd3aeb8b5ca4393", + "url": "https://repo.maven.apache.org/maven2/javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0.jar" + }, + "javax.activation:javax.activation-api:pom:1.2.0": { + "layout": "javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0.pom", + "sha256": "da2926f3c8be898643cc10acdec6de0b0351a57fb2735770fa0177b06ade71b9", + "url": "https://repo.maven.apache.org/maven2/javax/activation/javax.activation-api/1.2.0/javax.activation-api-1.2.0.pom" + }, + "javax.inject:javax.inject:jar:1": { + "layout": "javax/inject/javax.inject/1/javax.inject-1.jar", + "sha256": "91c77044a50c481636c32d916fd89c9118a72195390452c81065080f957de7ff", + "url": "https://repo.maven.apache.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar" + }, + "javax.inject:javax.inject:pom:1": { + "layout": "javax/inject/javax.inject/1/javax.inject-1.pom", + "sha256": "943e12b100627804638fa285805a0ab788a680266531e650921ebfe4621a8bfa", + "url": "https://repo.maven.apache.org/maven2/javax/inject/javax.inject/1/javax.inject-1.pom" + }, + "javax.servlet:servlet-api:pom:2.3": { + "layout": "javax/servlet/servlet-api/2.3/servlet-api-2.3.pom", + "sha256": "abb294a8f064018ea226a5ad2176eaa9dbf1cde029a47815fd4a4049d1374160", + "url": "https://repo.maven.apache.org/maven2/javax/servlet/servlet-api/2.3/servlet-api-2.3.pom" + }, + "javax.xml.bind:jaxb-api-parent:pom:2.3.1": { + "layout": "javax/xml/bind/jaxb-api-parent/2.3.1/jaxb-api-parent-2.3.1.pom", + "sha256": "cd1beaa4560dc4dfdb826b9d809e464db22526dfb54264bae78a6ff7efb08e1f", + "url": "https://repo.maven.apache.org/maven2/javax/xml/bind/jaxb-api-parent/2.3.1/jaxb-api-parent-2.3.1.pom" + }, + "javax.xml.bind:jaxb-api:jar:2.3.1": { + "layout": "javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1.jar", + "sha256": "88b955a0df57880a26a74708bc34f74dcaf8ebf4e78843a28b50eae945732b06", + "url": "https://repo.maven.apache.org/maven2/javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1.jar" + }, + "javax.xml.bind:jaxb-api:pom:2.3.1": { + "layout": "javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1.pom", + "sha256": "12b20cf922773445c3445c2883cbf671fa982111e9bf9f875020f9313b3814b1", + "url": "https://repo.maven.apache.org/maven2/javax/xml/bind/jaxb-api/2.3.1/jaxb-api-2.3.1.pom" + }, + "junit:junit:jar:3.8.1": { + "layout": "junit/junit/3.8.1/junit-3.8.1.jar", + "sha256": "b58e459509e190bed737f3592bc1950485322846cf10e78ded1d065153012d70", + "url": "https://repo.maven.apache.org/maven2/junit/junit/3.8.1/junit-3.8.1.jar" + }, + "junit:junit:jar:4.13.1": { + "layout": "junit/junit/4.13.1/junit-4.13.1.jar", + "sha256": "c30719db974d6452793fe191b3638a5777005485bae145924044530ffa5f6122", + "url": "https://repo.maven.apache.org/maven2/junit/junit/4.13.1/junit-4.13.1.jar" + }, + "junit:junit:jar:4.13.2": { + "layout": "junit/junit/4.13.2/junit-4.13.2.jar", + "sha256": "8e495b634469d64fb8acfa3495a065cbacc8a0fff55ce1e31007be4c16dc57d3", + "url": "https://repo.maven.apache.org/maven2/junit/junit/4.13.2/junit-4.13.2.jar" + }, + "junit:junit:pom:3.8.1": { + 
"layout": "junit/junit/3.8.1/junit-3.8.1.pom", + "sha256": "e68f33343d832398f3c8aa78afcd808d56b7c1020de4d3ad8ce47909095ee904", + "url": "https://repo.maven.apache.org/maven2/junit/junit/3.8.1/junit-3.8.1.pom" + }, + "junit:junit:pom:3.8.2": { + "layout": "junit/junit/3.8.2/junit-3.8.2.pom", + "sha256": "aede67999f02ac851c2a2ae8cec58f9d801f826ba20994df23a1d9fbecc47f0f", + "url": "https://repo.maven.apache.org/maven2/junit/junit/3.8.2/junit-3.8.2.pom" + }, + "junit:junit:pom:4.10": { + "layout": "junit/junit/4.10/junit-4.10.pom", + "sha256": "22a1bf0baae8b6106b7ad8026ecf27c59e7e47ddb49d62975036beadb2c62eb5", + "url": "https://repo.maven.apache.org/maven2/junit/junit/4.10/junit-4.10.pom" + }, + "junit:junit:pom:4.12": { + "layout": "junit/junit/4.12/junit-4.12.pom", + "sha256": "90f163f78e3ffb6f1c7ad97de9e7eba4eea25807141b85d6d12be67ca25449c4", + "url": "https://repo.maven.apache.org/maven2/junit/junit/4.12/junit-4.12.pom" + }, + "junit:junit:pom:4.13.1": { + "layout": "junit/junit/4.13.1/junit-4.13.1.pom", + "sha256": "c68defdedaaaeae1432e12a5302bf2bfa05057d8b5acc65aaa3f3d9853ff40d6", + "url": "https://repo.maven.apache.org/maven2/junit/junit/4.13.1/junit-4.13.1.pom" + }, + "junit:junit:pom:4.13.2": { + "layout": "junit/junit/4.13.2/junit-4.13.2.pom", + "sha256": "569b6977ee4603c965c1c46c3058fa6e969291b0160eb6964dd092cd89eadd94", + "url": "https://repo.maven.apache.org/maven2/junit/junit/4.13.2/junit-4.13.2.pom" + }, + "log4j:log4j:jar:1.2.17": { + "layout": "log4j/log4j/1.2.17/log4j-1.2.17.jar", + "sha256": "1d31696445697720527091754369082a6651bd49781b6005deb94e56753406f9", + "url": "https://repo.maven.apache.org/maven2/log4j/log4j/1.2.17/log4j-1.2.17.jar" + }, + "log4j:log4j:pom:1.2.12": { + "layout": "log4j/log4j/1.2.12/log4j-1.2.12.pom", + "sha256": "cb54dedc5d8c4510148dfa792701cbac1a84c383a84f48f5a32e6d7e460bbb72", + "url": "https://repo.maven.apache.org/maven2/log4j/log4j/1.2.12/log4j-1.2.12.pom" + }, + "log4j:log4j:pom:1.2.17": { + "layout": "log4j/log4j/1.2.17/log4j-1.2.17.pom", + "sha256": "3b95a3d3cdd3aa4b91ab327ddb5a1bfe03d81e273794e36aa1440471d5d70e5e", + "url": "https://repo.maven.apache.org/maven2/log4j/log4j/1.2.17/log4j-1.2.17.pom" + }, + "logkit:logkit:pom:1.0.1": { + "layout": "logkit/logkit/1.0.1/logkit-1.0.1.pom", + "sha256": "3de328dfa1b563ba6dfc5829774cf2f8dab0dc9528ed2731c35251ab7fd6c4c6", + "url": "https://repo.maven.apache.org/maven2/logkit/logkit/1.0.1/logkit-1.0.1.pom" + }, + "net.bytebuddy:byte-buddy-agent:jar:1.9.10": { + "layout": "net/bytebuddy/byte-buddy-agent/1.9.10/byte-buddy-agent-1.9.10.jar", + "sha256": "8ed739d29132103250d307d2e8e3c95f07588ef0543ab11d2881d00768a5e182", + "url": "https://repo.maven.apache.org/maven2/net/bytebuddy/byte-buddy-agent/1.9.10/byte-buddy-agent-1.9.10.jar" + }, + "net.bytebuddy:byte-buddy-agent:pom:1.9.10": { + "layout": "net/bytebuddy/byte-buddy-agent/1.9.10/byte-buddy-agent-1.9.10.pom", + "sha256": "0ec07c293fdda816cf6054bb81df95e2b453e748bad92c6de150162348e64152", + "url": "https://repo.maven.apache.org/maven2/net/bytebuddy/byte-buddy-agent/1.9.10/byte-buddy-agent-1.9.10.pom" + }, + "net.bytebuddy:byte-buddy-parent:pom:1.9.10": { + "layout": "net/bytebuddy/byte-buddy-parent/1.9.10/byte-buddy-parent-1.9.10.pom", + "sha256": "29fb23ea1e3445d124c427d41a4bd5541c5a8789c06d3644795c25b480f0e2ea", + "url": "https://repo.maven.apache.org/maven2/net/bytebuddy/byte-buddy-parent/1.9.10/byte-buddy-parent-1.9.10.pom" + }, + "net.bytebuddy:byte-buddy:jar:1.9.10": { + "layout": "net/bytebuddy/byte-buddy/1.9.10/byte-buddy-1.9.10.jar", + 
"sha256": "2936debc4d7b6c534848d361412e2d0f8bd06f7f27a6f4e728a20e97648d2bf3", + "url": "https://repo.maven.apache.org/maven2/net/bytebuddy/byte-buddy/1.9.10/byte-buddy-1.9.10.jar" + }, + "net.bytebuddy:byte-buddy:pom:1.9.10": { + "layout": "net/bytebuddy/byte-buddy/1.9.10/byte-buddy-1.9.10.pom", + "sha256": "b3d5807907458353c10accad5cb696836114f3c2678a2955a1de46b62745be43", + "url": "https://repo.maven.apache.org/maven2/net/bytebuddy/byte-buddy/1.9.10/byte-buddy-1.9.10.pom" + }, + "net.java.dev.javacc:javacc:jar:5.0": { + "layout": "net/java/dev/javacc/javacc/5.0/javacc-5.0.jar", + "sha256": "71113161bc8cf6641515541c2818028b87c78ec2e8ffaa75317686ee08967b89", + "url": "https://repo.maven.apache.org/maven2/net/java/dev/javacc/javacc/5.0/javacc-5.0.jar" + }, + "net.java.dev.javacc:javacc:pom:5.0": { + "layout": "net/java/dev/javacc/javacc/5.0/javacc-5.0.pom", + "sha256": "941660d47822f9c0d80d40ea06d4981fcdc2b87cc625381b545efd7e6b449cef", + "url": "https://repo.maven.apache.org/maven2/net/java/dev/javacc/javacc/5.0/javacc-5.0.pom" + }, + "net.java:jvnet-parent:pom:1": { + "layout": "net/java/jvnet-parent/1/jvnet-parent-1.pom", + "sha256": "281440811268e65d9e266b3cc898297e214e04f09740d0386ceeb4a8923d63bf", + "url": "https://repo.maven.apache.org/maven2/net/java/jvnet-parent/1/jvnet-parent-1.pom" + }, + "net.java:jvnet-parent:pom:5": { + "layout": "net/java/jvnet-parent/5/jvnet-parent-5.pom", + "sha256": "1af699f8d9ddab67f9a0d202fbd7915eb0362a5a6dfd5ffc54cafa3465c9cb0a", + "url": "https://repo.maven.apache.org/maven2/net/java/jvnet-parent/5/jvnet-parent-5.pom" + }, + "net.sourceforge.owlapi:owlapi-api:jar:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-api/5.1.11/owlapi-api-5.1.11.jar", + "sha256": "7b8b8e0c90c1d01c696722e737d5d63bc9a4a52f7a1d7baebb30c981d19f9d38", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-api/5.1.11/owlapi-api-5.1.11.jar" + }, + "net.sourceforge.owlapi:owlapi-api:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-api/5.1.11/owlapi-api-5.1.11.pom", + "sha256": "35ce474f066075ba93c4839a338b3ef408042e510e209c0f9e4dc839756ab8b8", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-api/5.1.11/owlapi-api-5.1.11.pom" + }, + "net.sourceforge.owlapi:owlapi-apibinding:jar:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-apibinding/5.1.11/owlapi-apibinding-5.1.11.jar", + "sha256": "28c365aee37308c4c8f19449bec6e0358895787b9025a979f38c39eb89ce3a62", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-apibinding/5.1.11/owlapi-apibinding-5.1.11.jar" + }, + "net.sourceforge.owlapi:owlapi-apibinding:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-apibinding/5.1.11/owlapi-apibinding-5.1.11.pom", + "sha256": "dfb4e6131d9c2dacfaf3515eca9335e856945b9c0ff6e27436f1bebec7809bf1", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-apibinding/5.1.11/owlapi-apibinding-5.1.11.pom" + }, + "net.sourceforge.owlapi:owlapi-impl:jar:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-impl/5.1.11/owlapi-impl-5.1.11.jar", + "sha256": "25c10e15db15830f0fad2c2d3a5101e94ecfef2ca8025c1648ae0e2fc14ed2f7", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-impl/5.1.11/owlapi-impl-5.1.11.jar" + }, + "net.sourceforge.owlapi:owlapi-impl:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-impl/5.1.11/owlapi-impl-5.1.11.pom", + "sha256": "4f9948f75e7cf053232a1878b861de5e298c5e1fdc605edb043fdfe1430d8101", + "url": 
"https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-impl/5.1.11/owlapi-impl-5.1.11.pom" + }, + "net.sourceforge.owlapi:owlapi-oboformat:jar:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-oboformat/5.1.11/owlapi-oboformat-5.1.11.jar", + "sha256": "2ae79ae6be06f37d679a22937ca4576ef27ec0f75fe010e86401d0b8edb98773", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-oboformat/5.1.11/owlapi-oboformat-5.1.11.jar" + }, + "net.sourceforge.owlapi:owlapi-oboformat:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-oboformat/5.1.11/owlapi-oboformat-5.1.11.pom", + "sha256": "2d057ae4918f779aa8cc03b4281398a7eab62048b0fa10b9a5ee5bb1a6dff5e0", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-oboformat/5.1.11/owlapi-oboformat-5.1.11.pom" + }, + "net.sourceforge.owlapi:owlapi-parent:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-parent/5.1.11/owlapi-parent-5.1.11.pom", + "sha256": "322700bc6a8008d286788394084c3f1d81490363093dfb9dc6a1eb55c06ab7f5", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-parent/5.1.11/owlapi-parent-5.1.11.pom" + }, + "net.sourceforge.owlapi:owlapi-parsers:jar:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-parsers/5.1.11/owlapi-parsers-5.1.11.jar", + "sha256": "00249d340a1e91ae93e7a3f8ed79549862fca36e706c686bd495e1a6e30ae474", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-parsers/5.1.11/owlapi-parsers-5.1.11.jar" + }, + "net.sourceforge.owlapi:owlapi-parsers:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-parsers/5.1.11/owlapi-parsers-5.1.11.pom", + "sha256": "62a9d8de8a674becde58c521eeacca8b9048a8ff3bc081186b763a6c7ba9c044", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-parsers/5.1.11/owlapi-parsers-5.1.11.pom" + }, + "net.sourceforge.owlapi:owlapi-rio:jar:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-rio/5.1.11/owlapi-rio-5.1.11.jar", + "sha256": "551f97905a6df3435dca2b053735b66b90ab90b9dc0c82f7fde5380f51027ddc", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-rio/5.1.11/owlapi-rio-5.1.11.jar" + }, + "net.sourceforge.owlapi:owlapi-rio:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-rio/5.1.11/owlapi-rio-5.1.11.pom", + "sha256": "2c4b715a214d4639de081d0da95e5b33ee03c400138df24115b3bf97eb888193", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-rio/5.1.11/owlapi-rio-5.1.11.pom" + }, + "net.sourceforge.owlapi:owlapi-tools:jar:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-tools/5.1.11/owlapi-tools-5.1.11.jar", + "sha256": "7c4829bbcdd1360197ba3ae7d6d2b9e57fda3986c00bcbff4dca40cf55d1c2eb", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-tools/5.1.11/owlapi-tools-5.1.11.jar" + }, + "net.sourceforge.owlapi:owlapi-tools:pom:5.1.11": { + "layout": "net/sourceforge/owlapi/owlapi-tools/5.1.11/owlapi-tools-5.1.11.pom", + "sha256": "91062b7db79b6505f7e67d41b022cdf6553318ea2dfb7353fd2e5e5acb996cb5", + "url": "https://repo.maven.apache.org/maven2/net/sourceforge/owlapi/owlapi-tools/5.1.11/owlapi-tools-5.1.11.pom" + }, + "org.apache.commons:commons-collections4:jar:4.2": { + "layout": "org/apache/commons/commons-collections4/4.2/commons-collections4-4.2.jar", + "sha256": "6a594721d51444fd97b3eaefc998a77f606dedb03def494f74755aead3c9df3e", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-collections4/4.2/commons-collections4-4.2.jar" + }, + 
"org.apache.commons:commons-collections4:pom:4.2": { + "layout": "org/apache/commons/commons-collections4/4.2/commons-collections4-4.2.pom", + "sha256": "52fd0e75a913b7058e47a8a7cfbfede9daec791310e7067e783419390f8c987b", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-collections4/4.2/commons-collections4-4.2.pom" + }, + "org.apache.commons:commons-compress:jar:1.21": { + "layout": "org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar", + "sha256": "6aecfd5459728a595601cfa07258d131972ffc39b492eb48bdd596577a2f244a", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-compress/1.21/commons-compress-1.21.jar" + }, + "org.apache.commons:commons-compress:pom:1.21": { + "layout": "org/apache/commons/commons-compress/1.21/commons-compress-1.21.pom", + "sha256": "675bb023c9beedde3232949979b9742a5fea946280a55a1b462d4ca7801088cd", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-compress/1.21/commons-compress-1.21.pom" + }, + "org.apache.commons:commons-csv:jar:1.5": { + "layout": "org/apache/commons/commons-csv/1.5/commons-csv-1.5.jar", + "sha256": "f0acb4058efe4616ea631b5119f59ef21fe43843f4e399455521e984e8df99e5", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-csv/1.5/commons-csv-1.5.jar" + }, + "org.apache.commons:commons-csv:pom:1.5": { + "layout": "org/apache/commons/commons-csv/1.5/commons-csv-1.5.pom", + "sha256": "ea163076368d6ecac5d81d7a6ea89b1590efec1e9154cd6cd40af3a763a3c559", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-csv/1.5/commons-csv-1.5.pom" + }, + "org.apache.commons:commons-lang3:jar:3.4": { + "layout": "org/apache/commons/commons-lang3/3.4/commons-lang3-3.4.jar", + "sha256": "734c8356420cc8e30c795d64fd1fcd5d44ea9d90342a2cc3262c5158fbc6d98b", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.4/commons-lang3-3.4.jar" + }, + "org.apache.commons:commons-lang3:jar:3.9": { + "layout": "org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar", + "sha256": "de2e1dcdcf3ef917a8ce858661a06726a9a944f28e33ad7f9e08bea44dc3c230", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.jar" + }, + "org.apache.commons:commons-lang3:pom:3.4": { + "layout": "org/apache/commons/commons-lang3/3.4/commons-lang3-3.4.pom", + "sha256": "686e75b561a13c1031d43a7647a364e2ed3e456467050eac4527b94b06d73fd1", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.4/commons-lang3-3.4.pom" + }, + "org.apache.commons:commons-lang3:pom:3.5": { + "layout": "org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.pom", + "sha256": "45e7fbb2c231db903a5d5aadafc636a173a4d54560f78a11ff498028ef9e345e", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.pom" + }, + "org.apache.commons:commons-lang3:pom:3.9": { + "layout": "org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.pom", + "sha256": "a4022429b98425b430181915721279a52f610f34648bdac487d4cacbbe8dfeb5", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-lang3/3.9/commons-lang3-3.9.pom" + }, + "org.apache.commons:commons-parent:pom:25": { + "layout": "org/apache/commons/commons-parent/25/commons-parent-25.pom", + "sha256": "467ae650442e876867379094e7518dfdd67d22c5352ebd39808c84259e9790ba", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/25/commons-parent-25.pom" + }, + "org.apache.commons:commons-parent:pom:32": { 
+ "layout": "org/apache/commons/commons-parent/32/commons-parent-32.pom", + "sha256": "e4d258af8b2ff4032148d415379def7870789a6003e80576f1504b10f26b4be8", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/32/commons-parent-32.pom" + }, + "org.apache.commons:commons-parent:pom:34": { + "layout": "org/apache/commons/commons-parent/34/commons-parent-34.pom", + "sha256": "3a2e69d06d641d1f3b293126dc9e2e4ea6563bf8c36c87e0ab6fa4292d04b79c", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/34/commons-parent-34.pom" + }, + "org.apache.commons:commons-parent:pom:35": { + "layout": "org/apache/commons/commons-parent/35/commons-parent-35.pom", + "sha256": "7098a1ab8336ecd4c9dc21cbbcac869f82c66f64b8ac4f7988d41b4fcb44e49a", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/35/commons-parent-35.pom" + }, + "org.apache.commons:commons-parent:pom:37": { + "layout": "org/apache/commons/commons-parent/37/commons-parent-37.pom", + "sha256": "ee705a4dd68d8dcd9cc8d1249d5790861eb145ce7b0c6d6c0555ba94489d014b", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/37/commons-parent-37.pom" + }, + "org.apache.commons:commons-parent:pom:39": { + "layout": "org/apache/commons/commons-parent/39/commons-parent-39.pom", + "sha256": "87cd27e1a02a5c3eb6d85059ce98696bb1b44c2b8b650f0567c86df60fa61da7", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/39/commons-parent-39.pom" + }, + "org.apache.commons:commons-parent:pom:41": { + "layout": "org/apache/commons/commons-parent/41/commons-parent-41.pom", + "sha256": "b2877c8016f8b28924c8e90dd5ae40cd11f328d02a1de98d80de22574ab36ec7", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/41/commons-parent-41.pom" + }, + "org.apache.commons:commons-parent:pom:42": { + "layout": "org/apache/commons/commons-parent/42/commons-parent-42.pom", + "sha256": "cd313494c670b483ec256972af1698b330e598f807002354eb765479f604b09c", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/42/commons-parent-42.pom" + }, + "org.apache.commons:commons-parent:pom:47": { + "layout": "org/apache/commons/commons-parent/47/commons-parent-47.pom", + "sha256": "8a8ecb570553bf9f1ffae211a8d4ca9ee630c17afe59293368fba7bd9b42fcb7", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/47/commons-parent-47.pom" + }, + "org.apache.commons:commons-parent:pom:48": { + "layout": "org/apache/commons/commons-parent/48/commons-parent-48.pom", + "sha256": "1e1f7de9370a7b7901f128f1dacd1422be74e3f47f9558b0f79e04c0637ca0b4", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/48/commons-parent-48.pom" + }, + "org.apache.commons:commons-parent:pom:5": { + "layout": "org/apache/commons/commons-parent/5/commons-parent-5.pom", + "sha256": "8bd632c00bdf80a7de36c22b60f12452c147d8eca2f00d79d66699ebe7daa02a", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/5/commons-parent-5.pom" + }, + "org.apache.commons:commons-parent:pom:52": { + "layout": "org/apache/commons/commons-parent/52/commons-parent-52.pom", + "sha256": "75dbe8f34e98e4c3ff42daae4a2f9eb4cbcd3b5f1047d54460ace906dbb4502e", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/52/commons-parent-52.pom" + }, + "org.apache.commons:commons-parent:pom:9": { + "layout": "org/apache/commons/commons-parent/9/commons-parent-9.pom", + "sha256": 
"5331b7d3e0aed59728c80f1118e4dbf78565d4109e81d16602c9cadbdb23a128", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-parent/9/commons-parent-9.pom" + }, + "org.apache.commons:commons-rdf-api:jar:0.5.0": { + "layout": "org/apache/commons/commons-rdf-api/0.5.0/commons-rdf-api-0.5.0.jar", + "sha256": "360fd9197008502cbc3fd09afe2ad45d8c8c905e21f42e7cd5b2aa1c39742b5e", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-rdf-api/0.5.0/commons-rdf-api-0.5.0.jar" + }, + "org.apache.commons:commons-rdf-api:pom:0.5.0": { + "layout": "org/apache/commons/commons-rdf-api/0.5.0/commons-rdf-api-0.5.0.pom", + "sha256": "541fbabdd24e764c881a205d9883a6da0e32181b2e9ebc73850b35d9736921d3", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-rdf-api/0.5.0/commons-rdf-api-0.5.0.pom" + }, + "org.apache.commons:commons-rdf-parent:pom:0.5.0": { + "layout": "org/apache/commons/commons-rdf-parent/0.5.0/commons-rdf-parent-0.5.0.pom", + "sha256": "83257e77c8d8d3377e18833f0fc4fe6396ccfb88bee82c66c81f87e79abcb947", + "url": "https://repo.maven.apache.org/maven2/org/apache/commons/commons-rdf-parent/0.5.0/commons-rdf-parent-0.5.0.pom" + }, + "org.apache.httpcomponents:fluent-hc:jar:4.5.5": { + "layout": "org/apache/httpcomponents/fluent-hc/4.5.5/fluent-hc-4.5.5.jar", + "sha256": "497a7a52e3293bba775ab151f0e0e2bc387eaf16cdaa15fdd8622deb2cb85a77", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/fluent-hc/4.5.5/fluent-hc-4.5.5.jar" + }, + "org.apache.httpcomponents:fluent-hc:pom:4.5.5": { + "layout": "org/apache/httpcomponents/fluent-hc/4.5.5/fluent-hc-4.5.5.pom", + "sha256": "4aab17ff151bc71c55a772f8b1629fe4752f8ce543ae8f1cbb89e752117a3043", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/fluent-hc/4.5.5/fluent-hc-4.5.5.pom" + }, + "org.apache.httpcomponents:httpclient-cache:jar:4.5.2": { + "layout": "org/apache/httpcomponents/httpclient-cache/4.5.2/httpclient-cache-4.5.2.jar", + "sha256": "343cbee681f2b57ecdda0102a4b7ac247db1be1b36b8ff71c40ceea8cd56f239", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient-cache/4.5.2/httpclient-cache-4.5.2.jar" + }, + "org.apache.httpcomponents:httpclient-cache:pom:4.5.2": { + "layout": "org/apache/httpcomponents/httpclient-cache/4.5.2/httpclient-cache-4.5.2.pom", + "sha256": "8ed51e8da875c225f5417db283e61f0d2817959f8564a99a5c4e90113aef32d2", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient-cache/4.5.2/httpclient-cache-4.5.2.pom" + }, + "org.apache.httpcomponents:httpclient-cache:pom:4.5.5": { + "layout": "org/apache/httpcomponents/httpclient-cache/4.5.5/httpclient-cache-4.5.5.pom", + "sha256": "e5f0dc5beaffb908c9ba1b80f57dda4c90e91d6895d83217b181aeabe65dd753", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient-cache/4.5.5/httpclient-cache-4.5.5.pom" + }, + "org.apache.httpcomponents:httpclient-osgi:jar:4.5.5": { + "layout": "org/apache/httpcomponents/httpclient-osgi/4.5.5/httpclient-osgi-4.5.5.jar", + "sha256": "ba76819cc553111347f4a58ae82b83cf4f2b74aff46a2fa3b080d44ccbd93ab8", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient-osgi/4.5.5/httpclient-osgi-4.5.5.jar" + }, + "org.apache.httpcomponents:httpclient-osgi:pom:4.5.5": { + "layout": "org/apache/httpcomponents/httpclient-osgi/4.5.5/httpclient-osgi-4.5.5.pom", + "sha256": "1195525b151aa4c01c99c5d2380d08d9d3439ffd8627e3e917ae8f87b3346cf3", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient-osgi/4.5.5/httpclient-osgi-4.5.5.pom" + }, + "org.apache.httpcomponents:httpclient:jar:4.0.2": { + "layout": "org/apache/httpcomponents/httpclient/4.0.2/httpclient-4.0.2.jar", + "sha256": "377e963549427abe67e7c21b2c374cfa87c9218a9a947d71d7675a4cbdff20d0", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.0.2/httpclient-4.0.2.jar" + }, + "org.apache.httpcomponents:httpclient:jar:4.5.2": { + "layout": "org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar", + "sha256": "0dffc621400d6c632f55787d996b8aeca36b30746a716e079a985f24d8074057", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar" + }, + "org.apache.httpcomponents:httpclient:pom:4.0.2": { + "layout": "org/apache/httpcomponents/httpclient/4.0.2/httpclient-4.0.2.pom", + "sha256": "1c0a5ab5ce9980cd49e4e37c8089d28611f852e2b33e9c3d0f8ae40f0eea75b8", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.0.2/httpclient-4.0.2.pom" + }, + "org.apache.httpcomponents:httpclient:pom:4.5.2": { + "layout": "org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.pom", + "sha256": "488001ba21829a4b28b0efbed18dccb13689f58f0985453863257049f7ec19f0", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.pom" + }, + "org.apache.httpcomponents:httpclient:pom:4.5.5": { + "layout": "org/apache/httpcomponents/httpclient/4.5.5/httpclient-4.5.5.pom", + "sha256": "db3b0198e11f3aa5fa51310c915b818c134a8cbcb82fc81ddf95ba2313862626", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpclient/4.5.5/httpclient-4.5.5.pom" + }, + "org.apache.httpcomponents:httpcomponents-client:pom:4.0.2": { + "layout": "org/apache/httpcomponents/httpcomponents-client/4.0.2/httpcomponents-client-4.0.2.pom", + "sha256": "d6b6540bcc1a2fbd2a3733e657cd0c70eebd1fd0dac43031d5bfc975c2c755a7", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-client/4.0.2/httpcomponents-client-4.0.2.pom" + }, + "org.apache.httpcomponents:httpcomponents-client:pom:4.5.2": { + "layout": "org/apache/httpcomponents/httpcomponents-client/4.5.2/httpcomponents-client-4.5.2.pom", + "sha256": "1de8cba6c1e5c46b28619e335a6fb7204c352dba3e8b1cefb4b59575e2beed01", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-client/4.5.2/httpcomponents-client-4.5.2.pom" + }, + "org.apache.httpcomponents:httpcomponents-client:pom:4.5.5": { + "layout": "org/apache/httpcomponents/httpcomponents-client/4.5.5/httpcomponents-client-4.5.5.pom", + "sha256": "1445d012158f941731a6062c7eab740093ea2745b46b54de236b3f7787e99bf3", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-client/4.5.5/httpcomponents-client-4.5.5.pom" + }, + "org.apache.httpcomponents:httpcomponents-core:pom:4.0.1": { + "layout": "org/apache/httpcomponents/httpcomponents-core/4.0.1/httpcomponents-core-4.0.1.pom", + "sha256": "70d3b2bb5d199da825681559ff68aa5bdee201206c50af177bf7ca4d94226752", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-core/4.0.1/httpcomponents-core-4.0.1.pom" + }, + "org.apache.httpcomponents:httpcomponents-core:pom:4.4.4": { + "layout": "org/apache/httpcomponents/httpcomponents-core/4.4.4/httpcomponents-core-4.4.4.pom", + "sha256": "c811ccdf77e0ff5e31abc58b0354f9b651e730518596e74073e1495ec25f77b1", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-core/4.4.4/httpcomponents-core-4.4.4.pom" + }, + "org.apache.httpcomponents:httpcomponents-core:pom:4.4.9": { + "layout": "org/apache/httpcomponents/httpcomponents-core/4.4.9/httpcomponents-core-4.4.9.pom", + "sha256": "32e66095a919456fc76a10c7865e70c9a14c62bbba847026420a055652366b18", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-core/4.4.9/httpcomponents-core-4.4.9.pom" + }, + "org.apache.httpcomponents:httpcomponents-parent:pom:10": { + "layout": "org/apache/httpcomponents/httpcomponents-parent/10/httpcomponents-parent-10.pom", + "sha256": "caaf967d94afb21753f36082c6086206bd1f48825ff596932cceba72b65d39fa", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-parent/10/httpcomponents-parent-10.pom" + }, + "org.apache.httpcomponents:httpcomponents-parent:pom:9": { + "layout": "org/apache/httpcomponents/httpcomponents-parent/9/httpcomponents-parent-9.pom", + "sha256": "2656c7e40bdbe6b6f958798f7d6918b50b544df0e23b52ce3731b9ccc20b5f8c", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcomponents-parent/9/httpcomponents-parent-9.pom" + }, + "org.apache.httpcomponents:httpcore-nio:jar:4.4.9": { + "layout": "org/apache/httpcomponents/httpcore-nio/4.4.9/httpcore-nio-4.4.9.jar", + "sha256": "366c7def45fb27816bb2d1ec4017fb1a5855940669123177234c4091ee269fdb", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore-nio/4.4.9/httpcore-nio-4.4.9.jar" + }, + "org.apache.httpcomponents:httpcore-nio:pom:4.4.9": { + "layout": "org/apache/httpcomponents/httpcore-nio/4.4.9/httpcore-nio-4.4.9.pom", + "sha256": "7e7a9e4f45487752a68467a9e9a009e51054913871ed15ac7777359b8a8f7260", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore-nio/4.4.9/httpcore-nio-4.4.9.pom" + }, + "org.apache.httpcomponents:httpcore-osgi:jar:4.4.9": { + "layout": "org/apache/httpcomponents/httpcore-osgi/4.4.9/httpcore-osgi-4.4.9.jar", + "sha256": "38b93bf15d43efe65ad5ceffd672b9077dfab205c41314af09e6d911423351f2", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore-osgi/4.4.9/httpcore-osgi-4.4.9.jar" + }, + "org.apache.httpcomponents:httpcore-osgi:pom:4.4.9": { + "layout": "org/apache/httpcomponents/httpcore-osgi/4.4.9/httpcore-osgi-4.4.9.pom", + "sha256": "b4f50caa5a2472aa936577b104468151f01f9bbfb8aa2e5cd1858a6c67c8af1f", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore-osgi/4.4.9/httpcore-osgi-4.4.9.pom" + }, + "org.apache.httpcomponents:httpcore:jar:4.0.1": { + "layout": "org/apache/httpcomponents/httpcore/4.0.1/httpcore-4.0.1.jar", + "sha256": "3b6bf92affa85d4169a91547ce3c7093ed993b41ad2df80469fc768ad01e6b6b", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.0.1/httpcore-4.0.1.jar" + }, + "org.apache.httpcomponents:httpcore:jar:4.4.4": { + "layout": "org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar", + "sha256": "f7bc09dc8a7003822d109634ffd3845d579d12e725ae54673e323a7ce7f5e325", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar" + }, + "org.apache.httpcomponents:httpcore:pom:4.0.1": { + "layout": "org/apache/httpcomponents/httpcore/4.0.1/httpcore-4.0.1.pom", + "sha256": "d1f666e38907c9471c64a3cdf87e0b1b0469d503e51cc14661860f7f6bd70c6e", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.0.1/httpcore-4.0.1.pom" + }, + "org.apache.httpcomponents:httpcore:pom:4.4.4": { + "layout": "org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.pom", + "sha256": "3ef432497e39958060d418111630f9a553599d82c3143eb18fae564a4cb28a2b", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.pom" + }, + "org.apache.httpcomponents:httpcore:pom:4.4.9": { + "layout": "org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.pom", + "sha256": "6e94bd777beedeff9b5e770cf654b530325781034b2746c632b131ec74ad513c", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpcore/4.4.9/httpcore-4.4.9.pom" + }, + "org.apache.httpcomponents:httpmime:jar:4.5.2": { + "layout": "org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.jar", + "sha256": "231a3f7e4962053db2be8461d5422e68fc458a3a7dd7d8ada803a348e21f8f07", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.jar" + }, + "org.apache.httpcomponents:httpmime:jar:4.5.5": { + "layout": "org/apache/httpcomponents/httpmime/4.5.5/httpmime-4.5.5.jar", + "sha256": "e46206931b7426102e658f086f74ee582761264a8f9977fba02c1e200c51a9c5", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpmime/4.5.5/httpmime-4.5.5.jar" + }, + "org.apache.httpcomponents:httpmime:pom:4.5.2": { + "layout": "org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.pom", + "sha256": "004b5b6272d820029adefcaaa92186ec46a485990b54d03509e441eda85b3784", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.pom" + }, + "org.apache.httpcomponents:httpmime:pom:4.5.5": { + "layout": "org/apache/httpcomponents/httpmime/4.5.5/httpmime-4.5.5.pom", + "sha256": "e6a671bad6956f7418fe109162e868eb4559c64fa09eef46ebe471a84aadea5c", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/httpmime/4.5.5/httpmime-4.5.5.pom" + }, + "org.apache.httpcomponents:project:pom:4.0": { + "layout": "org/apache/httpcomponents/project/4.0/project-4.0.pom", + "sha256": "a35e70c72865add3f9416194e02d6f9967dbf0c77d253933f80fbf46ad4d2169", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/project/4.0/project-4.0.pom" + }, + "org.apache.httpcomponents:project:pom:4.1": { + "layout": "org/apache/httpcomponents/project/4.1/project-4.1.pom", + "sha256": "dcd740b63214f341a758b23f19940d0c5f6523077e2daa92c88512de5f012b05", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/project/4.1/project-4.1.pom" + }, + "org.apache.httpcomponents:project:pom:7": { + "layout": "org/apache/httpcomponents/project/7/project-7.pom", + "sha256": "3d6eba428555a558de046b5d76eacc1f5a54b4f5f20b84d636ed7aff18aa48c3", + "url": "https://repo.maven.apache.org/maven2/org/apache/httpcomponents/project/7/project-7.pom" + }, + "org.apache.maven.doxia:doxia-core:jar:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-core/1.0-alpha-7/doxia-core-1.0-alpha-7.jar", + "sha256": "0ca347aefddfb9d370b43e0531d4746eed9b762be41008236f7bd2e3c2749a89", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-core/1.0-alpha-7/doxia-core-1.0-alpha-7.jar" + }, + "org.apache.maven.doxia:doxia-core:jar:1.5": { + "layout": "org/apache/maven/doxia/doxia-core/1.5/doxia-core-1.5.jar", + "sha256": "b64c93f8a8c076af07fec48d7cea4ce60083624e7eb2b311c4f7f5c5037e5d89", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-core/1.5/doxia-core-1.5.jar" + }, + "org.apache.maven.doxia:doxia-core:pom:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-core/1.0-alpha-7/doxia-core-1.0-alpha-7.pom", + "sha256": "dbbc71789029ae65237565e5142f899bd6fa2250bf7cdfb667f548d52cd12f24", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-core/1.0-alpha-7/doxia-core-1.0-alpha-7.pom" + }, + "org.apache.maven.doxia:doxia-core:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-core/1.4/doxia-core-1.4.pom", + "sha256": "3bbd1bd2f50b9a025eb7e879fdcfa63f0cc8d55a7325f12504e915fa0d6655cf", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-core/1.4/doxia-core-1.4.pom" + }, + "org.apache.maven.doxia:doxia-core:pom:1.5": { + "layout": "org/apache/maven/doxia/doxia-core/1.5/doxia-core-1.5.pom", + "sha256": "c9842f1e1a801b62916f6eda8014605d525bf282f98d9fedf4178874621f63c7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-core/1.5/doxia-core-1.5.pom" + }, + "org.apache.maven.doxia:doxia-core:pom:1.7": { + "layout": "org/apache/maven/doxia/doxia-core/1.7/doxia-core-1.7.pom", + "sha256": "0d0882a3f6f55bc987cb42a9309e9254b1eb9255619a2e73c18dc7373d7c8374", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-core/1.7/doxia-core-1.7.pom" + }, + "org.apache.maven.doxia:doxia-decoration-model:jar:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-decoration-model/1.0-alpha-7/doxia-decoration-model-1.0-alpha-7.jar", + "sha256": "a45e2a094468d7a8a1a63ae2712be2a30275f95dd5f90b66d7403fb6f864ab07", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-decoration-model/1.0-alpha-7/doxia-decoration-model-1.0-alpha-7.jar" + }, + "org.apache.maven.doxia:doxia-decoration-model:jar:1.7.4": { + "layout": "org/apache/maven/doxia/doxia-decoration-model/1.7.4/doxia-decoration-model-1.7.4.jar", + "sha256": "2da5e88be0074a96ba3407313de0a483a6a666da78e8c1f388594a485d4e58b0", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-decoration-model/1.7.4/doxia-decoration-model-1.7.4.jar" + }, + "org.apache.maven.doxia:doxia-decoration-model:pom:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-decoration-model/1.0-alpha-7/doxia-decoration-model-1.0-alpha-7.pom", + "sha256": "c1b6cffb8d63e166c736e931ef785f3da3a0d180fb434d0a67691dbe087fb3f7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-decoration-model/1.0-alpha-7/doxia-decoration-model-1.0-alpha-7.pom" + }, + "org.apache.maven.doxia:doxia-decoration-model:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-decoration-model/1.4/doxia-decoration-model-1.4.pom", + "sha256": "aca9bc8aaa986308607518c510b312706365d5bf97759304537765340dcd3936", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-decoration-model/1.4/doxia-decoration-model-1.4.pom" + }, + "org.apache.maven.doxia:doxia-decoration-model:pom:1.7.4": { + "layout": "org/apache/maven/doxia/doxia-decoration-model/1.7.4/doxia-decoration-model-1.7.4.pom", + "sha256": "8e3bb25159776ab6e235a5200a721b7408567cb8a48cb618ba3fc7fead9679f3", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-decoration-model/1.7.4/doxia-decoration-model-1.7.4.pom" + }, + "org.apache.maven.doxia:doxia-logging-api:jar:1.5": { + "layout": "org/apache/maven/doxia/doxia-logging-api/1.5/doxia-logging-api-1.5.jar", + "sha256": 
"35b0fd1fb6fd801206358c8d019727b713c7473e3fcf53d823ee041e68b4da35", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-logging-api/1.5/doxia-logging-api-1.5.jar" + }, + "org.apache.maven.doxia:doxia-logging-api:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-logging-api/1.4/doxia-logging-api-1.4.pom", + "sha256": "9333d4eeb5e615f6b62c69a46ae953f736a1a525724df10805f9fc50b86f3297", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-logging-api/1.4/doxia-logging-api-1.4.pom" + }, + "org.apache.maven.doxia:doxia-logging-api:pom:1.5": { + "layout": "org/apache/maven/doxia/doxia-logging-api/1.5/doxia-logging-api-1.5.pom", + "sha256": "b05a659cd6dbeb28d21fbe503054f9b96e01f9d377d42039e56531989250bec7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-logging-api/1.5/doxia-logging-api-1.5.pom" + }, + "org.apache.maven.doxia:doxia-logging-api:pom:1.7": { + "layout": "org/apache/maven/doxia/doxia-logging-api/1.7/doxia-logging-api-1.7.pom", + "sha256": "456851e2f3b1ae877c8bf69849db19f93e5cfbf86ae13e22ffcc423334beac2e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-logging-api/1.7/doxia-logging-api-1.7.pom" + }, + "org.apache.maven.doxia:doxia-module-fml:jar:1.4": { + "layout": "org/apache/maven/doxia/doxia-module-fml/1.4/doxia-module-fml-1.4.jar", + "sha256": "e606d636559871abcad60f54f5d3be35db3d0b3538296a319e59aec158bfcda7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-module-fml/1.4/doxia-module-fml-1.4.jar" + }, + "org.apache.maven.doxia:doxia-module-fml:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-module-fml/1.4/doxia-module-fml-1.4.pom", + "sha256": "30c8a2c7c3c10b672deaff84930b91cd0159f5147fb27913d3dbd3b14cbd3bba", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-module-fml/1.4/doxia-module-fml-1.4.pom" + }, + "org.apache.maven.doxia:doxia-module-xhtml:jar:1.4": { + "layout": "org/apache/maven/doxia/doxia-module-xhtml/1.4/doxia-module-xhtml-1.4.jar", + "sha256": "7716473fe8ccb7c6ebd82817287ecbcf3f71103462302721790f14be6cb9eed0", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-module-xhtml/1.4/doxia-module-xhtml-1.4.jar" + }, + "org.apache.maven.doxia:doxia-module-xhtml:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-module-xhtml/1.4/doxia-module-xhtml-1.4.pom", + "sha256": "b9ecbe2fd3ac7300911c74f3b0d3e2f2b2b2325147f4927f814fa96eb8b007dd", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-module-xhtml/1.4/doxia-module-xhtml-1.4.pom" + }, + "org.apache.maven.doxia:doxia-module-xhtml:pom:1.7": { + "layout": "org/apache/maven/doxia/doxia-module-xhtml/1.7/doxia-module-xhtml-1.7.pom", + "sha256": "2f7c916db46b85b7015ee2c37bafbef0dc5e49278e757c9d83b247c7c51fa06f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-module-xhtml/1.7/doxia-module-xhtml-1.7.pom" + }, + "org.apache.maven.doxia:doxia-modules:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-modules/1.4/doxia-modules-1.4.pom", + "sha256": "a91f1d7bbdc5178c0e4f3b3a76df4c7a9415b72ecab21e020285030ccf7821ea", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-modules/1.4/doxia-modules-1.4.pom" + }, + "org.apache.maven.doxia:doxia-modules:pom:1.7": { + "layout": "org/apache/maven/doxia/doxia-modules/1.7/doxia-modules-1.7.pom", + "sha256": "08a63724d2ce4af8f65628f783af912f1dc8b166596737b8734bdefe73ed4bf0", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-modules/1.7/doxia-modules-1.7.pom" + }, + "org.apache.maven.doxia:doxia-sink-api:jar:1.0": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.0/doxia-sink-api-1.0.jar", + "sha256": "1cd68e9b4cf427a2b6b9a943a9bef6da879d25702334ea5addb0d153bb8f8911", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.0/doxia-sink-api-1.0.jar" + }, + "org.apache.maven.doxia:doxia-sink-api:jar:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.0-alpha-7/doxia-sink-api-1.0-alpha-7.jar", + "sha256": "37e66f15f348df957538f81f7e10a3fdcf84e5fbd687b2001ee3d6ab4a25aafe", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.0-alpha-7/doxia-sink-api-1.0-alpha-7.jar" + }, + "org.apache.maven.doxia:doxia-sink-api:jar:1.0-alpha-9": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.0-alpha-9/doxia-sink-api-1.0-alpha-9.jar", + "sha256": "54d31281b701a78bd52f62b145a596d12ffabef642fb0bd2d168ee3d035ea1d4", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.0-alpha-9/doxia-sink-api-1.0-alpha-9.jar" + }, + "org.apache.maven.doxia:doxia-sink-api:jar:1.5": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.5/doxia-sink-api-1.5.jar", + "sha256": "fe6c51cd4bd08567b99c90476c03d3ff983b364aa40d983c528c2dfff2a08380", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.5/doxia-sink-api-1.5.jar" + }, + "org.apache.maven.doxia:doxia-sink-api:pom:1.0": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.0/doxia-sink-api-1.0.pom", + "sha256": "50d699f86369802baf2cd16c31d936ad8f0c1a8976120cd1dc3dc70c8abed99a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.0/doxia-sink-api-1.0.pom" + }, + "org.apache.maven.doxia:doxia-sink-api:pom:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.0-alpha-7/doxia-sink-api-1.0-alpha-7.pom", + "sha256": "3b0fa210dcbf0c92545ac31740fc2dd8af61dc72fd452cfca3d68aeabb642003", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.0-alpha-7/doxia-sink-api-1.0-alpha-7.pom" + }, + "org.apache.maven.doxia:doxia-sink-api:pom:1.0-alpha-9": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.0-alpha-9/doxia-sink-api-1.0-alpha-9.pom", + "sha256": "eb9d0f060db1f03421dda050ba922b5f1f19653a334a68c50748e96c8f09a003", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.0-alpha-9/doxia-sink-api-1.0-alpha-9.pom" + }, + "org.apache.maven.doxia:doxia-sink-api:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.4/doxia-sink-api-1.4.pom", + "sha256": "ca39e6d7470a64bdea692254e9bdc47f095afe83f5b410dee552eeb99fa1756a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.4/doxia-sink-api-1.4.pom" + }, + "org.apache.maven.doxia:doxia-sink-api:pom:1.5": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.5/doxia-sink-api-1.5.pom", + "sha256": "7f5d6aa2ef300e93de562b8b10da3a4084ea1c9f5c95eaa0f16dc2a8763724e5", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.5/doxia-sink-api-1.5.pom" + }, + "org.apache.maven.doxia:doxia-sink-api:pom:1.7": { + "layout": "org/apache/maven/doxia/doxia-sink-api/1.7/doxia-sink-api-1.7.pom", + "sha256": "05f6a72ff10b12e0a7eafbe8d17e8da8c70942bfbc74227d8e200a307fef9a5b", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sink-api/1.7/doxia-sink-api-1.7.pom" + }, + "org.apache.maven.doxia:doxia-site-renderer:jar:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-site-renderer/1.0-alpha-7/doxia-site-renderer-1.0-alpha-7.jar", + "sha256": "1afcbfd704b5959f291112831a1c4f7d735ea9fb9e0c0fe25c82c1fa9cd32795", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-site-renderer/1.0-alpha-7/doxia-site-renderer-1.0-alpha-7.jar" + }, + "org.apache.maven.doxia:doxia-site-renderer:jar:1.4": { + "layout": "org/apache/maven/doxia/doxia-site-renderer/1.4/doxia-site-renderer-1.4.jar", + "sha256": "aeb703af3fc536a61b4c596087c10bfe0a0727c8cb0f4f979ff799e981498b87", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-site-renderer/1.4/doxia-site-renderer-1.4.jar" + }, + "org.apache.maven.doxia:doxia-site-renderer:pom:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia-site-renderer/1.0-alpha-7/doxia-site-renderer-1.0-alpha-7.pom", + "sha256": "b3d22627ff432c40a01eead282a97e37264c0156173faf8c57e5fe21321801e8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-site-renderer/1.0-alpha-7/doxia-site-renderer-1.0-alpha-7.pom" + }, + "org.apache.maven.doxia:doxia-site-renderer:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-site-renderer/1.4/doxia-site-renderer-1.4.pom", + "sha256": "7b5b24855a7e468244740b05e33058087c22f312034d932cf3fe8ca68dd4c8b6", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-site-renderer/1.4/doxia-site-renderer-1.4.pom" + }, + "org.apache.maven.doxia:doxia-site-renderer:pom:1.7.4": { + "layout": "org/apache/maven/doxia/doxia-site-renderer/1.7.4/doxia-site-renderer-1.7.4.pom", + "sha256": "a10e284048c67daae326d4b019b32d49e1dc366cb4593044fca64ad9b8f228f9", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-site-renderer/1.7.4/doxia-site-renderer-1.7.4.pom" + }, + "org.apache.maven.doxia:doxia-sitetools:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia-sitetools/1.4/doxia-sitetools-1.4.pom", + "sha256": "9eafbabb727bd4d9a3d755169c67c3313120a8249b9fc6575f57bd1c32735485", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sitetools/1.4/doxia-sitetools-1.4.pom" + }, + "org.apache.maven.doxia:doxia-sitetools:pom:1.7.4": { + "layout": "org/apache/maven/doxia/doxia-sitetools/1.7.4/doxia-sitetools-1.7.4.pom", + "sha256": "62e4b8dd8172265650f4ab17cb4d24b6426c5a3679768fbdd4ff624621069193", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-sitetools/1.7.4/doxia-sitetools-1.7.4.pom" + }, + "org.apache.maven.doxia:doxia-skin-model:pom:1.7.4": { + "layout": "org/apache/maven/doxia/doxia-skin-model/1.7.4/doxia-skin-model-1.7.4.pom", + "sha256": "ded04d7acabcf950b8fd226fbccac8db2f49c0a8aa45f17d3cfa5d4e9aa522ee", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-skin-model/1.7.4/doxia-skin-model-1.7.4.pom" + }, + "org.apache.maven.doxia:doxia:pom:1.0": { + "layout": "org/apache/maven/doxia/doxia/1.0/doxia-1.0.pom", + "sha256": "38246291439393fd08f54c6d7fedde2db0fd5c94d0910f17b99e8d59a2858e98", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia/1.0/doxia-1.0.pom" + }, + "org.apache.maven.doxia:doxia:pom:1.0-alpha-7": { + "layout": "org/apache/maven/doxia/doxia/1.0-alpha-7/doxia-1.0-alpha-7.pom", + "sha256": "172106a7ac51408883a074a1b847768e71c40fbbd969c8d645d2570026b811be", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia/1.0-alpha-7/doxia-1.0-alpha-7.pom" + }, + "org.apache.maven.doxia:doxia:pom:1.0-alpha-9": { + "layout": "org/apache/maven/doxia/doxia/1.0-alpha-9/doxia-1.0-alpha-9.pom", + "sha256": "88d361ecbf2f165352d7a20b7c2a6cac8d12146177dfd896b916d64c0c4ab745", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia/1.0-alpha-9/doxia-1.0-alpha-9.pom" + }, + "org.apache.maven.doxia:doxia:pom:1.4": { + "layout": "org/apache/maven/doxia/doxia/1.4/doxia-1.4.pom", + "sha256": "f290a2ea66adf00e049c22757089777c0f0a50e2c6db55a1bc2fb07a41f88c3f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia/1.4/doxia-1.4.pom" + }, + "org.apache.maven.doxia:doxia:pom:1.5": { + "layout": "org/apache/maven/doxia/doxia/1.5/doxia-1.5.pom", + "sha256": "1059bc2e62382f78d201e13f10b6966d1f12479bd73196d7ec7a74afa8646957", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia/1.5/doxia-1.5.pom" + }, + "org.apache.maven.doxia:doxia:pom:1.7": { + "layout": "org/apache/maven/doxia/doxia/1.7/doxia-1.7.pom", + "sha256": "bf4d8dc55ade524f7f37cab634ffb3898af0e84c74f512406baa080fc44cd5d7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia/1.7/doxia-1.7.pom" + }, + "org.apache.maven.plugins:maven-compiler-plugin:jar:3.7.0": { + "layout": "org/apache/maven/plugins/maven-compiler-plugin/3.7.0/maven-compiler-plugin-3.7.0.jar", + "sha256": "5b2e3a0c95d1125ba9312bd08f5e9e8b8bf0fa7842254a73e597728660044503", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-compiler-plugin/3.7.0/maven-compiler-plugin-3.7.0.jar" + }, + "org.apache.maven.plugins:maven-compiler-plugin:pom:3.7.0": { + "layout": "org/apache/maven/plugins/maven-compiler-plugin/3.7.0/maven-compiler-plugin-3.7.0.pom", + "sha256": "0619c970c7c1c9f7c1e074ab48a4b396a8b404a5fd3176144fad1d949c9bc21b", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-compiler-plugin/3.7.0/maven-compiler-plugin-3.7.0.pom" + }, + "org.apache.maven.plugins:maven-failsafe-plugin:jar:3.0.0-M5": { + "layout": "org/apache/maven/plugins/maven-failsafe-plugin/3.0.0-M5/maven-failsafe-plugin-3.0.0-M5.jar", + "sha256": "873ebaa1213e74b9dca5100a5eef3e73b6c641b781eb58c6071164b4cc088f3e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-failsafe-plugin/3.0.0-M5/maven-failsafe-plugin-3.0.0-M5.jar" + }, + "org.apache.maven.plugins:maven-failsafe-plugin:pom:3.0.0-M5": { + "layout": "org/apache/maven/plugins/maven-failsafe-plugin/3.0.0-M5/maven-failsafe-plugin-3.0.0-M5.pom", + "sha256": "8db83a0433b00566a4d45821885380142fb27ab6875ed8074f94de4509bba834", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-failsafe-plugin/3.0.0-M5/maven-failsafe-plugin-3.0.0-M5.pom" + }, + "org.apache.maven.plugins:maven-install-plugin:jar:2.4": { + "layout": "org/apache/maven/plugins/maven-install-plugin/2.4/maven-install-plugin-2.4.jar", + "sha256": "7f8929179fc615b058d23f6f813c093c3706d75fe7b1878064868415997d1256", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-install-plugin/2.4/maven-install-plugin-2.4.jar" + }, + "org.apache.maven.plugins:maven-install-plugin:pom:2.4": { + "layout": "org/apache/maven/plugins/maven-install-plugin/2.4/maven-install-plugin-2.4.pom", + "sha256": "be7ffec677085b258a22395816ad32ad8d0ddeeafbf9eebe6e03814c080ce2a6", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-install-plugin/2.4/maven-install-plugin-2.4.pom" + }, + "org.apache.maven.plugins:maven-jar-plugin:jar:2.4": { + "layout": "org/apache/maven/plugins/maven-jar-plugin/2.4/maven-jar-plugin-2.4.jar", + "sha256": "1f22f2e528daddbc5d06518c4dbe4d5f4fa6995c8441c702ee5d7d506bb4b4f3", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-jar-plugin/2.4/maven-jar-plugin-2.4.jar" + }, + "org.apache.maven.plugins:maven-jar-plugin:pom:2.4": { + "layout": "org/apache/maven/plugins/maven-jar-plugin/2.4/maven-jar-plugin-2.4.pom", + "sha256": "a98f60925af4a1729529a1e9c5aba78ffdd024c67a8f825ed974a0ab006d315a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-jar-plugin/2.4/maven-jar-plugin-2.4.pom" + }, + "org.apache.maven.plugins:maven-plugins:pom:22": { + "layout": "org/apache/maven/plugins/maven-plugins/22/maven-plugins-22.pom", + "sha256": "34d303bbdd31d34f5a1570549b0e134b72afb18db53d8fcfdad222d51e941630", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-plugins/22/maven-plugins-22.pom" + }, + "org.apache.maven.plugins:maven-plugins:pom:23": { + "layout": "org/apache/maven/plugins/maven-plugins/23/maven-plugins-23.pom", + "sha256": "e07710c7d516a873c8fcafe85840d8a1a78f460c9a364d1c57b1d9e4554639ce", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-plugins/23/maven-plugins-23.pom" + }, + "org.apache.maven.plugins:maven-plugins:pom:30": { + "layout": "org/apache/maven/plugins/maven-plugins/30/maven-plugins-30.pom", + "sha256": "439fe43023445ac759740e1ca29641fc54453911bc8b55f8c3d84b3d76843ab1", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-plugins/30/maven-plugins-30.pom" + }, + "org.apache.maven.plugins:maven-plugins:pom:37": { + "layout": "org/apache/maven/plugins/maven-plugins/37/maven-plugins-37.pom", + "sha256": "bb8fba5306f2bb8fb92ce3379ba66fb2056aa0e150ce469f929445aeb900175f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-plugins/37/maven-plugins-37.pom" + }, + "org.apache.maven.plugins:maven-resources-plugin:jar:2.6": { + "layout": "org/apache/maven/plugins/maven-resources-plugin/2.6/maven-resources-plugin-2.6.jar", + "sha256": "07bd1b98b5b029af91fabcf99a9b3463b9dc09b993f28c2ee0ccc98265888ca6", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-resources-plugin/2.6/maven-resources-plugin-2.6.jar" + }, + "org.apache.maven.plugins:maven-resources-plugin:pom:2.6": { + "layout": "org/apache/maven/plugins/maven-resources-plugin/2.6/maven-resources-plugin-2.6.pom", + "sha256": "a7842d002fbc7d2a84217106be909bc85ea35aa47031e410ab8afbb3b3364e2d", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-resources-plugin/2.6/maven-resources-plugin-2.6.pom" + }, + "org.apache.maven.plugins:maven-shade-plugin:jar:3.4.1": { + "layout": "org/apache/maven/plugins/maven-shade-plugin/3.4.1/maven-shade-plugin-3.4.1.jar", + "sha256": "838f1c74c5fed6ee68994741e14823274cb145ebe701cef1c707746ebe048d6a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-shade-plugin/3.4.1/maven-shade-plugin-3.4.1.jar" + }, + "org.apache.maven.plugins:maven-shade-plugin:pom:3.4.1": { + "layout": "org/apache/maven/plugins/maven-shade-plugin/3.4.1/maven-shade-plugin-3.4.1.pom", + "sha256": "3b8cb4b38a8ed4b8b1cbe021e61455aeea69a94b71e375b6572c0e907bc8e9fa", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-shade-plugin/3.4.1/maven-shade-plugin-3.4.1.pom" + }, + "org.apache.maven.plugins:maven-surefire-plugin:jar:3.0.0-M5": { + "layout": "org/apache/maven/plugins/maven-surefire-plugin/3.0.0-M5/maven-surefire-plugin-3.0.0-M5.jar", + "sha256": "598b82718ed905e5d67d4a70d191a7f5a1f2e3dd42207d1b8f808a27086f4f17", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-surefire-plugin/3.0.0-M5/maven-surefire-plugin-3.0.0-M5.jar" + }, + "org.apache.maven.plugins:maven-surefire-plugin:pom:3.0.0-M5": { + "layout": "org/apache/maven/plugins/maven-surefire-plugin/3.0.0-M5/maven-surefire-plugin-3.0.0-M5.pom", + "sha256": "8c61a4eea9bd5b3bc9d96843ede772c0537bede17ed9b4a68e1e8ca4e839d0c8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/plugins/maven-surefire-plugin/3.0.0-M5/maven-surefire-plugin-3.0.0-M5.pom" + }, + "org.apache.maven.reporting:maven-reporting-api:jar:2.0.4": { + "layout": "org/apache/maven/reporting/maven-reporting-api/2.0.4/maven-reporting-api-2.0.4.jar", + "sha256": "25bc507061bed79a5c33df735cb03d0d9f5b43effafd4bc429c9170ba3a4fea4", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-api/2.0.4/maven-reporting-api-2.0.4.jar" + }, + "org.apache.maven.reporting:maven-reporting-api:jar:2.0.6": { + "layout": "org/apache/maven/reporting/maven-reporting-api/2.0.6/maven-reporting-api-2.0.6.jar", + "sha256": "ba372ac9e52b481671bb3ea913063cd66747780de340d0c3b1f18c49ec998786", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-api/2.0.6/maven-reporting-api-2.0.6.jar" + }, + "org.apache.maven.reporting:maven-reporting-api:jar:3.0": { + "layout": "org/apache/maven/reporting/maven-reporting-api/3.0/maven-reporting-api-3.0.jar", + "sha256": "498949e5576b022559d1622e534c18e052f94dec883924b67e0a4e8676c07b17", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-api/3.0/maven-reporting-api-3.0.jar" + }, + "org.apache.maven.reporting:maven-reporting-api:pom:2.0.4": { + "layout": "org/apache/maven/reporting/maven-reporting-api/2.0.4/maven-reporting-api-2.0.4.pom", + "sha256": "8e4428044a3126756355a7414539e5b669cb0cd10e02cae18d8a3dccb8f70ff1", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-api/2.0.4/maven-reporting-api-2.0.4.pom" + }, + "org.apache.maven.reporting:maven-reporting-api:pom:2.0.6": { + "layout": "org/apache/maven/reporting/maven-reporting-api/2.0.6/maven-reporting-api-2.0.6.pom", + "sha256": "4ac659858bac5929ea8fc183d7ce4b588d7e69363a76277f6a26349393686657", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-api/2.0.6/maven-reporting-api-2.0.6.pom" + }, + "org.apache.maven.reporting:maven-reporting-api:pom:3.0": { + "layout": "org/apache/maven/reporting/maven-reporting-api/3.0/maven-reporting-api-3.0.pom", + "sha256": "efaa4fc4832aad9703df46b89cb02845dbf4db6f6ac88534b7824c4956a3a5fb", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-api/3.0/maven-reporting-api-3.0.pom" + }, + "org.apache.maven.reporting:maven-reporting-impl:jar:2.0.4": { + "layout": "org/apache/maven/reporting/maven-reporting-impl/2.0.4/maven-reporting-impl-2.0.4.jar", + "sha256": "e6de930d99b72740f0e68ab21fe6de0a5e84e758e54bbf15336cd1b0aa5608a2", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-impl/2.0.4/maven-reporting-impl-2.0.4.jar" + }, + 
"org.apache.maven.reporting:maven-reporting-impl:jar:3.0.0": { + "layout": "org/apache/maven/reporting/maven-reporting-impl/3.0.0/maven-reporting-impl-3.0.0.jar", + "sha256": "721ce27b1403dbbeaa6a0171d1183de603bb7ba56c61cccb381b0bc0ff00fe36", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-impl/3.0.0/maven-reporting-impl-3.0.0.jar" + }, + "org.apache.maven.reporting:maven-reporting-impl:pom:2.0.4": { + "layout": "org/apache/maven/reporting/maven-reporting-impl/2.0.4/maven-reporting-impl-2.0.4.pom", + "sha256": "c932717cb3c89d426bb827c6f9548aa2d185d45d6af315470b9246d1818917e9", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-impl/2.0.4/maven-reporting-impl-2.0.4.pom" + }, + "org.apache.maven.reporting:maven-reporting-impl:pom:3.0.0": { + "layout": "org/apache/maven/reporting/maven-reporting-impl/3.0.0/maven-reporting-impl-3.0.0.pom", + "sha256": "b67e22d77fd823345b850f8544f87da5ffed4cb59ec4cdc538488149a0f692e8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting-impl/3.0.0/maven-reporting-impl-3.0.0.pom" + }, + "org.apache.maven.reporting:maven-reporting:pom:2.0.4": { + "layout": "org/apache/maven/reporting/maven-reporting/2.0.4/maven-reporting-2.0.4.pom", + "sha256": "f4e686d6b7bb19f757196798b05840dc286b0211d05af707f387b37971c8300c", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting/2.0.4/maven-reporting-2.0.4.pom" + }, + "org.apache.maven.reporting:maven-reporting:pom:2.0.6": { + "layout": "org/apache/maven/reporting/maven-reporting/2.0.6/maven-reporting-2.0.6.pom", + "sha256": "ab803be997ac806ee7f2e179ad3cda640345ed8fc911082b1f80202c7fc463a4", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/reporting/maven-reporting/2.0.6/maven-reporting-2.0.6.pom" + }, + "org.apache.maven.shared:file-management:jar:1.2.1": { + "layout": "org/apache/maven/shared/file-management/1.2.1/file-management-1.2.1.jar", + "sha256": "009478892149c0141645276d2c74094e7db595a48765b74834565b1dd25b454e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/file-management/1.2.1/file-management-1.2.1.jar" + }, + "org.apache.maven.shared:file-management:pom:1.2.1": { + "layout": "org/apache/maven/shared/file-management/1.2.1/file-management-1.2.1.pom", + "sha256": "156a09e226c60d5de9d5d6cd8e3dfea08959ede10347c2aa67b995fd9069cd3c", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/file-management/1.2.1/file-management-1.2.1.pom" + }, + "org.apache.maven.shared:maven-artifact-transfer:jar:0.11.0": { + "layout": "org/apache/maven/shared/maven-artifact-transfer/0.11.0/maven-artifact-transfer-0.11.0.jar", + "sha256": "1f474df0b9dd55e5bb755a131ec64b307c557293328711adb579d21c010dffde", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-artifact-transfer/0.11.0/maven-artifact-transfer-0.11.0.jar" + }, + "org.apache.maven.shared:maven-artifact-transfer:jar:0.13.1": { + "layout": "org/apache/maven/shared/maven-artifact-transfer/0.13.1/maven-artifact-transfer-0.13.1.jar", + "sha256": "1ac88accde99ed71e65253bd130868c0e654f940f01ade073b895eb2f817cf06", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-artifact-transfer/0.13.1/maven-artifact-transfer-0.13.1.jar" + }, + "org.apache.maven.shared:maven-artifact-transfer:pom:0.11.0": { + "layout": "org/apache/maven/shared/maven-artifact-transfer/0.11.0/maven-artifact-transfer-0.11.0.pom", + "sha256": 
"18cb9370815a5bd8002208fd8e51a53abee7eb056f3ae90fee9f276a4e4d909e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-artifact-transfer/0.11.0/maven-artifact-transfer-0.11.0.pom" + }, + "org.apache.maven.shared:maven-artifact-transfer:pom:0.13.1": { + "layout": "org/apache/maven/shared/maven-artifact-transfer/0.13.1/maven-artifact-transfer-0.13.1.pom", + "sha256": "e4b15a1e7cfbfe480408cfbaa148d66ea3324bf19e9ac6d6c17053bdb18ac4cd", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-artifact-transfer/0.13.1/maven-artifact-transfer-0.13.1.pom" + }, + "org.apache.maven.shared:maven-common-artifact-filters:jar:3.1.0": { + "layout": "org/apache/maven/shared/maven-common-artifact-filters/3.1.0/maven-common-artifact-filters-3.1.0.jar", + "sha256": "82a584c58bd6a1b13861e2d4cc194b5afc09ca0adad9fda741f16337dcda2e96", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-common-artifact-filters/3.1.0/maven-common-artifact-filters-3.1.0.jar" + }, + "org.apache.maven.shared:maven-common-artifact-filters:pom:3.1.0": { + "layout": "org/apache/maven/shared/maven-common-artifact-filters/3.1.0/maven-common-artifact-filters-3.1.0.pom", + "sha256": "034e12a9d1d5f5618a9e0dda23aadda4ed659ec55240876b6e954cc2172be456", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-common-artifact-filters/3.1.0/maven-common-artifact-filters-3.1.0.pom" + }, + "org.apache.maven.shared:maven-dependency-tree:jar:3.2.0": { + "layout": "org/apache/maven/shared/maven-dependency-tree/3.2.0/maven-dependency-tree-3.2.0.jar", + "sha256": "03d3102672863761c2a39da09c444cc7dea74cc4a9efa2107f8f0bfd2519d330", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-dependency-tree/3.2.0/maven-dependency-tree-3.2.0.jar" + }, + "org.apache.maven.shared:maven-dependency-tree:pom:3.2.0": { + "layout": "org/apache/maven/shared/maven-dependency-tree/3.2.0/maven-dependency-tree-3.2.0.pom", + "sha256": "ee3079cb638a81b8ab658047e22ff4b7aeca65ca7f2c28cdbe10474225bc361e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-dependency-tree/3.2.0/maven-dependency-tree-3.2.0.pom" + }, + "org.apache.maven.shared:maven-filtering:jar:1.1": { + "layout": "org/apache/maven/shared/maven-filtering/1.1/maven-filtering-1.1.jar", + "sha256": "05fa641c31894ce930c6eb76ec1aa53a9de4cd9baa837fe492e9e0407099d226", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-filtering/1.1/maven-filtering-1.1.jar" + }, + "org.apache.maven.shared:maven-filtering:pom:1.1": { + "layout": "org/apache/maven/shared/maven-filtering/1.1/maven-filtering-1.1.pom", + "sha256": "9f4bf710d0de3c4dde45182aae3d604784b4e2f91696e27f3411286ef96d181c", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-filtering/1.1/maven-filtering-1.1.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:10": { + "layout": "org/apache/maven/shared/maven-shared-components/10/maven-shared-components-10.pom", + "sha256": "833aa62469a7c812cccc5a572983d662a4a1805dcdc90ad0244ba159426ec00f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/10/maven-shared-components-10.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:15": { + "layout": "org/apache/maven/shared/maven-shared-components/15/maven-shared-components-15.pom", + "sha256": "6a58eb24291600f75ce0fe369b73fe6700f575ace4b664724d3cd0a6b85b63ee", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/15/maven-shared-components-15.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:17": { + "layout": "org/apache/maven/shared/maven-shared-components/17/maven-shared-components-17.pom", + "sha256": "4b96931a6d12491f858b44b2dbea50c1070c960232c041b966189dc905ac2631", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/17/maven-shared-components-17.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:18": { + "layout": "org/apache/maven/shared/maven-shared-components/18/maven-shared-components-18.pom", + "sha256": "a1d54fb81b5a8f197f5b3d0a928f63da2278c79bc8dd06e0be93593403f05775", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/18/maven-shared-components-18.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:19": { + "layout": "org/apache/maven/shared/maven-shared-components/19/maven-shared-components-19.pom", + "sha256": "d82408269aada2eb1521ee8ff17f7c67333684f8ed2a09a9e35badd2e7575957", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/19/maven-shared-components-19.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:30": { + "layout": "org/apache/maven/shared/maven-shared-components/30/maven-shared-components-30.pom", + "sha256": "ad9df3b73df8bbc0309ad42818fa9779cd10528df0708788f4aceddc514bd031", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/30/maven-shared-components-30.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:33": { + "layout": "org/apache/maven/shared/maven-shared-components/33/maven-shared-components-33.pom", + "sha256": "f43ff6fee0b32533765b3406648d6a5532f85d5e488079480788cb36e79d0980", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/33/maven-shared-components-33.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:34": { + "layout": "org/apache/maven/shared/maven-shared-components/34/maven-shared-components-34.pom", + "sha256": "64d0edb5f21cfff600b1c3ab7d45f9754cd18ba5fbf83b3d1bb7c4849437d8e3", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/34/maven-shared-components-34.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:37": { + "layout": "org/apache/maven/shared/maven-shared-components/37/maven-shared-components-37.pom", + "sha256": "72ab6f7efae60892e6457871a951326a9ad7f92932d34a97715d0908b87874d3", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/37/maven-shared-components-37.pom" + }, + "org.apache.maven.shared:maven-shared-components:pom:8": { + "layout": "org/apache/maven/shared/maven-shared-components/8/maven-shared-components-8.pom", + "sha256": "a8d03f96ac58a1d00bad2ed7d251e3eda857c592a09e58d6bdb79d5e8626a316", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-components/8/maven-shared-components-8.pom" + }, + "org.apache.maven.shared:maven-shared-incremental:jar:1.1": { + "layout": "org/apache/maven/shared/maven-shared-incremental/1.1/maven-shared-incremental-1.1.jar", + "sha256": "61988e54486a5dc38f06c70fdae5b108556c63bd433697b9f4305fcdb30fa40e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-incremental/1.1/maven-shared-incremental-1.1.jar" + }, + 
"org.apache.maven.shared:maven-shared-incremental:pom:1.1": { + "layout": "org/apache/maven/shared/maven-shared-incremental/1.1/maven-shared-incremental-1.1.pom", + "sha256": "f21d19eb49b4a66cd85354a9ee7335439ea92a368173760a202766008cc19924", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-incremental/1.1/maven-shared-incremental-1.1.pom" + }, + "org.apache.maven.shared:maven-shared-io:jar:1.1": { + "layout": "org/apache/maven/shared/maven-shared-io/1.1/maven-shared-io-1.1.jar", + "sha256": "10c0b971d692d2e3026aec6c49cbb12ddee4214e2a727603d1d309779ca2a62b", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-io/1.1/maven-shared-io-1.1.jar" + }, + "org.apache.maven.shared:maven-shared-io:pom:1.1": { + "layout": "org/apache/maven/shared/maven-shared-io/1.1/maven-shared-io-1.1.pom", + "sha256": "ce42b015e3c2d6dafeb99763e360d0c0b08dfe054e204e944a23ddcc2463686a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-io/1.1/maven-shared-io-1.1.pom" + }, + "org.apache.maven.shared:maven-shared-utils:jar:3.1.0": { + "layout": "org/apache/maven/shared/maven-shared-utils/3.1.0/maven-shared-utils-3.1.0.jar", + "sha256": "88e5334c4c29a6e81c74a1d814c54a9a3b1e4fc6560a95da196fe16928095471", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-utils/3.1.0/maven-shared-utils-3.1.0.jar" + }, + "org.apache.maven.shared:maven-shared-utils:jar:3.2.0": { + "layout": "org/apache/maven/shared/maven-shared-utils/3.2.0/maven-shared-utils-3.2.0.jar", + "sha256": "d62150f831761f11ad6947f0090f89f411c75179c4995ccccd774b5c2f8a90bd", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-utils/3.2.0/maven-shared-utils-3.2.0.jar" + }, + "org.apache.maven.shared:maven-shared-utils:pom:0.1": { + "layout": "org/apache/maven/shared/maven-shared-utils/0.1/maven-shared-utils-0.1.pom", + "sha256": "9ecb36b0e0d7d1d0f0dabd8705368b710df58b943091e9fa9071a29ccdc15a33", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-utils/0.1/maven-shared-utils-0.1.pom" + }, + "org.apache.maven.shared:maven-shared-utils:pom:3.1.0": { + "layout": "org/apache/maven/shared/maven-shared-utils/3.1.0/maven-shared-utils-3.1.0.pom", + "sha256": "68f9fdef85d2c89f53c63cbc559920e0115bd30eb6f7076c9854931d3829027b", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-utils/3.1.0/maven-shared-utils-3.1.0.pom" + }, + "org.apache.maven.shared:maven-shared-utils:pom:3.2.0": { + "layout": "org/apache/maven/shared/maven-shared-utils/3.2.0/maven-shared-utils-3.2.0.pom", + "sha256": "06a9d67118fc1da7600de250b35449ca4b11de7f1d4c6ff0e1954359ed9bb700", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/shared/maven-shared-utils/3.2.0/maven-shared-utils-3.2.0.pom" + }, + "org.apache.maven.surefire:maven-surefire-common:jar:3.0.0-M5": { + "layout": "org/apache/maven/surefire/maven-surefire-common/3.0.0-M5/maven-surefire-common-3.0.0-M5.jar", + "sha256": "ff20ecb3c9ed1eef654c3e05b52bbca4916613c52c7135cd11d9bec92a7175f9", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/maven-surefire-common/3.0.0-M5/maven-surefire-common-3.0.0-M5.jar" + }, + "org.apache.maven.surefire:maven-surefire-common:pom:3.0.0-M5": { + "layout": "org/apache/maven/surefire/maven-surefire-common/3.0.0-M5/maven-surefire-common-3.0.0-M5.pom", + "sha256": "95ead013bfd67e469a484a71dfaba25381314a63d9b4b7714e06ca90e0d07c7b", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/surefire/maven-surefire-common/3.0.0-M5/maven-surefire-common-3.0.0-M5.pom" + }, + "org.apache.maven.surefire:surefire-api:jar:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-api/3.0.0-M5/surefire-api-3.0.0-M5.jar", + "sha256": "f963823ad9c422b26ece431704b0de740c925ab4bfde6a34098d48056eb53594", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-api/3.0.0-M5/surefire-api-3.0.0-M5.jar" + }, + "org.apache.maven.surefire:surefire-api:pom:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-api/3.0.0-M5/surefire-api-3.0.0-M5.pom", + "sha256": "316959873e9d8ca83c5a66228a68c65508388550067bc77a98645b4a1046d9bd", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-api/3.0.0-M5/surefire-api-3.0.0-M5.pom" + }, + "org.apache.maven.surefire:surefire-booter:jar:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-booter/3.0.0-M5/surefire-booter-3.0.0-M5.jar", + "sha256": "1078a430b772a69e2736770cf0d76bcd2533c33157261f185de013d69f0585c9", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-booter/3.0.0-M5/surefire-booter-3.0.0-M5.jar" + }, + "org.apache.maven.surefire:surefire-booter:pom:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-booter/3.0.0-M5/surefire-booter-3.0.0-M5.pom", + "sha256": "14c0ce0e92b56f149b348a3e08a6d7142ac00fd0df90d0ec501a624f59e86098", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-booter/3.0.0-M5/surefire-booter-3.0.0-M5.pom" + }, + "org.apache.maven.surefire:surefire-extensions-api:jar:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-extensions-api/3.0.0-M5/surefire-extensions-api-3.0.0-M5.jar", + "sha256": "9ffd2515eee4a071f2cf4883748db98645fc9f5952774cd8846ac506c6bbe0b2", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-extensions-api/3.0.0-M5/surefire-extensions-api-3.0.0-M5.jar" + }, + "org.apache.maven.surefire:surefire-extensions-api:pom:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-extensions-api/3.0.0-M5/surefire-extensions-api-3.0.0-M5.pom", + "sha256": "937f8af02e1ab842fea45523f0b2e501a742c1ebabaea455a934876d3cb776c8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-extensions-api/3.0.0-M5/surefire-extensions-api-3.0.0-M5.pom" + }, + "org.apache.maven.surefire:surefire-extensions-spi:jar:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-extensions-spi/3.0.0-M5/surefire-extensions-spi-3.0.0-M5.jar", + "sha256": "1309a1c4a68e90d1abcdb2355ca3124d238cd07f1f4d5ad29ea7671fc4df47bb", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-extensions-spi/3.0.0-M5/surefire-extensions-spi-3.0.0-M5.jar" + }, + "org.apache.maven.surefire:surefire-extensions-spi:pom:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-extensions-spi/3.0.0-M5/surefire-extensions-spi-3.0.0-M5.pom", + "sha256": "1d85d5dc426d235d043b6a7a1e657fa77b395de51aa6688b0a2bf3c70b9229b5", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-extensions-spi/3.0.0-M5/surefire-extensions-spi-3.0.0-M5.pom" + }, + "org.apache.maven.surefire:surefire-logger-api:jar:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-logger-api/3.0.0-M5/surefire-logger-api-3.0.0-M5.jar", + "sha256": "739627f1ecb7b2253e5900c01d7c734187707034978a8aa35f6758abc0dc76f8", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-logger-api/3.0.0-M5/surefire-logger-api-3.0.0-M5.jar" + }, + "org.apache.maven.surefire:surefire-logger-api:pom:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire-logger-api/3.0.0-M5/surefire-logger-api-3.0.0-M5.pom", + "sha256": "db64b7dc5d866b5e0ed850a9179e0aea55a801f3489efef9af2d4c91e13a2eec", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-logger-api/3.0.0-M5/surefire-logger-api-3.0.0-M5.pom" + }, + "org.apache.maven.surefire:surefire-shared-utils:jar:3.0.0-M4": { + "layout": "org/apache/maven/surefire/surefire-shared-utils/3.0.0-M4/surefire-shared-utils-3.0.0-M4.jar", + "sha256": "90574246a32a6d6d85e484bf075eb47bd5344581dc8496128b67527d2d28cd0d", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-shared-utils/3.0.0-M4/surefire-shared-utils-3.0.0-M4.jar" + }, + "org.apache.maven.surefire:surefire-shared-utils:pom:3.0.0-M4": { + "layout": "org/apache/maven/surefire/surefire-shared-utils/3.0.0-M4/surefire-shared-utils-3.0.0-M4.pom", + "sha256": "3cfb9c80f80ec190d4b5576da195ac4c43ea0a34656fc039a968e3a9f71bf7ac", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire-shared-utils/3.0.0-M4/surefire-shared-utils-3.0.0-M4.pom" + }, + "org.apache.maven.surefire:surefire:pom:3.0.0-M4": { + "layout": "org/apache/maven/surefire/surefire/3.0.0-M4/surefire-3.0.0-M4.pom", + "sha256": "6e05711529bb3a792ab996bede47196082501d006e7f781ab7f30ad69fc3e102", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire/3.0.0-M4/surefire-3.0.0-M4.pom" + }, + "org.apache.maven.surefire:surefire:pom:3.0.0-M5": { + "layout": "org/apache/maven/surefire/surefire/3.0.0-M5/surefire-3.0.0-M5.pom", + "sha256": "125ec88d3c4a8b18ca5fb755d2d40f9eca07a3ca2e3a5ac31d6ce3d9fc92a3b0", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/surefire/surefire/3.0.0-M5/surefire-3.0.0-M5.pom" + }, + "org.apache.maven.wagon:wagon-provider-api:jar:1.0-alpha-6": { + "layout": "org/apache/maven/wagon/wagon-provider-api/1.0-alpha-6/wagon-provider-api-1.0-alpha-6.jar", + "sha256": "a1e299e44d17d6eb67aacc4b5476bba4cbd4df7d1a16ef46ba9624e79d300c68", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/wagon/wagon-provider-api/1.0-alpha-6/wagon-provider-api-1.0-alpha-6.jar" + }, + "org.apache.maven.wagon:wagon-provider-api:pom:1.0-alpha-6": { + "layout": "org/apache/maven/wagon/wagon-provider-api/1.0-alpha-6/wagon-provider-api-1.0-alpha-6.pom", + "sha256": "0a752e52297e91d0d1266655e3058fda197fb3f0d9d37f01ac49053b223ccbc5", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/wagon/wagon-provider-api/1.0-alpha-6/wagon-provider-api-1.0-alpha-6.pom" + }, + "org.apache.maven.wagon:wagon:pom:1.0-alpha-6": { + "layout": "org/apache/maven/wagon/wagon/1.0-alpha-6/wagon-1.0-alpha-6.pom", + "sha256": "36632b9cd41cc3010ac8f5c8cc2a015956f2256a4045c63d968898c1567b0eaf", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/wagon/wagon/1.0-alpha-6/wagon-1.0-alpha-6.pom" + }, + "org.apache.maven:maven-aether-provider:jar:3.0": { + "layout": "org/apache/maven/maven-aether-provider/3.0/maven-aether-provider-3.0.jar", + "sha256": "1205a1f229999170dcadcfb885a278ad0bc2295540a251f4c438f887ead7bbd9", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-aether-provider/3.0/maven-aether-provider-3.0.jar" + }, + "org.apache.maven:maven-aether-provider:pom:3.0": { + "layout": 
"org/apache/maven/maven-aether-provider/3.0/maven-aether-provider-3.0.pom", + "sha256": "755c07a1ae47cff80f633265b224341d6d8cc26f02d37eb407bc45ff5db9a71d", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-aether-provider/3.0/maven-aether-provider-3.0.pom" + }, + "org.apache.maven:maven-archiver:jar:2.5": { + "layout": "org/apache/maven/maven-archiver/2.5/maven-archiver-2.5.jar", + "sha256": "9fd34ed18dc5a49011380e620be86314f9f734193b8fcf85fb11e7b3a3929d6e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-archiver/2.5/maven-archiver-2.5.jar" + }, + "org.apache.maven:maven-archiver:pom:2.5": { + "layout": "org/apache/maven/maven-archiver/2.5/maven-archiver-2.5.pom", + "sha256": "86214750be31034691143c797a2656bc647f0defeb4988031aa5afaf39f16a31", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-archiver/2.5/maven-archiver-2.5.pom" + }, + "org.apache.maven:maven-artifact-manager:jar:2.0.2": { + "layout": "org/apache/maven/maven-artifact-manager/2.0.2/maven-artifact-manager-2.0.2.jar", + "sha256": "3ceec8c66c1d206d6e51e43659bccd26b53db32ace638b5f77f99c43f9a51e3d", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.0.2/maven-artifact-manager-2.0.2.jar" + }, + "org.apache.maven:maven-artifact-manager:jar:2.0.4": { + "layout": "org/apache/maven/maven-artifact-manager/2.0.4/maven-artifact-manager-2.0.4.jar", + "sha256": "493210ee1941611759f3238c0c4f7655077d91944ca4d5b08703269509778123", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.0.4/maven-artifact-manager-2.0.4.jar" + }, + "org.apache.maven:maven-artifact-manager:jar:2.0.6": { + "layout": "org/apache/maven/maven-artifact-manager/2.0.6/maven-artifact-manager-2.0.6.jar", + "sha256": "2cc0e74f4f8fe9f9733cd207101809273026464c64d3f1fb2af06b9d2a3c323e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.0.6/maven-artifact-manager-2.0.6.jar" + }, + "org.apache.maven:maven-artifact-manager:jar:2.2.1": { + "layout": "org/apache/maven/maven-artifact-manager/2.2.1/maven-artifact-manager-2.2.1.jar", + "sha256": "d1e247c4ed3952385fd704ac9db2a222247cfe7d20508b4f3c76b90f857952ed", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.2.1/maven-artifact-manager-2.2.1.jar" + }, + "org.apache.maven:maven-artifact-manager:pom:2.0.2": { + "layout": "org/apache/maven/maven-artifact-manager/2.0.2/maven-artifact-manager-2.0.2.pom", + "sha256": "94348507ad85d221df9167e9db313dfdb474d59e715d2c0638d3eac341756222", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.0.2/maven-artifact-manager-2.0.2.pom" + }, + "org.apache.maven:maven-artifact-manager:pom:2.0.4": { + "layout": "org/apache/maven/maven-artifact-manager/2.0.4/maven-artifact-manager-2.0.4.pom", + "sha256": "5620e8f30457d1f379bb3dc0f67e759d890177befdf5f2591809f9e0e302b68a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.0.4/maven-artifact-manager-2.0.4.pom" + }, + "org.apache.maven:maven-artifact-manager:pom:2.0.6": { + "layout": "org/apache/maven/maven-artifact-manager/2.0.6/maven-artifact-manager-2.0.6.pom", + "sha256": "a80884dfac999ebdda57904f929a14f28ab08e5411d515bdb1fbdaacf0ed6d6f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.0.6/maven-artifact-manager-2.0.6.pom" + }, + "org.apache.maven:maven-artifact-manager:pom:2.2.1": { + "layout": 
"org/apache/maven/maven-artifact-manager/2.2.1/maven-artifact-manager-2.2.1.pom", + "sha256": "ecf58351f8fe0c398b8b452216705bece5291b9b327d30202c16b28ac680450c", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact-manager/2.2.1/maven-artifact-manager-2.2.1.pom" + }, + "org.apache.maven:maven-artifact:jar:2.0.2": { + "layout": "org/apache/maven/maven-artifact/2.0.2/maven-artifact-2.0.2.jar", + "sha256": "eb23083642d881e60d258dd558c6d7b642f1fa98398cb247363ae84c04d4c09b", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.0.2/maven-artifact-2.0.2.jar" + }, + "org.apache.maven:maven-artifact:jar:2.0.4": { + "layout": "org/apache/maven/maven-artifact/2.0.4/maven-artifact-2.0.4.jar", + "sha256": "7294778dc3eb8ede4950ccca80dab8debc725304116b19ccb1929023d12534c4", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.0.4/maven-artifact-2.0.4.jar" + }, + "org.apache.maven:maven-artifact:jar:2.0.6": { + "layout": "org/apache/maven/maven-artifact/2.0.6/maven-artifact-2.0.6.jar", + "sha256": "f45629a70af0dfec1c1b542e162a2aceb976bb492825b6ee192e6a14fff238b6", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.0.6/maven-artifact-2.0.6.jar" + }, + "org.apache.maven:maven-artifact:jar:2.2.1": { + "layout": "org/apache/maven/maven-artifact/2.2.1/maven-artifact-2.2.1.jar", + "sha256": "d53062ffe8677a4f5e1ad3a1d1fa37ed600fab39166d39be7ed204635c5f839b", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.2.1/maven-artifact-2.2.1.jar" + }, + "org.apache.maven:maven-artifact:jar:3.0": { + "layout": "org/apache/maven/maven-artifact/3.0/maven-artifact-3.0.jar", + "sha256": "759079b9cf0cddae5ba06c96fd72347d82d0bc1d903c95d398c96522b139e470", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/3.0/maven-artifact-3.0.jar" + }, + "org.apache.maven:maven-artifact:pom:2.0.2": { + "layout": "org/apache/maven/maven-artifact/2.0.2/maven-artifact-2.0.2.pom", + "sha256": "0123d59dd14fa00b8a50eb299bf59945dbd824b6deec43823e2612151ca38c17", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.0.2/maven-artifact-2.0.2.pom" + }, + "org.apache.maven:maven-artifact:pom:2.0.4": { + "layout": "org/apache/maven/maven-artifact/2.0.4/maven-artifact-2.0.4.pom", + "sha256": "d8e78bc70afb9631d4e7c9db8f52778c4fd8c7c7998143bd7c420c40cf63ded4", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.0.4/maven-artifact-2.0.4.pom" + }, + "org.apache.maven:maven-artifact:pom:2.0.6": { + "layout": "org/apache/maven/maven-artifact/2.0.6/maven-artifact-2.0.6.pom", + "sha256": "7231047667b34a36cfed19d51ef38c9755e97e1acf11595a4b503c5ce7f0c595", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.0.6/maven-artifact-2.0.6.pom" + }, + "org.apache.maven:maven-artifact:pom:2.2.1": { + "layout": "org/apache/maven/maven-artifact/2.2.1/maven-artifact-2.2.1.pom", + "sha256": "f658a628efd6e0efe416b977638ba144af660fe6413f3637a4d03feb6a1ce806", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/2.2.1/maven-artifact-2.2.1.pom" + }, + "org.apache.maven:maven-artifact:pom:3.0": { + "layout": "org/apache/maven/maven-artifact/3.0/maven-artifact-3.0.pom", + "sha256": "c56a0dbd90cea691f83e58fa9a6388fb3ac6bc3c14b8c04d2e112544651fa528", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-artifact/3.0/maven-artifact-3.0.pom" + }, + "org.apache.maven:maven-core:jar:2.0.6": { + 
"layout": "org/apache/maven/maven-core/2.0.6/maven-core-2.0.6.jar", + "sha256": "710d56c05add33beadea2f86254223955aca570c15a610f81be07c96c919d7b6", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-core/2.0.6/maven-core-2.0.6.jar" + }, + "org.apache.maven:maven-core:jar:3.0": { + "layout": "org/apache/maven/maven-core/3.0/maven-core-3.0.jar", + "sha256": "ba03294ee53e7ba31838e4950f280d033c7744c6c7b31253afc75aa351fbd989", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-core/3.0/maven-core-3.0.jar" + }, + "org.apache.maven:maven-core:pom:2.0.6": { + "layout": "org/apache/maven/maven-core/2.0.6/maven-core-2.0.6.pom", + "sha256": "1ab7fdd1b82382690c081d3ea3f53bad2902e1d62a11a8096488711e7a5f607e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-core/2.0.6/maven-core-2.0.6.pom" + }, + "org.apache.maven:maven-core:pom:2.2.1": { + "layout": "org/apache/maven/maven-core/2.2.1/maven-core-2.2.1.pom", + "sha256": "5cc81603cab47bf20dbfd5e28e311da1fd26f2e3617b50547da5cd0b4f59edf3", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-core/2.2.1/maven-core-2.2.1.pom" + }, + "org.apache.maven:maven-core:pom:3.0": { + "layout": "org/apache/maven/maven-core/3.0/maven-core-3.0.pom", + "sha256": "f70e12ebea93f119f4f63766c2b8a3386c34bb48e588df710cb98c8e3822f7c7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-core/3.0/maven-core-3.0.pom" + }, + "org.apache.maven:maven-error-diagnostics:jar:2.0.6": { + "layout": "org/apache/maven/maven-error-diagnostics/2.0.6/maven-error-diagnostics-2.0.6.jar", + "sha256": "59c637b910c0de53d28aeeb6444ee5fe1a8fa8f3da32dbbc4485250c166d3fee", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-error-diagnostics/2.0.6/maven-error-diagnostics-2.0.6.jar" + }, + "org.apache.maven:maven-error-diagnostics:pom:2.0.6": { + "layout": "org/apache/maven/maven-error-diagnostics/2.0.6/maven-error-diagnostics-2.0.6.pom", + "sha256": "cb80662bd8274131bb09e140a4075bff660bdab5be56bf0f926efee0389f3c4e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-error-diagnostics/2.0.6/maven-error-diagnostics-2.0.6.pom" + }, + "org.apache.maven:maven-error-diagnostics:pom:2.2.1": { + "layout": "org/apache/maven/maven-error-diagnostics/2.2.1/maven-error-diagnostics-2.2.1.pom", + "sha256": "228367b7569fb1462a3eb1423bc2778e2fc7fbaee3d3767890c02b8924fa1889", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-error-diagnostics/2.2.1/maven-error-diagnostics-2.2.1.pom" + }, + "org.apache.maven:maven-model-builder:jar:3.0": { + "layout": "org/apache/maven/maven-model-builder/3.0/maven-model-builder-3.0.jar", + "sha256": "1c98a4ec9eb0cb86ecf01710aa75c0346ee3f96edc6edeabcb21ed984120e154", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model-builder/3.0/maven-model-builder-3.0.jar" + }, + "org.apache.maven:maven-model-builder:pom:3.0": { + "layout": "org/apache/maven/maven-model-builder/3.0/maven-model-builder-3.0.pom", + "sha256": "c1413ace47dafabe7917072f26e0b667f5b3a762156f82893544cd71e6a6c4ba", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model-builder/3.0/maven-model-builder-3.0.pom" + }, + "org.apache.maven:maven-model:jar:2.0.4": { + "layout": "org/apache/maven/maven-model/2.0.4/maven-model-2.0.4.jar", + "sha256": "025e9ba00b87e33c90ccb9f8befec4c3c6baa42436f158f00a69747007f84351", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/2.0.4/maven-model-2.0.4.jar" + }, + 
"org.apache.maven:maven-model:jar:2.0.6": { + "layout": "org/apache/maven/maven-model/2.0.6/maven-model-2.0.6.jar", + "sha256": "b86dbf20852da19fb17301ae7e2a40d87930c7d76fe43f0f93ff86b8a64c6962", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/2.0.6/maven-model-2.0.6.jar" + }, + "org.apache.maven:maven-model:jar:2.2.1": { + "layout": "org/apache/maven/maven-model/2.2.1/maven-model-2.2.1.jar", + "sha256": "153b32f474fd676ec36ad807c508885005139140fc92168bb76bf6be31f8efb8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/2.2.1/maven-model-2.2.1.jar" + }, + "org.apache.maven:maven-model:jar:3.0": { + "layout": "org/apache/maven/maven-model/3.0/maven-model-3.0.jar", + "sha256": "27e426d73f8662b47f60df0e43439b3dec2909c42b89175a6e4431dfed3edafd", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/3.0/maven-model-3.0.jar" + }, + "org.apache.maven:maven-model:pom:2.0.4": { + "layout": "org/apache/maven/maven-model/2.0.4/maven-model-2.0.4.pom", + "sha256": "a0df5fc56a44d4a716291d8879737a80516db598ca9c823f1dad525149df82b0", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/2.0.4/maven-model-2.0.4.pom" + }, + "org.apache.maven:maven-model:pom:2.0.6": { + "layout": "org/apache/maven/maven-model/2.0.6/maven-model-2.0.6.pom", + "sha256": "e88566fd64906e8e9a2315e5cb67efb7e0f4947a1c45a45cff399f64a235ac1f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/2.0.6/maven-model-2.0.6.pom" + }, + "org.apache.maven:maven-model:pom:2.2.1": { + "layout": "org/apache/maven/maven-model/2.2.1/maven-model-2.2.1.pom", + "sha256": "62dd8e35a2c4432bb22f8250bbfe08639635599b4064d5d747bd24cf3c02fac5", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/2.2.1/maven-model-2.2.1.pom" + }, + "org.apache.maven:maven-model:pom:3.0": { + "layout": "org/apache/maven/maven-model/3.0/maven-model-3.0.pom", + "sha256": "3d6fdeb72b2967f1fa2784134fb832d08d8d6e879b7ace7712f2c7281994fc1e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-model/3.0/maven-model-3.0.pom" + }, + "org.apache.maven:maven-monitor:jar:2.0.6": { + "layout": "org/apache/maven/maven-monitor/2.0.6/maven-monitor-2.0.6.jar", + "sha256": "47289bc307849383d41eddbe3800f1945b2204ab319b0d38e391f6780c37b4e3", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-monitor/2.0.6/maven-monitor-2.0.6.jar" + }, + "org.apache.maven:maven-monitor:pom:2.0.6": { + "layout": "org/apache/maven/maven-monitor/2.0.6/maven-monitor-2.0.6.pom", + "sha256": "2cfd187d6465c5ad99552da8441d31ff0c1ced1808d2f624dbdb49223d3baa89", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-monitor/2.0.6/maven-monitor-2.0.6.pom" + }, + "org.apache.maven:maven-monitor:pom:2.2.1": { + "layout": "org/apache/maven/maven-monitor/2.2.1/maven-monitor-2.2.1.pom", + "sha256": "bc962d48dcebb463c1071004015c4609516d616e884ce36eb7390f9a8095a65b", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-monitor/2.2.1/maven-monitor-2.2.1.pom" + }, + "org.apache.maven:maven-parent:pom:10": { + "layout": "org/apache/maven/maven-parent/10/maven-parent-10.pom", + "sha256": "81fe14cb9779d36e0c610e1049e5b32a6b9974957f257921acf628b31c5486c8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/10/maven-parent-10.pom" + }, + "org.apache.maven:maven-parent:pom:11": { + "layout": "org/apache/maven/maven-parent/11/maven-parent-11.pom", + "sha256": 
"7450c3330cf06c254db9f0dc5ef49eac15502311cf19e0208ba473076ee043d6", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/11/maven-parent-11.pom" + }, + "org.apache.maven:maven-parent:pom:15": { + "layout": "org/apache/maven/maven-parent/15/maven-parent-15.pom", + "sha256": "e25770d5d46dcdfdbb9e38ca04f272c5bdf476d88392ab4044ba90678e616d54", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/15/maven-parent-15.pom" + }, + "org.apache.maven:maven-parent:pom:16": { + "layout": "org/apache/maven/maven-parent/16/maven-parent-16.pom", + "sha256": "70cef83d246309a2aa355c38f2004edda3621ae0bc5c55a7a139eaeef4d1231a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/16/maven-parent-16.pom" + }, + "org.apache.maven:maven-parent:pom:21": { + "layout": "org/apache/maven/maven-parent/21/maven-parent-21.pom", + "sha256": "fc45af8911ea307d1b57564eef1f78b69801e9c11a5619e7eb58d5d00ae9db8e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/21/maven-parent-21.pom" + }, + "org.apache.maven:maven-parent:pom:22": { + "layout": "org/apache/maven/maven-parent/22/maven-parent-22.pom", + "sha256": "165a409718070698b4eb18fdfee4325bc3361cbb8e96a35f4669982cd2adb79a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/22/maven-parent-22.pom" + }, + "org.apache.maven:maven-parent:pom:23": { + "layout": "org/apache/maven/maven-parent/23/maven-parent-23.pom", + "sha256": "5425501edd9e0bd7b01eca53cc92e06836d24851151304f9c6759e1713541685", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/23/maven-parent-23.pom" + }, + "org.apache.maven:maven-parent:pom:27": { + "layout": "org/apache/maven/maven-parent/27/maven-parent-27.pom", + "sha256": "56987ec424c449a9dc4dd427458ea1cb09b38e67ef4c219378a268a5e0d1b8a0", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/27/maven-parent-27.pom" + }, + "org.apache.maven:maven-parent:pom:30": { + "layout": "org/apache/maven/maven-parent/30/maven-parent-30.pom", + "sha256": "70709ad646f5aa57bb44e2a8b4f3de4993b108202ba095bd164e41cdc3181e70", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/30/maven-parent-30.pom" + }, + "org.apache.maven:maven-parent:pom:33": { + "layout": "org/apache/maven/maven-parent/33/maven-parent-33.pom", + "sha256": "3856e3fcd169502d5f12fe2452604ebf6c7c025f15656bfa558ea99ed29d73ea", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/33/maven-parent-33.pom" + }, + "org.apache.maven:maven-parent:pom:34": { + "layout": "org/apache/maven/maven-parent/34/maven-parent-34.pom", + "sha256": "1a8faf7a6a2b848acb26a959954ee115c0d79dbe75a6206fb3b8c7c2f45a237f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/34/maven-parent-34.pom" + }, + "org.apache.maven:maven-parent:pom:37": { + "layout": "org/apache/maven/maven-parent/37/maven-parent-37.pom", + "sha256": "bcf3700301e8221ef14da27a2f0cff71fcd03fc45276bfd84adace401e88bebc", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/37/maven-parent-37.pom" + }, + "org.apache.maven:maven-parent:pom:5": { + "layout": "org/apache/maven/maven-parent/5/maven-parent-5.pom", + "sha256": "5d7c2a229173155823c45380332f221bf0d27e52c9db76e9217940306765bd50", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/5/maven-parent-5.pom" + }, + "org.apache.maven:maven-parent:pom:6": { + "layout": 
"org/apache/maven/maven-parent/6/maven-parent-6.pom", + "sha256": "df8f5fc5e956249833fe9e9bd6df891ca7224ceff1cb729dd84848376545afda", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/6/maven-parent-6.pom" + }, + "org.apache.maven:maven-parent:pom:7": { + "layout": "org/apache/maven/maven-parent/7/maven-parent-7.pom", + "sha256": "54adf65728e30283650f9a9fac0d2d33f60f2c99fbcdea27671e6f6b076f6df3", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/7/maven-parent-7.pom" + }, + "org.apache.maven:maven-parent:pom:9": { + "layout": "org/apache/maven/maven-parent/9/maven-parent-9.pom", + "sha256": "8e7054879496abff4f6960b946dbf67ab33671bf8c9b98bc154b7e0fb8bad5ae", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-parent/9/maven-parent-9.pom" + }, + "org.apache.maven:maven-plugin-api:jar:2.0.4": { + "layout": "org/apache/maven/maven-plugin-api/2.0.4/maven-plugin-api-2.0.4.jar", + "sha256": "9ceee514d1ea380f2ebd146a55c6b504025b91411d53e0248391cbc84397da5a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.0.4/maven-plugin-api-2.0.4.jar" + }, + "org.apache.maven:maven-plugin-api:jar:2.0.6": { + "layout": "org/apache/maven/maven-plugin-api/2.0.6/maven-plugin-api-2.0.6.jar", + "sha256": "a1b54bbe38d25ccd3179c5563156b3b0c0ecd014647c3ebaeba8e92b2e2e5053", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.0.6/maven-plugin-api-2.0.6.jar" + }, + "org.apache.maven:maven-plugin-api:jar:2.2.1": { + "layout": "org/apache/maven/maven-plugin-api/2.2.1/maven-plugin-api-2.2.1.jar", + "sha256": "72a47a963563009c5e8b851491ced3f63e2d276b862bde1f9d10d53abac5b22f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.2.1/maven-plugin-api-2.2.1.jar" + }, + "org.apache.maven:maven-plugin-api:jar:3.0": { + "layout": "org/apache/maven/maven-plugin-api/3.0/maven-plugin-api-3.0.jar", + "sha256": "f5ecc6eaa4a32ee0c115d31525f588f491b2cc75fdeb4ed3c0c662c12ac0c32f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/3.0/maven-plugin-api-3.0.jar" + }, + "org.apache.maven:maven-plugin-api:pom:2.0": { + "layout": "org/apache/maven/maven-plugin-api/2.0/maven-plugin-api-2.0.pom", + "sha256": "701487785d69eeb27ff700a2d7d1af708c5e4b66cd640095193c07fefaff92e7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.0/maven-plugin-api-2.0.pom" + }, + "org.apache.maven:maven-plugin-api:pom:2.0.1": { + "layout": "org/apache/maven/maven-plugin-api/2.0.1/maven-plugin-api-2.0.1.pom", + "sha256": "dd27e6b59137130eae38770b212862a76cb7bd8e98825626a94385f71ea9588e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.0.1/maven-plugin-api-2.0.1.pom" + }, + "org.apache.maven:maven-plugin-api:pom:2.0.4": { + "layout": "org/apache/maven/maven-plugin-api/2.0.4/maven-plugin-api-2.0.4.pom", + "sha256": "86eee056eb9e468bc78febc4dd2f6b69584e18f4670f7615f4b67e094dac662a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.0.4/maven-plugin-api-2.0.4.pom" + }, + "org.apache.maven:maven-plugin-api:pom:2.0.6": { + "layout": "org/apache/maven/maven-plugin-api/2.0.6/maven-plugin-api-2.0.6.pom", + "sha256": "e5886cbf7478ed0a89d4502cb4b6b4d25095a53b74e07439ec0ab3f793405822", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.0.6/maven-plugin-api-2.0.6.pom" + }, + "org.apache.maven:maven-plugin-api:pom:2.2.1": { + "layout": 
"org/apache/maven/maven-plugin-api/2.2.1/maven-plugin-api-2.2.1.pom", + "sha256": "c10d0460c2d5c5076304598965991d6257d1bf31bdef921a17ce3d059bce654e", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/2.2.1/maven-plugin-api-2.2.1.pom" + }, + "org.apache.maven:maven-plugin-api:pom:3.0": { + "layout": "org/apache/maven/maven-plugin-api/3.0/maven-plugin-api-3.0.pom", + "sha256": "8a722af2564205ae996f9035cc04670d3e9e4ae592f5a643c58fb7b0f43e1501", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-api/3.0/maven-plugin-api-3.0.pom" + }, + "org.apache.maven:maven-plugin-descriptor:jar:2.0.6": { + "layout": "org/apache/maven/maven-plugin-descriptor/2.0.6/maven-plugin-descriptor-2.0.6.jar", + "sha256": "e6e99e03921e576358e24a797e3034d0587ad08dac8ebf78b3fcf3e1a96a37d9", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-descriptor/2.0.6/maven-plugin-descriptor-2.0.6.jar" + }, + "org.apache.maven:maven-plugin-descriptor:pom:2.0.6": { + "layout": "org/apache/maven/maven-plugin-descriptor/2.0.6/maven-plugin-descriptor-2.0.6.pom", + "sha256": "77ca407ccc76079a2b60f4d3e652c19552890303fa00748623e372eac09039a1", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-descriptor/2.0.6/maven-plugin-descriptor-2.0.6.pom" + }, + "org.apache.maven:maven-plugin-descriptor:pom:2.2.1": { + "layout": "org/apache/maven/maven-plugin-descriptor/2.2.1/maven-plugin-descriptor-2.2.1.pom", + "sha256": "d4ef608f90dc9599c0cc325ca2ccc2e1ceb439b3d2ff31ae22e30ac1a63a68f0", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-descriptor/2.2.1/maven-plugin-descriptor-2.2.1.pom" + }, + "org.apache.maven:maven-plugin-parameter-documenter:jar:2.0.6": { + "layout": "org/apache/maven/maven-plugin-parameter-documenter/2.0.6/maven-plugin-parameter-documenter-2.0.6.jar", + "sha256": "b5f0dd177264a6e25ce0ed8724f8b6f3bca48b215b9ff4576e1bec7b0773f9e8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-parameter-documenter/2.0.6/maven-plugin-parameter-documenter-2.0.6.jar" + }, + "org.apache.maven:maven-plugin-parameter-documenter:pom:2.0.6": { + "layout": "org/apache/maven/maven-plugin-parameter-documenter/2.0.6/maven-plugin-parameter-documenter-2.0.6.pom", + "sha256": "b5b53025b13fa0013ae2caede21f9af4215c25279db4ee0a1f943aaa8815c174", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-parameter-documenter/2.0.6/maven-plugin-parameter-documenter-2.0.6.pom" + }, + "org.apache.maven:maven-plugin-parameter-documenter:pom:2.2.1": { + "layout": "org/apache/maven/maven-plugin-parameter-documenter/2.2.1/maven-plugin-parameter-documenter-2.2.1.pom", + "sha256": "902b0160f7b81ec76452468f8ae087fc1cfefc08367c84d9197512dfb01d845d", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-parameter-documenter/2.2.1/maven-plugin-parameter-documenter-2.2.1.pom" + }, + "org.apache.maven:maven-plugin-registry:jar:2.0.6": { + "layout": "org/apache/maven/maven-plugin-registry/2.0.6/maven-plugin-registry-2.0.6.jar", + "sha256": "98d6f3fbd17a67736d65e9c4ee484c5d6bc54589042e7cd3db65f87d91070f2a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-registry/2.0.6/maven-plugin-registry-2.0.6.jar" + }, + "org.apache.maven:maven-plugin-registry:jar:2.2.1": { + "layout": "org/apache/maven/maven-plugin-registry/2.2.1/maven-plugin-registry-2.2.1.jar", + "sha256": "4ad0673155d7e0e5cf6d13689802d8d507f38e5ea00a6d2fb92aef206108213d", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-registry/2.2.1/maven-plugin-registry-2.2.1.jar" + }, + "org.apache.maven:maven-plugin-registry:pom:2.0.6": { + "layout": "org/apache/maven/maven-plugin-registry/2.0.6/maven-plugin-registry-2.0.6.pom", + "sha256": "cf633e8cde33e65580776d845756d332d24f7c6d5bf9ae90fc6c43d73cb5fe23", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-registry/2.0.6/maven-plugin-registry-2.0.6.pom" + }, + "org.apache.maven:maven-plugin-registry:pom:2.2.1": { + "layout": "org/apache/maven/maven-plugin-registry/2.2.1/maven-plugin-registry-2.2.1.pom", + "sha256": "3db15325cd620c0e54c3d88b6b7ec1bac43db376e18c9bf56bd0c05402ee6be8", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-plugin-registry/2.2.1/maven-plugin-registry-2.2.1.pom" + }, + "org.apache.maven:maven-profile:jar:2.0.4": { + "layout": "org/apache/maven/maven-profile/2.0.4/maven-profile-2.0.4.jar", + "sha256": "8e75daa4cc3f8a2aa68de0f8a28eaa655fdbb2e04708bad19d63657045ab7f14", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-profile/2.0.4/maven-profile-2.0.4.jar" + }, + "org.apache.maven:maven-profile:jar:2.0.6": { + "layout": "org/apache/maven/maven-profile/2.0.6/maven-profile-2.0.6.jar", + "sha256": "89c07a73ea3b41e6c3c7e126871c898f1741fbfddd35633f9524fda09c25dc01", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-profile/2.0.6/maven-profile-2.0.6.jar" + }, + "org.apache.maven:maven-profile:jar:2.2.1": { + "layout": "org/apache/maven/maven-profile/2.2.1/maven-profile-2.2.1.jar", + "sha256": "ecaffef655fea6b138f0855a12f7dbb59fc0d6bffb5c1bfd31803cccb49ea08c", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-profile/2.2.1/maven-profile-2.2.1.jar" + }, + "org.apache.maven:maven-profile:pom:2.0.4": { + "layout": "org/apache/maven/maven-profile/2.0.4/maven-profile-2.0.4.pom", + "sha256": "3dfe0725e12871248ac59c36abe1316fcfeab2051bd203ba53edfdd0e658ca33", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-profile/2.0.4/maven-profile-2.0.4.pom" + }, + "org.apache.maven:maven-profile:pom:2.0.6": { + "layout": "org/apache/maven/maven-profile/2.0.6/maven-profile-2.0.6.pom", + "sha256": "09455abf6ab86671fab0ae5bba8293c17382c8a9c53fafc3befb3e0720f2b707", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-profile/2.0.6/maven-profile-2.0.6.pom" + }, + "org.apache.maven:maven-profile:pom:2.2.1": { + "layout": "org/apache/maven/maven-profile/2.2.1/maven-profile-2.2.1.pom", + "sha256": "d125b3ade9f694ae60ef835f5ae000b6ba35fba8c34bafd8b40a1961375e63fa", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-profile/2.2.1/maven-profile-2.2.1.pom" + }, + "org.apache.maven:maven-project:jar:2.0.4": { + "layout": "org/apache/maven/maven-project/2.0.4/maven-project-2.0.4.jar", + "sha256": "27a6b287359f57c02a263369034b0c16a597c7f7b99df45e8f9b1c57a48c52a6", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-project/2.0.4/maven-project-2.0.4.jar" + }, + "org.apache.maven:maven-project:jar:2.0.6": { + "layout": "org/apache/maven/maven-project/2.0.6/maven-project-2.0.6.jar", + "sha256": "f091ef5587537947a4c6e43cf200c567e1bf44c101443e8f8cf51e2c126e0195", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-project/2.0.6/maven-project-2.0.6.jar" + }, + "org.apache.maven:maven-project:jar:2.2.1": { + "layout": "org/apache/maven/maven-project/2.2.1/maven-project-2.2.1.jar", + "sha256": 
"24ddb65b7a6c3befb6267ce5f739f237c84eba99389265c30df67c3dd8396a40", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-project/2.2.1/maven-project-2.2.1.jar" + }, + "org.apache.maven:maven-project:pom:2.0.4": { + "layout": "org/apache/maven/maven-project/2.0.4/maven-project-2.0.4.pom", + "sha256": "8dfa987422e1eeaa2f8da8d2696f9bc00c127647830e56254ffe6ae5709faeaf", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-project/2.0.4/maven-project-2.0.4.pom" + }, + "org.apache.maven:maven-project:pom:2.0.6": { + "layout": "org/apache/maven/maven-project/2.0.6/maven-project-2.0.6.pom", + "sha256": "7dd6468c154d99c39a94c7a8c734aa96366864334b6b2ea10778459860cbe3e5", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-project/2.0.6/maven-project-2.0.6.pom" + }, + "org.apache.maven:maven-project:pom:2.2.1": { + "layout": "org/apache/maven/maven-project/2.2.1/maven-project-2.2.1.pom", + "sha256": "34af0baedaef19375b7c1a7da967e9089d5e0754647fdbe9a302590392874b77", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-project/2.2.1/maven-project-2.2.1.pom" + }, + "org.apache.maven:maven-repository-metadata:jar:2.0.2": { + "layout": "org/apache/maven/maven-repository-metadata/2.0.2/maven-repository-metadata-2.0.2.jar", + "sha256": "d033d8cb402ded41b2716b67fad71ed22b2fa6fa4a35fc709e6fb079752eb2e9", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.0.2/maven-repository-metadata-2.0.2.jar" + }, + "org.apache.maven:maven-repository-metadata:jar:2.0.4": { + "layout": "org/apache/maven/maven-repository-metadata/2.0.4/maven-repository-metadata-2.0.4.jar", + "sha256": "ade7663b94122c97aa718a2d19d9fe4318118c750d906031a3ab6492eb558ada", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.0.4/maven-repository-metadata-2.0.4.jar" + }, + "org.apache.maven:maven-repository-metadata:jar:2.0.6": { + "layout": "org/apache/maven/maven-repository-metadata/2.0.6/maven-repository-metadata-2.0.6.jar", + "sha256": "dce817d7c4229d5e666247c05853ef8ad7ac1b72d27953501c4a1ea739f67b25", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.0.6/maven-repository-metadata-2.0.6.jar" + }, + "org.apache.maven:maven-repository-metadata:jar:2.2.1": { + "layout": "org/apache/maven/maven-repository-metadata/2.2.1/maven-repository-metadata-2.2.1.jar", + "sha256": "5fe283f47b0e7f7d95a4252af3fa7a0db4d8f080cd9df308608c0472b8f168a1", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.2.1/maven-repository-metadata-2.2.1.jar" + }, + "org.apache.maven:maven-repository-metadata:jar:3.0": { + "layout": "org/apache/maven/maven-repository-metadata/3.0/maven-repository-metadata-3.0.jar", + "sha256": "c938e4d8cdf0674496749a87e6d3b29aa41b1b35a39898a1ade2bc9eae214c17", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/3.0/maven-repository-metadata-3.0.jar" + }, + "org.apache.maven:maven-repository-metadata:pom:2.0.2": { + "layout": "org/apache/maven/maven-repository-metadata/2.0.2/maven-repository-metadata-2.0.2.pom", + "sha256": "4dd5ff83a2089613e828ee6c8bd6888d8732217392e2c442f6302df4a025e629", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.0.2/maven-repository-metadata-2.0.2.pom" + }, + "org.apache.maven:maven-repository-metadata:pom:2.0.4": { + "layout": "org/apache/maven/maven-repository-metadata/2.0.4/maven-repository-metadata-2.0.4.pom", 
+ "sha256": "2d5e793791748e714becdbcfe29b73f512f9e9f87715982ff10f4e77eb01837a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.0.4/maven-repository-metadata-2.0.4.pom" + }, + "org.apache.maven:maven-repository-metadata:pom:2.0.6": { + "layout": "org/apache/maven/maven-repository-metadata/2.0.6/maven-repository-metadata-2.0.6.pom", + "sha256": "df10657745e39010954fd009276b65b75198bdc40d0adb9916f9697c71fcd986", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.0.6/maven-repository-metadata-2.0.6.pom" + }, + "org.apache.maven:maven-repository-metadata:pom:2.2.1": { + "layout": "org/apache/maven/maven-repository-metadata/2.2.1/maven-repository-metadata-2.2.1.pom", + "sha256": "9dad0f56523955b60a9903f4e8342891355d7a59c77f36a3b53cf6ff2e4df625", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/2.2.1/maven-repository-metadata-2.2.1.pom" + }, + "org.apache.maven:maven-repository-metadata:pom:3.0": { + "layout": "org/apache/maven/maven-repository-metadata/3.0/maven-repository-metadata-3.0.pom", + "sha256": "8d9ce34e4bc02c4df761578c5f48ac3da5af51f259f5e3e4ea9047ec345ed1b7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-repository-metadata/3.0/maven-repository-metadata-3.0.pom" + }, + "org.apache.maven:maven-settings-builder:jar:3.0": { + "layout": "org/apache/maven/maven-settings-builder/3.0/maven-settings-builder-3.0.jar", + "sha256": "e17e706c6f03c453f6000599cab607c2af5f1cc6e3a3b1e6fce27e5ef4999eab", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings-builder/3.0/maven-settings-builder-3.0.jar" + }, + "org.apache.maven:maven-settings-builder:pom:3.0": { + "layout": "org/apache/maven/maven-settings-builder/3.0/maven-settings-builder-3.0.pom", + "sha256": "1e707086b2efabe7527e75539f87e5b4544ed20e8b5ae8aa35bcc24d7ba3a2b0", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings-builder/3.0/maven-settings-builder-3.0.pom" + }, + "org.apache.maven:maven-settings:jar:2.0.4": { + "layout": "org/apache/maven/maven-settings/2.0.4/maven-settings-2.0.4.jar", + "sha256": "6e541ac377478315f192e17690a66dd074c8d479bc09d6463eeada62f3784753", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/2.0.4/maven-settings-2.0.4.jar" + }, + "org.apache.maven:maven-settings:jar:2.0.6": { + "layout": "org/apache/maven/maven-settings/2.0.6/maven-settings-2.0.6.jar", + "sha256": "bb7af11f28a37d21bf20e11705c78aa4adce3aaff1b9b981c031c6be1aa0ec97", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/2.0.6/maven-settings-2.0.6.jar" + }, + "org.apache.maven:maven-settings:jar:2.2.1": { + "layout": "org/apache/maven/maven-settings/2.2.1/maven-settings-2.2.1.jar", + "sha256": "9a9f556713a404e770c9dbdaed7eb086078014c989291960c76fdde6db4192f7", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/2.2.1/maven-settings-2.2.1.jar" + }, + "org.apache.maven:maven-settings:jar:3.0": { + "layout": "org/apache/maven/maven-settings/3.0/maven-settings-3.0.jar", + "sha256": "3b1a46b4bc26a0176acaf99312ff2f3a631faf3224b0996af546aa48bd73c647", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/3.0/maven-settings-3.0.jar" + }, + "org.apache.maven:maven-settings:pom:2.0.4": { + "layout": "org/apache/maven/maven-settings/2.0.4/maven-settings-2.0.4.pom", + "sha256": "9469dbe562fcf278a4a31d6050629790c1158bab3cd39b5ce8f6fb4ccdd13deb", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/2.0.4/maven-settings-2.0.4.pom" + }, + "org.apache.maven:maven-settings:pom:2.0.6": { + "layout": "org/apache/maven/maven-settings/2.0.6/maven-settings-2.0.6.pom", + "sha256": "eececa44387deebbac73192967eabad80f2f43fe96f70b98f3e21951b1bfaea1", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/2.0.6/maven-settings-2.0.6.pom" + }, + "org.apache.maven:maven-settings:pom:2.2.1": { + "layout": "org/apache/maven/maven-settings/2.2.1/maven-settings-2.2.1.pom", + "sha256": "0d25a88a1b1e44801f8912206a40ff249cb5702ee87cf3d243d5213f7bcf534f", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/2.2.1/maven-settings-2.2.1.pom" + }, + "org.apache.maven:maven-settings:pom:3.0": { + "layout": "org/apache/maven/maven-settings/3.0/maven-settings-3.0.pom", + "sha256": "2340855d40ce6125d9a23ab80d94848efa50b2957cf93531e2a7dcf631b4f22b", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-settings/3.0/maven-settings-3.0.pom" + }, + "org.apache.maven:maven-toolchain:jar:3.0-alpha-2": { + "layout": "org/apache/maven/maven-toolchain/3.0-alpha-2/maven-toolchain-3.0-alpha-2.jar", + "sha256": "e0bb64267e6cd6e51d38cc88ba72e8a8adf616c2dc317782127cd61a8ae2a65c", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-toolchain/3.0-alpha-2/maven-toolchain-3.0-alpha-2.jar" + }, + "org.apache.maven:maven-toolchain:pom:3.0-alpha-2": { + "layout": "org/apache/maven/maven-toolchain/3.0-alpha-2/maven-toolchain-3.0-alpha-2.pom", + "sha256": "764a6ebbc61180bdfd5ab35cb9d8460eadcbc05ceea1fbfbcd355f34f8f19c19", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven-toolchain/3.0-alpha-2/maven-toolchain-3.0-alpha-2.pom" + }, + "org.apache.maven:maven:pom:2.0": { + "layout": "org/apache/maven/maven/2.0/maven-2.0.pom", + "sha256": "26f4354dd76180a0a397e7733cdcb5f7ff8744fd327390f989d8e3ecb4ddf2bb", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/2.0/maven-2.0.pom" + }, + "org.apache.maven:maven:pom:2.0.1": { + "layout": "org/apache/maven/maven/2.0.1/maven-2.0.1.pom", + "sha256": "b2d9ce65d7b65efd42a465b517bf32687afaf341eb3bc67dda787c773c79db1a", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/2.0.1/maven-2.0.1.pom" + }, + "org.apache.maven:maven:pom:2.0.2": { + "layout": "org/apache/maven/maven/2.0.2/maven-2.0.2.pom", + "sha256": "1eb80693b5d9c6d8c0124766606cca4e8199f7a07724d09fdf4e9c000ee8c304", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/2.0.2/maven-2.0.2.pom" + }, + "org.apache.maven:maven:pom:2.0.4": { + "layout": "org/apache/maven/maven/2.0.4/maven-2.0.4.pom", + "sha256": "39ed32405d60da7d1e194b3a13e53ead998558668db87d21bef7fe69d9cbd287", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/2.0.4/maven-2.0.4.pom" + }, + "org.apache.maven:maven:pom:2.0.6": { + "layout": "org/apache/maven/maven/2.0.6/maven-2.0.6.pom", + "sha256": "f5755c01058f048c61c3baf11e03c605b9c0ec1021ab40a3dcb76fb5bf51ff34", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/2.0.6/maven-2.0.6.pom" + }, + "org.apache.maven:maven:pom:2.2.1": { + "layout": "org/apache/maven/maven/2.2.1/maven-2.2.1.pom", + "sha256": "d135cff96dcbbc8a5fab30180e557cae620373cf26941d4c738a88896a2d98ed", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/2.2.1/maven-2.2.1.pom" + }, + "org.apache.maven:maven:pom:3.0": { + "layout": "org/apache/maven/maven/3.0/maven-3.0.pom", + "sha256": 
"28fc63720c4a5ff92bf0e358ed55a6f24626f35bccc13cc3e194231e158848f6", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/3.0/maven-3.0.pom" + }, + "org.apache.maven:maven:pom:3.0-alpha-2": { + "layout": "org/apache/maven/maven/3.0-alpha-2/maven-3.0-alpha-2.pom", + "sha256": "4c2f8341518aeb9d488844e334c02fa66dd4bb091bfdeb965c8a848ac6ea5aa2", + "url": "https://repo.maven.apache.org/maven2/org/apache/maven/maven/3.0-alpha-2/maven-3.0-alpha-2.pom" + }, + "org.apache.struts:struts-core:jar:1.3.8": { + "layout": "org/apache/struts/struts-core/1.3.8/struts-core-1.3.8.jar", + "sha256": "a7881710517dd6a50fa81c04d494e1493ad326bcc1adf2eb9493e5eb9ca9e077", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-core/1.3.8/struts-core-1.3.8.jar" + }, + "org.apache.struts:struts-core:pom:1.3.8": { + "layout": "org/apache/struts/struts-core/1.3.8/struts-core-1.3.8.pom", + "sha256": "9751850b8c81e1c20091ab5cbfa309491c7a59f816a8566c475f901a8f554e1e", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-core/1.3.8/struts-core-1.3.8.pom" + }, + "org.apache.struts:struts-master:pom:4": { + "layout": "org/apache/struts/struts-master/4/struts-master-4.pom", + "sha256": "7a9ee24480959cfbef9ccc8ca9b55da3a2bf9cbed81518de340a21289d12cec6", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-master/4/struts-master-4.pom" + }, + "org.apache.struts:struts-parent:pom:1.3.8": { + "layout": "org/apache/struts/struts-parent/1.3.8/struts-parent-1.3.8.pom", + "sha256": "e6137fdb8a229c10d12b8aa4808c55ca4e07b4e759f2f352f3869dec1227c750", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-parent/1.3.8/struts-parent-1.3.8.pom" + }, + "org.apache.struts:struts-taglib:jar:1.3.8": { + "layout": "org/apache/struts/struts-taglib/1.3.8/struts-taglib-1.3.8.jar", + "sha256": "0b54adf308e50d8fdb82066b058bfa57ee244d1cdcf4bf7b6c12fb11d91f44a5", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-taglib/1.3.8/struts-taglib-1.3.8.jar" + }, + "org.apache.struts:struts-taglib:pom:1.3.8": { + "layout": "org/apache/struts/struts-taglib/1.3.8/struts-taglib-1.3.8.pom", + "sha256": "93349fdf9c95458fad1b8105f20ce75155a37d2888fa7daeffbad1b30fa27ec0", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-taglib/1.3.8/struts-taglib-1.3.8.pom" + }, + "org.apache.struts:struts-tiles:jar:1.3.8": { + "layout": "org/apache/struts/struts-tiles/1.3.8/struts-tiles-1.3.8.jar", + "sha256": "3d66e61734b2ddad6e4b34aaa2382480ad6061e59e5e178e346cc275c0429e57", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-tiles/1.3.8/struts-tiles-1.3.8.jar" + }, + "org.apache.struts:struts-tiles:pom:1.3.8": { + "layout": "org/apache/struts/struts-tiles/1.3.8/struts-tiles-1.3.8.pom", + "sha256": "d678f338f62da055bafb93483ec8ddcbed7c2762649e31766ad8347f59ef1c6c", + "url": "https://repo.maven.apache.org/maven2/org/apache/struts/struts-tiles/1.3.8/struts-tiles-1.3.8.pom" + }, + "org.apache.velocity:velocity-tools:jar:2.0": { + "layout": "org/apache/velocity/velocity-tools/2.0/velocity-tools-2.0.jar", + "sha256": "b174eb36bc48c25dce10571c7d3d5dca4e4c1b3e2e31a92b9ed68fe9dea688d9", + "url": "https://repo.maven.apache.org/maven2/org/apache/velocity/velocity-tools/2.0/velocity-tools-2.0.jar" + }, + "org.apache.velocity:velocity-tools:pom:2.0": { + "layout": "org/apache/velocity/velocity-tools/2.0/velocity-tools-2.0.pom", + "sha256": "b12f13ab462281d48c573acabf124e067a9d49e65ec72b27597db9e91f721b95", + "url": 
"https://repo.maven.apache.org/maven2/org/apache/velocity/velocity-tools/2.0/velocity-tools-2.0.pom" + }, + "org.apache.velocity:velocity:jar:1.5": { + "layout": "org/apache/velocity/velocity/1.5/velocity-1.5.jar", + "sha256": "e06403f9cd69033e523bec43195a2a1b6106e28c5d7d053b569ae771e9e49a62", + "url": "https://repo.maven.apache.org/maven2/org/apache/velocity/velocity/1.5/velocity-1.5.jar" + }, + "org.apache.velocity:velocity:pom:1.5": { + "layout": "org/apache/velocity/velocity/1.5/velocity-1.5.pom", + "sha256": "de893ceb40c4659cbcd3f5507edb9474c825f72c5c0b61892cc87ace7197f6fe", + "url": "https://repo.maven.apache.org/maven2/org/apache/velocity/velocity/1.5/velocity-1.5.pom" + }, + "org.apache.velocity:velocity:pom:1.6.2": { + "layout": "org/apache/velocity/velocity/1.6.2/velocity-1.6.2.pom", + "sha256": "f663422e0b92069dcb30d58a2652660b727252ae94e40e8616e710723c64cdec", + "url": "https://repo.maven.apache.org/maven2/org/apache/velocity/velocity/1.6.2/velocity-1.6.2.pom" + }, + "org.apache.velocity:velocity:pom:1.7": { + "layout": "org/apache/velocity/velocity/1.7/velocity-1.7.pom", + "sha256": "a3f97ceab5f073ed93ef8fe6304e35252d83ecf2442c83fe0492b8b73da3b40b", + "url": "https://repo.maven.apache.org/maven2/org/apache/velocity/velocity/1.7/velocity-1.7.pom" + }, + "org.apache:apache:pom:10": { + "layout": "org/apache/apache/10/apache-10.pom", + "sha256": "802feece72852dafcbd0a425a60367c72c5cb9b6ea5aae59481128569189daf9", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/10/apache-10.pom" + }, + "org.apache:apache:pom:11": { + "layout": "org/apache/apache/11/apache-11.pom", + "sha256": "9a4fb5addb41d8116b6441e9e3c48764d9cc562243d5608652bea6db0509297b", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/11/apache-11.pom" + }, + "org.apache:apache:pom:13": { + "layout": "org/apache/apache/13/apache-13.pom", + "sha256": "ff513db0361fd41237bef4784968bc15aae478d4ec0a9496f811072ccaf3841d", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/13/apache-13.pom" + }, + "org.apache:apache:pom:15": { + "layout": "org/apache/apache/15/apache-15.pom", + "sha256": "36c2f2f979ac67b450c0cb480e4e9baf6b40f3a681f22ba9692287d1139ad494", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/15/apache-15.pom" + }, + "org.apache:apache:pom:16": { + "layout": "org/apache/apache/16/apache-16.pom", + "sha256": "9f85ff2fd7d6cb3097aa47fb419ee7f0ebe869109f98aba9f4eca3f49e74a40e", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/16/apache-16.pom" + }, + "org.apache:apache:pom:17": { + "layout": "org/apache/apache/17/apache-17.pom", + "sha256": "398044b74b5a719326be218ae08124e5e2f3318ab5d78fe199d504efc2e0d43f", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/17/apache-17.pom" + }, + "org.apache:apache:pom:18": { + "layout": "org/apache/apache/18/apache-18.pom", + "sha256": "7831307285fd475bbc36b20ae38e7882f11c3153b1d5930f852d44eda8f33c17", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/18/apache-18.pom" + }, + "org.apache:apache:pom:19": { + "layout": "org/apache/apache/19/apache-19.pom", + "sha256": "91f7a33096ea69bac2cbaf6d01feb934cac002c48d8c8cfa9c240b40f1ec21df", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/19/apache-19.pom" + }, + "org.apache:apache:pom:2": { + "layout": "org/apache/apache/2/apache-2.pom", + "sha256": "c0242e48994aad79edfa2e983959d840aa6ba0930317c64c2dff4c3134b5e0e1", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/2/apache-2.pom" + }, + 
"org.apache:apache:pom:21": { + "layout": "org/apache/apache/21/apache-21.pom", + "sha256": "af10c108da014f17cafac7b52b2b4b5a3a1c18265fa2af97a325d9143537b380", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/21/apache-21.pom" + }, + "org.apache:apache:pom:23": { + "layout": "org/apache/apache/23/apache-23.pom", + "sha256": "bc10624e0623f36577fac5639ca2936d3240ed152fb6d8d533ab4d270543491c", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/23/apache-23.pom" + }, + "org.apache:apache:pom:27": { + "layout": "org/apache/apache/27/apache-27.pom", + "sha256": "b2b0fc69e22a650c3892f1c366d77076f29575c6738df4c7a70a44844484cdf9", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/27/apache-27.pom" + }, + "org.apache:apache:pom:3": { + "layout": "org/apache/apache/3/apache-3.pom", + "sha256": "393c50afb4b7aa6eb57e5377a55a1a0610b19f75b52ece01308db04a1187a20e", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/3/apache-3.pom" + }, + "org.apache:apache:pom:4": { + "layout": "org/apache/apache/4/apache-4.pom", + "sha256": "9e9323a26ba8eb2394efef0c96d31b70df570808630dc147cab1e73541cc5194", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/4/apache-4.pom" + }, + "org.apache:apache:pom:5": { + "layout": "org/apache/apache/5/apache-5.pom", + "sha256": "1933a6037439b389bda2feaccfc0113880fd8d88f7d240d2052b91108dd5ae89", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/5/apache-5.pom" + }, + "org.apache:apache:pom:6": { + "layout": "org/apache/apache/6/apache-6.pom", + "sha256": "12edb5096e13f40c362d0bd40902589fa9586505123fa26799ce50b116fa5bb3", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/6/apache-6.pom" + }, + "org.apache:apache:pom:7": { + "layout": "org/apache/apache/7/apache-7.pom", + "sha256": "1397ce1db433adc9f223dbf07496d133681448751f4ae29e58f68e78fb4b6c25", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/7/apache-7.pom" + }, + "org.apache:apache:pom:9": { + "layout": "org/apache/apache/9/apache-9.pom", + "sha256": "4946e60a547c8eda69f3bc23c5b6f0dadcf8469ea49b1d1da7de34aecfcf18dd", + "url": "https://repo.maven.apache.org/maven2/org/apache/apache/9/apache-9.pom" + }, + "org.beanshell:beanshell:pom:2.0b4": { + "layout": "org/beanshell/beanshell/2.0b4/beanshell-2.0b4.pom", + "sha256": "0792e581f159243127661c65859d14f4f0b66fc774d478da2a874292bb47b874", + "url": "https://repo.maven.apache.org/maven2/org/beanshell/beanshell/2.0b4/beanshell-2.0b4.pom" + }, + "org.beanshell:bsh:jar:2.0b4": { + "layout": "org/beanshell/bsh/2.0b4/bsh-2.0b4.jar", + "sha256": "91395c07885839a8c6986d5b7c577cd9bacf01bf129c89141f35e8ea858427b6", + "url": "https://repo.maven.apache.org/maven2/org/beanshell/bsh/2.0b4/bsh-2.0b4.jar" + }, + "org.beanshell:bsh:pom:2.0b4": { + "layout": "org/beanshell/bsh/2.0b4/bsh-2.0b4.pom", + "sha256": "9c048fddf29663d9380a68f414d089717ae3e30b915e6c96015bf4a64a82f18f", + "url": "https://repo.maven.apache.org/maven2/org/beanshell/bsh/2.0b4/bsh-2.0b4.pom" + }, + "org.codehaus.mojo:animal-sniffer-annotations:jar:1.14": { + "layout": "org/codehaus/mojo/animal-sniffer-annotations/1.14/animal-sniffer-annotations-1.14.jar", + "sha256": "2068320bd6bad744c3673ab048f67e30bef8f518996fa380033556600669905d", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/animal-sniffer-annotations/1.14/animal-sniffer-annotations-1.14.jar" + }, + "org.codehaus.mojo:animal-sniffer-annotations:pom:1.14": { + "layout": 
"org/codehaus/mojo/animal-sniffer-annotations/1.14/animal-sniffer-annotations-1.14.pom", + "sha256": "1879f19a05991e3ed95910b96689333396b0c467a215dc4d1f90018404b72a26", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/animal-sniffer-annotations/1.14/animal-sniffer-annotations-1.14.pom" + }, + "org.codehaus.mojo:animal-sniffer-parent:pom:1.14": { + "layout": "org/codehaus/mojo/animal-sniffer-parent/1.14/animal-sniffer-parent-1.14.pom", + "sha256": "f51550a06b1410bd4962cb0e71df0b921a60a7ef47bfa9c4825a14be72316eea", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/animal-sniffer-parent/1.14/animal-sniffer-parent-1.14.pom" + }, + "org.codehaus.mojo:build-helper-maven-plugin:jar:1.8": { + "layout": "org/codehaus/mojo/build-helper-maven-plugin/1.8/build-helper-maven-plugin-1.8.jar", + "sha256": "402bcefdddf087f4ad2774cf2cdf7c3b1a05a2bbc5cd5da2b320b2f05aecc0a6", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/build-helper-maven-plugin/1.8/build-helper-maven-plugin-1.8.jar" + }, + "org.codehaus.mojo:build-helper-maven-plugin:pom:1.8": { + "layout": "org/codehaus/mojo/build-helper-maven-plugin/1.8/build-helper-maven-plugin-1.8.pom", + "sha256": "ef9cf388f226e19823e5a15e6684c16af8fe7196f7e361df22235135ec9fd0a4", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/build-helper-maven-plugin/1.8/build-helper-maven-plugin-1.8.pom" + }, + "org.codehaus.mojo:javacc-maven-plugin:jar:2.6": { + "layout": "org/codehaus/mojo/javacc-maven-plugin/2.6/javacc-maven-plugin-2.6.jar", + "sha256": "6ad67577c21378f29329b8e3224643fb956884f163bf657cf8b3080128b3728d", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/javacc-maven-plugin/2.6/javacc-maven-plugin-2.6.jar" + }, + "org.codehaus.mojo:javacc-maven-plugin:pom:2.6": { + "layout": "org/codehaus/mojo/javacc-maven-plugin/2.6/javacc-maven-plugin-2.6.pom", + "sha256": "9860c27a42398257207da5bba84df009a95a19a166952676de032057d24eaeab", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/javacc-maven-plugin/2.6/javacc-maven-plugin-2.6.pom" + }, + "org.codehaus.mojo:license-maven-plugin:jar:1.14": { + "layout": "org/codehaus/mojo/license-maven-plugin/1.14/license-maven-plugin-1.14.jar", + "sha256": "0a6dc7094884882f863f30e7cdf0ff3cb52c5860a3cffcaa9325a6d81a1974e3", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/license-maven-plugin/1.14/license-maven-plugin-1.14.jar" + }, + "org.codehaus.mojo:license-maven-plugin:pom:1.14": { + "layout": "org/codehaus/mojo/license-maven-plugin/1.14/license-maven-plugin-1.14.pom", + "sha256": "d9748a6c1d911eed4291b8fd091c5e4adbe970d6b1902a86d03cfa012dedd6c6", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/license-maven-plugin/1.14/license-maven-plugin-1.14.pom" + }, + "org.codehaus.mojo:mojo-parent:pom:22": { + "layout": "org/codehaus/mojo/mojo-parent/22/mojo-parent-22.pom", + "sha256": "acbfefa664c719655b0c41d83fae8b65c2d65db94c19621d18473b6ba71e65cd", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/mojo-parent/22/mojo-parent-22.pom" + }, + "org.codehaus.mojo:mojo-parent:pom:30": { + "layout": "org/codehaus/mojo/mojo-parent/30/mojo-parent-30.pom", + "sha256": "1fdd62b17ba4533265e5d4bd5a42081ed3f0649e2cefb6546a94ca14527b449b", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/mojo-parent/30/mojo-parent-30.pom" + }, + "org.codehaus.mojo:mojo-parent:pom:34": { + "layout": "org/codehaus/mojo/mojo-parent/34/mojo-parent-34.pom", + "sha256": 
"3e395d6fbc43c09a3774cac8694ce527398305ea3fd5492d80e25af27d382a9c", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/mojo-parent/34/mojo-parent-34.pom" + }, + "org.codehaus.mojo:mojo-parent:pom:40": { + "layout": "org/codehaus/mojo/mojo-parent/40/mojo-parent-40.pom", + "sha256": "fc648dcdc404f8bf66e0583914ecf980176618201227bea114fae85043cb755e", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/mojo/mojo-parent/40/mojo-parent-40.pom" + }, + "org.codehaus.plexus:plexus-archiver:jar:2.1": { + "layout": "org/codehaus/plexus/plexus-archiver/2.1/plexus-archiver-2.1.jar", + "sha256": "5a49a4c13e29da41c24bbb35b2f94b82bde259e25d4dd55ee3159e31d20677b8", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-archiver/2.1/plexus-archiver-2.1.jar" + }, + "org.codehaus.plexus:plexus-archiver:pom:2.1": { + "layout": "org/codehaus/plexus/plexus-archiver/2.1/plexus-archiver-2.1.pom", + "sha256": "d2e14a5c6bed6ac4fc27d57f6ba227bb64a96742f71ee3fbafd2c019fa9d4449", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-archiver/2.1/plexus-archiver-2.1.pom" + }, + "org.codehaus.plexus:plexus-classworlds:jar:2.2.3": { + "layout": "org/codehaus/plexus/plexus-classworlds/2.2.3/plexus-classworlds-2.2.3.jar", + "sha256": "7d95ad21733b060bfda2142b62439a196bde7644f9f127c299ae86d92179b518", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-classworlds/2.2.3/plexus-classworlds-2.2.3.jar" + }, + "org.codehaus.plexus:plexus-classworlds:pom:1.2-alpha-9": { + "layout": "org/codehaus/plexus/plexus-classworlds/1.2-alpha-9/plexus-classworlds-1.2-alpha-9.pom", + "sha256": "224fe4d0c650f085c012f0a03c1995c598c7b5c506bc5350b727c75874330f00", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-classworlds/1.2-alpha-9/plexus-classworlds-1.2-alpha-9.pom" + }, + "org.codehaus.plexus:plexus-classworlds:pom:2.2.3": { + "layout": "org/codehaus/plexus/plexus-classworlds/2.2.3/plexus-classworlds-2.2.3.pom", + "sha256": "a2d14b6752e30a100a6cb03c040d0160b71b61928daf8ea97cabfb4a3335b213", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-classworlds/2.2.3/plexus-classworlds-2.2.3.pom" + }, + "org.codehaus.plexus:plexus-compiler-api:jar:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compiler-api/2.8.2/plexus-compiler-api-2.8.2.jar", + "sha256": "643b756033ee640e681a62b4a78de1f11b2aceb129f4331d27809fa791d9685e", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compiler-api/2.8.2/plexus-compiler-api-2.8.2.jar" + }, + "org.codehaus.plexus:plexus-compiler-api:pom:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compiler-api/2.8.2/plexus-compiler-api-2.8.2.pom", + "sha256": "bd8199751c891b82d42b09561d2b42e3af6a57ce3c167c148bac202bc7b22eaf", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compiler-api/2.8.2/plexus-compiler-api-2.8.2.pom" + }, + "org.codehaus.plexus:plexus-compiler-javac:jar:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compiler-javac/2.8.2/plexus-compiler-javac-2.8.2.jar", + "sha256": "0d83c0bd17953e929f4a6b28cb809a5a79e4fbae82340cfe93785f10ae0363e4", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compiler-javac/2.8.2/plexus-compiler-javac-2.8.2.jar" + }, + "org.codehaus.plexus:plexus-compiler-javac:pom:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compiler-javac/2.8.2/plexus-compiler-javac-2.8.2.pom", + "sha256": "3d300388d9cca3fe814eb3ec3a7e7fd369419fc823000e33e2671a08147583ec", + "url": 
"https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compiler-javac/2.8.2/plexus-compiler-javac-2.8.2.pom" + }, + "org.codehaus.plexus:plexus-compiler-manager:jar:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compiler-manager/2.8.2/plexus-compiler-manager-2.8.2.jar", + "sha256": "56ccc3d5815133308b2edee65b8b0b041db14913f7459a46ab1e722e39b70d61", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compiler-manager/2.8.2/plexus-compiler-manager-2.8.2.jar" + }, + "org.codehaus.plexus:plexus-compiler-manager:pom:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compiler-manager/2.8.2/plexus-compiler-manager-2.8.2.pom", + "sha256": "6551ace4307437594c9891d63c30f2cdaddb425020e6d6a3b69872e33bd3be83", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compiler-manager/2.8.2/plexus-compiler-manager-2.8.2.pom" + }, + "org.codehaus.plexus:plexus-compiler:pom:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compiler/2.8.2/plexus-compiler-2.8.2.pom", + "sha256": "1cd7b9f07ba6708d2036009aee15b0db0b76d6ad962eb7fe0ab1d759a52ec7bd", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compiler/2.8.2/plexus-compiler-2.8.2.pom" + }, + "org.codehaus.plexus:plexus-compilers:pom:2.8.2": { + "layout": "org/codehaus/plexus/plexus-compilers/2.8.2/plexus-compilers-2.8.2.pom", + "sha256": "655e73eed878cabb71370fadac653e5f6a821931c437323e9addc5cfe2788bf3", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-compilers/2.8.2/plexus-compilers-2.8.2.pom" + }, + "org.codehaus.plexus:plexus-component-annotations:jar:1.5.5": { + "layout": "org/codehaus/plexus/plexus-component-annotations/1.5.5/plexus-component-annotations-1.5.5.jar", + "sha256": "4df7a6a7be64b35bbccf60b5c115697f9ea3421d22674ae67135dde375fcca1f", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/1.5.5/plexus-component-annotations-1.5.5.jar" + }, + "org.codehaus.plexus:plexus-component-annotations:jar:1.6": { + "layout": "org/codehaus/plexus/plexus-component-annotations/1.6/plexus-component-annotations-1.6.jar", + "sha256": "2b3a6ca5f19a9ad490bc233f45e68d3093c8c01b4acc3c1d14bad4ca7c7ff438", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/1.6/plexus-component-annotations-1.6.jar" + }, + "org.codehaus.plexus:plexus-component-annotations:jar:2.0.0": { + "layout": "org/codehaus/plexus/plexus-component-annotations/2.0.0/plexus-component-annotations-2.0.0.jar", + "sha256": "405eef6fc9188241ec88579c3e473f5c8997455c69bcd62e142492aca15106bc", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/2.0.0/plexus-component-annotations-2.0.0.jar" + }, + "org.codehaus.plexus:plexus-component-annotations:jar:2.1.1": { + "layout": "org/codehaus/plexus/plexus-component-annotations/2.1.1/plexus-component-annotations-2.1.1.jar", + "sha256": "623711b3db0d3c6beaaf22578aea8b2706baee8b26ea328dfe94d96cbe2835dd", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/2.1.1/plexus-component-annotations-2.1.1.jar" + }, + "org.codehaus.plexus:plexus-component-annotations:pom:1.5.4": { + "layout": "org/codehaus/plexus/plexus-component-annotations/1.5.4/plexus-component-annotations-1.5.4.pom", + "sha256": "0124227bc47efc9a00b9aa4fc3ef7f70823d322213c26489e5369a914339c84a", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/1.5.4/plexus-component-annotations-1.5.4.pom" + 
}, + "org.codehaus.plexus:plexus-component-annotations:pom:1.5.5": { + "layout": "org/codehaus/plexus/plexus-component-annotations/1.5.5/plexus-component-annotations-1.5.5.pom", + "sha256": "815f3ec316b8c5fa701385fdf4009bfb51e07d780e8f6a6e2afe720c52d7e292", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/1.5.5/plexus-component-annotations-1.5.5.pom" + }, + "org.codehaus.plexus:plexus-component-annotations:pom:1.6": { + "layout": "org/codehaus/plexus/plexus-component-annotations/1.6/plexus-component-annotations-1.6.pom", + "sha256": "593a0ff086fb81700e17707c303f8552880bf2a50ce41d9dcb5918e8443710dd", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/1.6/plexus-component-annotations-1.6.pom" + }, + "org.codehaus.plexus:plexus-component-annotations:pom:2.0.0": { + "layout": "org/codehaus/plexus/plexus-component-annotations/2.0.0/plexus-component-annotations-2.0.0.pom", + "sha256": "dcf193612b315713771e267b42de2d44de090be5945b2577345ed5ab8de2d271", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/2.0.0/plexus-component-annotations-2.0.0.pom" + }, + "org.codehaus.plexus:plexus-component-annotations:pom:2.1.1": { + "layout": "org/codehaus/plexus/plexus-component-annotations/2.1.1/plexus-component-annotations-2.1.1.pom", + "sha256": "2389d533bb8e0bc90f45016342cf8af9ec80caa28b44922a91ebcccceef8a51d", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-component-annotations/2.1.1/plexus-component-annotations-2.1.1.pom" + }, + "org.codehaus.plexus:plexus-components:pom:1.1.12": { + "layout": "org/codehaus/plexus/plexus-components/1.1.12/plexus-components-1.1.12.pom", + "sha256": "a854365061c28821ddf1a520b8a197991613fd1d56f50f42c468b789b4714f20", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/1.1.12/plexus-components-1.1.12.pom" + }, + "org.codehaus.plexus:plexus-components:pom:1.1.14": { + "layout": "org/codehaus/plexus/plexus-components/1.1.14/plexus-components-1.1.14.pom", + "sha256": "381d72c526be217b770f9f8c3f749a86d3b1548ac5c1fcb48d267530ec60d43f", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/1.1.14/plexus-components-1.1.14.pom" + }, + "org.codehaus.plexus:plexus-components:pom:1.1.15": { + "layout": "org/codehaus/plexus/plexus-components/1.1.15/plexus-components-1.1.15.pom", + "sha256": "7940cd305323b8409fdb7e78398f6efd8ff8a642c7dd8f353e519abe91ab0da3", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/1.1.15/plexus-components-1.1.15.pom" + }, + "org.codehaus.plexus:plexus-components:pom:1.1.18": { + "layout": "org/codehaus/plexus/plexus-components/1.1.18/plexus-components-1.1.18.pom", + "sha256": "ef5dbc7fa918b6dbba71d27e5b3d7a00df624bcfa2549a7297f36fe275f634d7", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/1.1.18/plexus-components-1.1.18.pom" + }, + "org.codehaus.plexus:plexus-components:pom:1.1.19": { + "layout": "org/codehaus/plexus/plexus-components/1.1.19/plexus-components-1.1.19.pom", + "sha256": "d3e7f3adf5d002c01f362760ad1c2dce98e6354009a7c07c0a105c3eccb17159", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/1.1.19/plexus-components-1.1.19.pom" + }, + "org.codehaus.plexus:plexus-components:pom:1.1.4": { + "layout": "org/codehaus/plexus/plexus-components/1.1.4/plexus-components-1.1.4.pom", + "sha256": 
"3002aea8cc5ff29b0702e01b7fe853f6950d63722c1f1cb55be36fed801b7d67", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/1.1.4/plexus-components-1.1.4.pom" + }, + "org.codehaus.plexus:plexus-components:pom:1.1.7": { + "layout": "org/codehaus/plexus/plexus-components/1.1.7/plexus-components-1.1.7.pom", + "sha256": "c60215df2b9a6dc1c8129bac53f9bf6374c9fd21480072978c342549aaf4ebf9", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/1.1.7/plexus-components-1.1.7.pom" + }, + "org.codehaus.plexus:plexus-components:pom:4.0": { + "layout": "org/codehaus/plexus/plexus-components/4.0/plexus-components-4.0.pom", + "sha256": "1a5c0f95f65ed3e98edcf4f3b27c21cbcb14567384d9e4cf07f83a49675347ed", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-components/4.0/plexus-components-4.0.pom" + }, + "org.codehaus.plexus:plexus-container-default:jar:1.0-alpha-9": { + "layout": "org/codehaus/plexus/plexus-container-default/1.0-alpha-9/plexus-container-default-1.0-alpha-9.jar", + "sha256": "dccfc47a4245e2d648e3bdeadb7a4daf51efc70fbd8b7b456454377c9cc5584a", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-container-default/1.0-alpha-9/plexus-container-default-1.0-alpha-9.jar" + }, + "org.codehaus.plexus:plexus-container-default:jar:1.0-alpha-9-stable-1": { + "layout": "org/codehaus/plexus/plexus-container-default/1.0-alpha-9-stable-1/plexus-container-default-1.0-alpha-9-stable-1.jar", + "sha256": "7c758612888782ccfe376823aee7cdcc7e0cdafb097f7ef50295a0b0c3a16edf", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-container-default/1.0-alpha-9-stable-1/plexus-container-default-1.0-alpha-9-stable-1.jar" + }, + "org.codehaus.plexus:plexus-container-default:pom:1.0-alpha-30": { + "layout": "org/codehaus/plexus/plexus-container-default/1.0-alpha-30/plexus-container-default-1.0-alpha-30.pom", + "sha256": "8858248de2cab772fa26741b8972137058a6f4457b0a2b3e7cd8771d03d9373b", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-container-default/1.0-alpha-30/plexus-container-default-1.0-alpha-30.pom" + }, + "org.codehaus.plexus:plexus-container-default:pom:1.0-alpha-7": { + "layout": "org/codehaus/plexus/plexus-container-default/1.0-alpha-7/plexus-container-default-1.0-alpha-7.pom", + "sha256": "39b2035b94953ac38162d6b671be1aac22e383f2e38354cc011b1b1e9b104d23", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-container-default/1.0-alpha-7/plexus-container-default-1.0-alpha-7.pom" + }, + "org.codehaus.plexus:plexus-container-default:pom:1.0-alpha-8": { + "layout": "org/codehaus/plexus/plexus-container-default/1.0-alpha-8/plexus-container-default-1.0-alpha-8.pom", + "sha256": "b6617c2c7169b8d6c2440972e0d36e1265a80ed1d0def0263f1b400f4f3494a3", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-container-default/1.0-alpha-8/plexus-container-default-1.0-alpha-8.pom" + }, + "org.codehaus.plexus:plexus-container-default:pom:1.0-alpha-9": { + "layout": "org/codehaus/plexus/plexus-container-default/1.0-alpha-9/plexus-container-default-1.0-alpha-9.pom", + "sha256": "8864b08e353614d0c4111f455f8b3907179f8b0be6c0845bc7f31ef6daf206ec", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-container-default/1.0-alpha-9/plexus-container-default-1.0-alpha-9.pom" + }, + "org.codehaus.plexus:plexus-container-default:pom:1.0-alpha-9-stable-1": { + "layout": 
"org/codehaus/plexus/plexus-container-default/1.0-alpha-9-stable-1/plexus-container-default-1.0-alpha-9-stable-1.pom", + "sha256": "ef71d45a49edfe76be0f520312a76bc2aae73ec0743a5ffffe10d30122c6e2b2", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-container-default/1.0-alpha-9-stable-1/plexus-container-default-1.0-alpha-9-stable-1.pom" + }, + "org.codehaus.plexus:plexus-containers:pom:1.0-alpha-30": { + "layout": "org/codehaus/plexus/plexus-containers/1.0-alpha-30/plexus-containers-1.0-alpha-30.pom", + "sha256": "74b039c9d08454c6abcf2f2581e0cfddfabd01360480876039c8de5104878d3b", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-containers/1.0-alpha-30/plexus-containers-1.0-alpha-30.pom" + }, + "org.codehaus.plexus:plexus-containers:pom:1.0.3": { + "layout": "org/codehaus/plexus/plexus-containers/1.0.3/plexus-containers-1.0.3.pom", + "sha256": "7c75075badcb014443ee94c8c4cad2f4a9905be3ce9430fe7b220afc7fa3a80f", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-containers/1.0.3/plexus-containers-1.0.3.pom" + }, + "org.codehaus.plexus:plexus-containers:pom:1.5.4": { + "layout": "org/codehaus/plexus/plexus-containers/1.5.4/plexus-containers-1.5.4.pom", + "sha256": "18b4a1b0a65c0d6b7cf9cd48ee9f3467b6deb8ace4c1309522c184f94c4cfa2e", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-containers/1.5.4/plexus-containers-1.5.4.pom" + }, + "org.codehaus.plexus:plexus-containers:pom:1.5.5": { + "layout": "org/codehaus/plexus/plexus-containers/1.5.5/plexus-containers-1.5.5.pom", + "sha256": "1bc264824ec876b0ca6f4f5838175c541c638cbc43326a268b9aee7d4778b5ef", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-containers/1.5.5/plexus-containers-1.5.5.pom" + }, + "org.codehaus.plexus:plexus-containers:pom:1.6": { + "layout": "org/codehaus/plexus/plexus-containers/1.6/plexus-containers-1.6.pom", + "sha256": "0829f2a50f20b098223d1f92c19badb0738267d953726ac56cac9f3c9c6fc9bb", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-containers/1.6/plexus-containers-1.6.pom" + }, + "org.codehaus.plexus:plexus-containers:pom:2.0.0": { + "layout": "org/codehaus/plexus/plexus-containers/2.0.0/plexus-containers-2.0.0.pom", + "sha256": "be5e3f8e59edce852a0fdaef8caedb32f364bf13db654d15f98e17930e456487", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-containers/2.0.0/plexus-containers-2.0.0.pom" + }, + "org.codehaus.plexus:plexus-containers:pom:2.1.1": { + "layout": "org/codehaus/plexus/plexus-containers/2.1.1/plexus-containers-2.1.1.pom", + "sha256": "c129ae94bc6de8ba2596fcdad4b3fcfe4f67640a4cb6d76a5540182214aa833c", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-containers/2.1.1/plexus-containers-2.1.1.pom" + }, + "org.codehaus.plexus:plexus-digest:jar:1.0": { + "layout": "org/codehaus/plexus/plexus-digest/1.0/plexus-digest-1.0.jar", + "sha256": "d88701a0c1097f97256be35f788dccc4a9c9d377cafc9184d23c262f0e1aff18", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-digest/1.0/plexus-digest-1.0.jar" + }, + "org.codehaus.plexus:plexus-digest:pom:1.0": { + "layout": "org/codehaus/plexus/plexus-digest/1.0/plexus-digest-1.0.pom", + "sha256": "52a0c2d6e4022fc4f08753c9e71b058686960e6e1a783f6958b2cdf3c5cba19b", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-digest/1.0/plexus-digest-1.0.pom" + }, + "org.codehaus.plexus:plexus-i18n:jar:1.0-beta-6": { + "layout": 
"org/codehaus/plexus/plexus-i18n/1.0-beta-6/plexus-i18n-1.0-beta-6.jar", + "sha256": "8d5c55f504184711cf155af434d0a501efe982e3cc6cba41aaf53c50968d1978", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-i18n/1.0-beta-6/plexus-i18n-1.0-beta-6.jar" + }, + "org.codehaus.plexus:plexus-i18n:jar:1.0-beta-7": { + "layout": "org/codehaus/plexus/plexus-i18n/1.0-beta-7/plexus-i18n-1.0-beta-7.jar", + "sha256": "fff07392dc6b29ef90c435ab004671a715f0aa36653e53b44c358eb842ce67d9", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-i18n/1.0-beta-7/plexus-i18n-1.0-beta-7.jar" + }, + "org.codehaus.plexus:plexus-i18n:pom:1.0-beta-6": { + "layout": "org/codehaus/plexus/plexus-i18n/1.0-beta-6/plexus-i18n-1.0-beta-6.pom", + "sha256": "2b10aa54fc08ef9612acb72b2f9be9fac65fb3f8e5b4a05a4a04a448efd676fe", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-i18n/1.0-beta-6/plexus-i18n-1.0-beta-6.pom" + }, + "org.codehaus.plexus:plexus-i18n:pom:1.0-beta-7": { + "layout": "org/codehaus/plexus/plexus-i18n/1.0-beta-7/plexus-i18n-1.0-beta-7.pom", + "sha256": "9a4c894f2601f403396363f31b22ff55f665de9393c0ec941df38a05d5ab4d66", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-i18n/1.0-beta-7/plexus-i18n-1.0-beta-7.pom" + }, + "org.codehaus.plexus:plexus-interactivity-api:jar:1.0-alpha-4": { + "layout": "org/codehaus/plexus/plexus-interactivity-api/1.0-alpha-4/plexus-interactivity-api-1.0-alpha-4.jar", + "sha256": "4f60eb379f93d8b616bc3b4d299f466bc54fcced959f7ad082dae78b89d6a3f0", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interactivity-api/1.0-alpha-4/plexus-interactivity-api-1.0-alpha-4.jar" + }, + "org.codehaus.plexus:plexus-interactivity-api:pom:1.0-alpha-4": { + "layout": "org/codehaus/plexus/plexus-interactivity-api/1.0-alpha-4/plexus-interactivity-api-1.0-alpha-4.pom", + "sha256": "42aada809ec125bbfe4d38f9d196bbeb59f298b389df96e610269e369b8eb2c9", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interactivity-api/1.0-alpha-4/plexus-interactivity-api-1.0-alpha-4.pom" + }, + "org.codehaus.plexus:plexus-interpolation:jar:1.11": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.11/plexus-interpolation-1.11.jar", + "sha256": "fd9507feb858fa620d1b4aa4b7039fdea1a77e09d3fd28cfbddfff468d9d8c28", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.11/plexus-interpolation-1.11.jar" + }, + "org.codehaus.plexus:plexus-interpolation:jar:1.13": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.13/plexus-interpolation-1.13.jar", + "sha256": "8e149132ff32907f39560398e222f1733ae959cedd099b32ee31c7b9d3bc1d6f", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.13/plexus-interpolation-1.13.jar" + }, + "org.codehaus.plexus:plexus-interpolation:jar:1.14": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.14/plexus-interpolation-1.14.jar", + "sha256": "7fc63378d3e84663619b9bedace9f9fe78b276c2be3c62ca2245449294c84176", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.14/plexus-interpolation-1.14.jar" + }, + "org.codehaus.plexus:plexus-interpolation:jar:1.15": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.15/plexus-interpolation-1.15.jar", + "sha256": "7111a4eb5f137781b68127a5a02d0208c28f26d2626fbd7a81d1172cd56449a8", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.15/plexus-interpolation-1.15.jar" + }, 
+ "org.codehaus.plexus:plexus-interpolation:pom:1.11": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.11/plexus-interpolation-1.11.pom", + "sha256": "b84d281f59b9da528139e0752a0e1cab0bd98d52c58442b00e45c9748e1d9eee", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.11/plexus-interpolation-1.11.pom" + }, + "org.codehaus.plexus:plexus-interpolation:pom:1.12": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.12/plexus-interpolation-1.12.pom", + "sha256": "9e1d13d08550026de20cf8120b7e62e0ee842fc9df94140974c082b067fa5b72", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.12/plexus-interpolation-1.12.pom" + }, + "org.codehaus.plexus:plexus-interpolation:pom:1.13": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.13/plexus-interpolation-1.13.pom", + "sha256": "e37731ea5ed19d276f33b77c93399fb5df026e1191133b15fbeab73342c6363b", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.13/plexus-interpolation-1.13.pom" + }, + "org.codehaus.plexus:plexus-interpolation:pom:1.14": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.14/plexus-interpolation-1.14.pom", + "sha256": "d08155c497df37b2c3d9b5b0dfdb02ec0525b2070b5be3739fffde942fcac9af", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.14/plexus-interpolation-1.14.pom" + }, + "org.codehaus.plexus:plexus-interpolation:pom:1.15": { + "layout": "org/codehaus/plexus/plexus-interpolation/1.15/plexus-interpolation-1.15.pom", + "sha256": "4a9f28e95f82a80fbd0632e6305b3c676f4d5e946f93028226d5365f53492bc7", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-interpolation/1.15/plexus-interpolation-1.15.pom" + }, + "org.codehaus.plexus:plexus-io:jar:2.0.2": { + "layout": "org/codehaus/plexus/plexus-io/2.0.2/plexus-io-2.0.2.jar", + "sha256": "e2f88c8813463aabfb1b689f0be551c24c58e8e5508fd5a44f8d20a327b1517f", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-io/2.0.2/plexus-io-2.0.2.jar" + }, + "org.codehaus.plexus:plexus-io:pom:2.0.2": { + "layout": "org/codehaus/plexus/plexus-io/2.0.2/plexus-io-2.0.2.pom", + "sha256": "67a1ce072ad9ff4ce37985a1c32827eaeda4660cabe7f69eff2ca1ebff2d23bd", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-io/2.0.2/plexus-io-2.0.2.pom" + }, + "org.codehaus.plexus:plexus-java:jar:0.9.2": { + "layout": "org/codehaus/plexus/plexus-java/0.9.2/plexus-java-0.9.2.jar", + "sha256": "1f732f4d5e6c58de22e7fc14209d6b7345b1761da498c491ff36c27a02d77b13", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-java/0.9.2/plexus-java-0.9.2.jar" + }, + "org.codehaus.plexus:plexus-java:jar:1.0.5": { + "layout": "org/codehaus/plexus/plexus-java/1.0.5/plexus-java-1.0.5.jar", + "sha256": "1c823e3f3ac75e804d79cb16bd31d525370e6d0d76ca5c82a9d31f17331ceee8", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-java/1.0.5/plexus-java-1.0.5.jar" + }, + "org.codehaus.plexus:plexus-java:pom:0.9.2": { + "layout": "org/codehaus/plexus/plexus-java/0.9.2/plexus-java-0.9.2.pom", + "sha256": "a3fea3eca2a696e11d9299e48181da81ca030e9edb7818613f93ad0fbcdc8752", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-java/0.9.2/plexus-java-0.9.2.pom" + }, + "org.codehaus.plexus:plexus-java:pom:1.0.5": { + "layout": "org/codehaus/plexus/plexus-java/1.0.5/plexus-java-1.0.5.pom", + "sha256": "4da92114a3ecf41715046ffa12714b6f16217bcc1dfe50e3affe0e2005b21584", 
+ "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-java/1.0.5/plexus-java-1.0.5.pom" + }, + "org.codehaus.plexus:plexus-languages:pom:0.9.2": { + "layout": "org/codehaus/plexus/plexus-languages/0.9.2/plexus-languages-0.9.2.pom", + "sha256": "3aeecce7c4eded0cf1538a68cdf36b140efc68ff77da5f082aa67ef168d4c653", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-languages/0.9.2/plexus-languages-0.9.2.pom" + }, + "org.codehaus.plexus:plexus-languages:pom:1.0.5": { + "layout": "org/codehaus/plexus/plexus-languages/1.0.5/plexus-languages-1.0.5.pom", + "sha256": "87102f4ea6de726c12968e552d2785f4c8e03562c547a58aa028da7d8f09462e", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-languages/1.0.5/plexus-languages-1.0.5.pom" + }, + "org.codehaus.plexus:plexus-utils:jar:1.1": { + "layout": "org/codehaus/plexus/plexus-utils/1.1/plexus-utils-1.1.jar", + "sha256": "c0b20bb7c354291d1c0a4fd58973b3ec9f0de6b62fde3bacb0fb27f1d24f439a", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.1/plexus-utils-1.1.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:1.5.6": { + "layout": "org/codehaus/plexus/plexus-utils/1.5.6/plexus-utils-1.5.6.jar", + "sha256": "6990ec1b05c978c9940ebf7ec1b4dd911d16c524ee9f4a386a14ec0b07016ab4", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.5.6/plexus-utils-1.5.6.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:1.5.8": { + "layout": "org/codehaus/plexus/plexus-utils/1.5.8/plexus-utils-1.5.8.jar", + "sha256": "b7554a41499282e3b2226a22aff3ebe984f7e159798c461d917c1b829b130cd1", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.5.8/plexus-utils-1.5.8.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:2.0.4": { + "layout": "org/codehaus/plexus/plexus-utils/2.0.4/plexus-utils-2.0.4.jar", + "sha256": "6a17cfbfffe6bb87215ad38bcaac716383e552ec2ba7b204e2673ee66a2afaaa", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/2.0.4/plexus-utils-2.0.4.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:2.0.5": { + "layout": "org/codehaus/plexus/plexus-utils/2.0.5/plexus-utils-2.0.5.jar", + "sha256": "b4c51a337078b934ad656ee78a2d3a805a507129dc034692c67db0f94b659d3e", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/2.0.5/plexus-utils-2.0.5.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:2.0.7": { + "layout": "org/codehaus/plexus/plexus-utils/2.0.7/plexus-utils-2.0.7.jar", + "sha256": "3c96ee6b1e420ecb1a5caf0ee03964294c16c342c626707212cbef0878fb3acf", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/2.0.7/plexus-utils-2.0.7.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:3.0": { + "layout": "org/codehaus/plexus/plexus-utils/3.0/plexus-utils-3.0.jar", + "sha256": "e67fc0b78b5cbabe6748677ebec9844db5014ac397ab1537558bcd614b5c41e1", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0/plexus-utils-3.0.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:3.0.22": { + "layout": "org/codehaus/plexus/plexus-utils/3.0.22/plexus-utils-3.0.22.jar", + "sha256": "0f31c44b275f87e56d46a582ce96d03b9e2ab344cf87c4e268b34d3ad046beab", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0.22/plexus-utils-3.0.22.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:3.0.24": { + "layout": "org/codehaus/plexus/plexus-utils/3.0.24/plexus-utils-3.0.24.jar", + "sha256": 
"83ee748b12d06afb0ad4050a591132b3e8025fbb1990f1ed002e8b73293e69b4", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0.24/plexus-utils-3.0.24.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:3.0.5": { + "layout": "org/codehaus/plexus/plexus-utils/3.0.5/plexus-utils-3.0.5.jar", + "sha256": "94cf1e3fbb141cfdea012dd205a2f6919d1b2fc5449579ed1816bb39e80531c2", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0.5/plexus-utils-3.0.5.jar" + }, + "org.codehaus.plexus:plexus-utils:jar:3.4.2": { + "layout": "org/codehaus/plexus/plexus-utils/3.4.2/plexus-utils-3.4.2.jar", + "sha256": "f957f13604ea1686de805801862f339dbbb6eab9a66f9cc7e4a5c5b27e4fcecc", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.4.2/plexus-utils-3.4.2.jar" + }, + "org.codehaus.plexus:plexus-utils:pom:1.0.4": { + "layout": "org/codehaus/plexus/plexus-utils/1.0.4/plexus-utils-1.0.4.pom", + "sha256": "36623a9539061d87f078af61ca62c3eacf422eb374641cf8903cdeb759671eb3", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.0.4/plexus-utils-1.0.4.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.1": { + "layout": "org/codehaus/plexus/plexus-utils/1.1/plexus-utils-1.1.pom", + "sha256": "e1772a8a6be92088ae069a3dd4f7c9dcbc0888434341028735da0f22481bcd47", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.1/plexus-utils-1.1.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.4.1": { + "layout": "org/codehaus/plexus/plexus-utils/1.4.1/plexus-utils-1.4.1.pom", + "sha256": "894261f5b21a2a18519537086181426450300385518e53d2555f5b4a6c1260e7", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.4.1/plexus-utils-1.4.1.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.4.5": { + "layout": "org/codehaus/plexus/plexus-utils/1.4.5/plexus-utils-1.4.5.pom", + "sha256": "687d05a9521ecb8e319e6beb46abcf53e0e61be647f1c7642a86e22f46814336", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.4.5/plexus-utils-1.4.5.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.4.6": { + "layout": "org/codehaus/plexus/plexus-utils/1.4.6/plexus-utils-1.4.6.pom", + "sha256": "6c68126854b084eed9b25ebc7dd08e965a3b0e73d7538fc750b4a56e44e7b920", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.4.6/plexus-utils-1.4.6.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.5.15": { + "layout": "org/codehaus/plexus/plexus-utils/1.5.15/plexus-utils-1.5.15.pom", + "sha256": "12a3c9a32b82fdc95223cab1f9d344e14ef3e396da14c4d0013451646f3280e7", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.5.15/plexus-utils-1.5.15.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.5.5": { + "layout": "org/codehaus/plexus/plexus-utils/1.5.5/plexus-utils-1.5.5.pom", + "sha256": "f860675cad10e561bfa175d5717e2d8617d40c62321086ca4a85c006a0fa30d1", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.5.5/plexus-utils-1.5.5.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.5.6": { + "layout": "org/codehaus/plexus/plexus-utils/1.5.6/plexus-utils-1.5.6.pom", + "sha256": "0d473f85e79ae843569b9302f634fc3f70bdf135200ab2a486770f57deddbf39", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.5.6/plexus-utils-1.5.6.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:1.5.8": { + "layout": "org/codehaus/plexus/plexus-utils/1.5.8/plexus-utils-1.5.8.pom", + "sha256": 
"1ff4fb95c218af4a46f71d625212c70f377ccf97ad2e26cb8d4c10709265bf62", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/1.5.8/plexus-utils-1.5.8.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:2.0.4": { + "layout": "org/codehaus/plexus/plexus-utils/2.0.4/plexus-utils-2.0.4.pom", + "sha256": "2896dbf57e8c82121481400e8be4df6110edd37e346a6c144b3156f24bf98f72", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/2.0.4/plexus-utils-2.0.4.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:2.0.5": { + "layout": "org/codehaus/plexus/plexus-utils/2.0.5/plexus-utils-2.0.5.pom", + "sha256": "35bc7d1213616236571072b2c56da18f7a57658de8b4a4100645b7054a2b273b", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/2.0.5/plexus-utils-2.0.5.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:2.0.7": { + "layout": "org/codehaus/plexus/plexus-utils/2.0.7/plexus-utils-2.0.7.pom", + "sha256": "4ba8c3e6b5adcc10df7f2f7992d74dc532c8803a14f1fbc5509299ba7f75ff82", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/2.0.7/plexus-utils-2.0.7.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:3.0": { + "layout": "org/codehaus/plexus/plexus-utils/3.0/plexus-utils-3.0.pom", + "sha256": "f479d0dc224974b50cfefae14344a4cd8b692b8dbf58df2d8c5c2f13db01d642", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0/plexus-utils-3.0.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:3.0.10": { + "layout": "org/codehaus/plexus/plexus-utils/3.0.10/plexus-utils-3.0.10.pom", + "sha256": "1f4ed909a012a1ae3eec7b649ae84c0425a05cbb05c48b6a644d122fb6f6bc4b", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0.10/plexus-utils-3.0.10.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:3.0.22": { + "layout": "org/codehaus/plexus/plexus-utils/3.0.22/plexus-utils-3.0.22.pom", + "sha256": "f20db219a9c2ebbfea479a1c58a252d795689b8627d43442748d8a21e0052f57", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0.22/plexus-utils-3.0.22.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:3.0.24": { + "layout": "org/codehaus/plexus/plexus-utils/3.0.24/plexus-utils-3.0.24.pom", + "sha256": "11067f6a75fded12bcdc8daf7a66ddd942ce289c3daf88a3fe0f8b12858a2ee6", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0.24/plexus-utils-3.0.24.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:3.0.5": { + "layout": "org/codehaus/plexus/plexus-utils/3.0.5/plexus-utils-3.0.5.pom", + "sha256": "3bcf1c8aeec542fc6e9177aa49eb75322a5431b6be67b31c65aabda64b6b4222", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.0.5/plexus-utils-3.0.5.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:3.3.0": { + "layout": "org/codehaus/plexus/plexus-utils/3.3.0/plexus-utils-3.3.0.pom", + "sha256": "79c9792073fdee3cdbebd61a76ba8c2dd11624a9f85d128bae56bda19e20475c", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.3.0/plexus-utils-3.3.0.pom" + }, + "org.codehaus.plexus:plexus-utils:pom:3.4.2": { + "layout": "org/codehaus/plexus/plexus-utils/3.4.2/plexus-utils-3.4.2.pom", + "sha256": "0515626f5abaa0bdb5ad87d291953eae358b97e2d10d6ecab84aca0d486063bf", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-utils/3.4.2/plexus-utils-3.4.2.pom" + }, + "org.codehaus.plexus:plexus-velocity:jar:1.1.2": { + "layout": "org/codehaus/plexus/plexus-velocity/1.1.2/plexus-velocity-1.1.2.jar", + 
"sha256": "767909fbe35e179e28e48ea6c81705ac233cfadff7a884a5302d9b4a5176372a", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-velocity/1.1.2/plexus-velocity-1.1.2.jar" + }, + "org.codehaus.plexus:plexus-velocity:jar:1.1.7": { + "layout": "org/codehaus/plexus/plexus-velocity/1.1.7/plexus-velocity-1.1.7.jar", + "sha256": "1c9c994fbcd31526d451797072d7afb19f9b1962e710f3088f54fd1267b45fae", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-velocity/1.1.7/plexus-velocity-1.1.7.jar" + }, + "org.codehaus.plexus:plexus-velocity:pom:1.1.2": { + "layout": "org/codehaus/plexus/plexus-velocity/1.1.2/plexus-velocity-1.1.2.pom", + "sha256": "f45fc03d76b3b1318f1fd0b9d8562cccb15f975396f38157a7cbb961678126ce", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-velocity/1.1.2/plexus-velocity-1.1.2.pom" + }, + "org.codehaus.plexus:plexus-velocity:pom:1.1.7": { + "layout": "org/codehaus/plexus/plexus-velocity/1.1.7/plexus-velocity-1.1.7.pom", + "sha256": "f3812df661f0a82e501d6b1ee14df1b277df29ab58c6c8c785624d6b1149b76d", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-velocity/1.1.7/plexus-velocity-1.1.7.pom" + }, + "org.codehaus.plexus:plexus-velocity:pom:1.2": { + "layout": "org/codehaus/plexus/plexus-velocity/1.2/plexus-velocity-1.2.pom", + "sha256": "508a1682a95da8220e9bd582e2a9e1629d016cfe67c4769ee0b1755279ff5fd6", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-velocity/1.2/plexus-velocity-1.2.pom" + }, + "org.codehaus.plexus:plexus:pom:1.0.10": { + "layout": "org/codehaus/plexus/plexus/1.0.10/plexus-1.0.10.pom", + "sha256": "09b999a969e73525a6cc3ad2868ea744766e1d93b25c6c656d61a5ff9c881da9", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/1.0.10/plexus-1.0.10.pom" + }, + "org.codehaus.plexus:plexus:pom:1.0.11": { + "layout": "org/codehaus/plexus/plexus/1.0.11/plexus-1.0.11.pom", + "sha256": "5197630dcd2336f5b4ab8e6d26e5b8675f5ebd83bd8c91d6aba431b09627d626", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/1.0.11/plexus-1.0.11.pom" + }, + "org.codehaus.plexus:plexus:pom:1.0.12": { + "layout": "org/codehaus/plexus/plexus/1.0.12/plexus-1.0.12.pom", + "sha256": "e3feb169478a21ea8e3af27f90b2d8551309ee771f24176e37ad48bbd7bdad04", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/1.0.12/plexus-1.0.12.pom" + }, + "org.codehaus.plexus:plexus:pom:1.0.4": { + "layout": "org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom", + "sha256": "2242fd02d12b1ca73267fb3d89863025517200e7a4ee426cba4667d0172c74c3", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/1.0.4/plexus-1.0.4.pom" + }, + "org.codehaus.plexus:plexus:pom:1.0.8": { + "layout": "org/codehaus/plexus/plexus/1.0.8/plexus-1.0.8.pom", + "sha256": "a89a2f99088e244b3e52e35f19f5f7d3aba03dbb3cfaea044e2a694119e88f79", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/1.0.8/plexus-1.0.8.pom" + }, + "org.codehaus.plexus:plexus:pom:2.0.2": { + "layout": "org/codehaus/plexus/plexus/2.0.2/plexus-2.0.2.pom", + "sha256": "e246e2a062b5d989fdefc521c9c56431ba5554ff8d2344edee9218a34a546a33", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/2.0.2/plexus-2.0.2.pom" + }, + "org.codehaus.plexus:plexus:pom:2.0.3": { + "layout": "org/codehaus/plexus/plexus/2.0.3/plexus-2.0.3.pom", + "sha256": "fcc5670db8864c50c16bd96972f0da876f6651a8edc99850e68b2d569c5a4776", + "url": 
"https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/2.0.3/plexus-2.0.3.pom" + }, + "org.codehaus.plexus:plexus:pom:2.0.5": { + "layout": "org/codehaus/plexus/plexus/2.0.5/plexus-2.0.5.pom", + "sha256": "72b31dc11351a5bf4f5841221be5b1afc2b802ff96f23f2b77838f6d46cd3ad5", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/2.0.5/plexus-2.0.5.pom" + }, + "org.codehaus.plexus:plexus:pom:2.0.6": { + "layout": "org/codehaus/plexus/plexus/2.0.6/plexus-2.0.6.pom", + "sha256": "bea12e747708d25e73410ca1c731ebdfa102e8bdb6ec7d81bd4522583b234bcc", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/2.0.6/plexus-2.0.6.pom" + }, + "org.codehaus.plexus:plexus:pom:2.0.7": { + "layout": "org/codehaus/plexus/plexus/2.0.7/plexus-2.0.7.pom", + "sha256": "2b59062030ab0a15c5d0977ba22421706368926488739a65f25793e715cc8a74", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/2.0.7/plexus-2.0.7.pom" + }, + "org.codehaus.plexus:plexus:pom:3.0.1": { + "layout": "org/codehaus/plexus/plexus/3.0.1/plexus-3.0.1.pom", + "sha256": "1649f67caab553dd7e6b98002dcc670dab3f624c78f1259c8323e705b0c41e32", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/3.0.1/plexus-3.0.1.pom" + }, + "org.codehaus.plexus:plexus:pom:3.1": { + "layout": "org/codehaus/plexus/plexus/3.1/plexus-3.1.pom", + "sha256": "adcd7cf9191d7d47ed50c1bc53b699cd076eec4cb31699b224d93eb9dfb892b5", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/3.1/plexus-3.1.pom" + }, + "org.codehaus.plexus:plexus:pom:3.3": { + "layout": "org/codehaus/plexus/plexus/3.3/plexus-3.3.pom", + "sha256": "3d2ad3a8bfd49d95952443afe2c14183136811019435bf16eed40796b2210ad2", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/3.3/plexus-3.3.pom" + }, + "org.codehaus.plexus:plexus:pom:3.3.1": { + "layout": "org/codehaus/plexus/plexus/3.3.1/plexus-3.3.1.pom", + "sha256": "6ec96f889bc29250f90b167c14e547f1b05aa23565c63f9079595befbde816bb", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/3.3.1/plexus-3.3.1.pom" + }, + "org.codehaus.plexus:plexus:pom:3.3.2": { + "layout": "org/codehaus/plexus/plexus/3.3.2/plexus-3.3.2.pom", + "sha256": "2fd3853acd7d8b548ab59507f1a5a06897efdc0c4f05acb7c2a49bc78ad83eff", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/3.3.2/plexus-3.3.2.pom" + }, + "org.codehaus.plexus:plexus:pom:4.0": { + "layout": "org/codehaus/plexus/plexus/4.0/plexus-4.0.pom", + "sha256": "0a1b692d7fcc90d6a45dae2e50f4660d48f7a44504f174aa60ef34fbe1327f6a", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/4.0/plexus-4.0.pom" + }, + "org.codehaus.plexus:plexus:pom:5.1": { + "layout": "org/codehaus/plexus/plexus/5.1/plexus-5.1.pom", + "sha256": "a343e44ff5796aed0ea60be11454c935ce20ab1c5f164acc8da574482dcbc7e9", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/5.1/plexus-5.1.pom" + }, + "org.codehaus.plexus:plexus:pom:6.2": { + "layout": "org/codehaus/plexus/plexus/6.2/plexus-6.2.pom", + "sha256": "193be48e6ac6f88bef63e0c87b2ebd5691eb26330372331ed8fb03c5ae585147", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/6.2/plexus-6.2.pom" + }, + "org.codehaus.plexus:plexus:pom:6.5": { + "layout": "org/codehaus/plexus/plexus/6.5/plexus-6.5.pom", + "sha256": "737ff2200498dd54c920712404caa4426d38639d05014ff3ef1b3e7c5bd91397", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/6.5/plexus-6.5.pom" + }, + 
"org.codehaus.plexus:plexus:pom:8": { + "layout": "org/codehaus/plexus/plexus/8/plexus-8.pom", + "sha256": "ffa349db04e7abf65885bdc5a2062f4197c0ff9d3f1f4e2aa5720b77233f742c", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus/8/plexus-8.pom" + }, + "org.codehaus:codehaus-parent:pom:4": { + "layout": "org/codehaus/codehaus-parent/4/codehaus-parent-4.pom", + "sha256": "6b87237de8c2e1740cf80627c7f3ce3e15de1930bb250c55a1eca94fa3e014df", + "url": "https://repo.maven.apache.org/maven2/org/codehaus/codehaus-parent/4/codehaus-parent-4.pom" + }, + "org.eclipse.aether:aether-api:jar:1.0.0.v20140518": { + "layout": "org/eclipse/aether/aether-api/1.0.0.v20140518/aether-api-1.0.0.v20140518.jar", + "sha256": "84b98521684ab22f9528470fa6d8ab68a230e1b211623c989ba7016c306eb773", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/aether/aether-api/1.0.0.v20140518/aether-api-1.0.0.v20140518.jar" + }, + "org.eclipse.aether:aether-api:pom:1.0.0.v20140518": { + "layout": "org/eclipse/aether/aether-api/1.0.0.v20140518/aether-api-1.0.0.v20140518.pom", + "sha256": "2cbb6b666dd1518153afd4939b126c2336506cf3b1bc4541263c09ba87c34b96", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/aether/aether-api/1.0.0.v20140518/aether-api-1.0.0.v20140518.pom" + }, + "org.eclipse.aether:aether-util:jar:1.0.0.v20140518": { + "layout": "org/eclipse/aether/aether-util/1.0.0.v20140518/aether-util-1.0.0.v20140518.jar", + "sha256": "aff0951639837c4e3a4699a421fa79f410032f603f5c6a5bba435e98531f3984", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/aether/aether-util/1.0.0.v20140518/aether-util-1.0.0.v20140518.jar" + }, + "org.eclipse.aether:aether-util:pom:1.0.0.v20140518": { + "layout": "org/eclipse/aether/aether-util/1.0.0.v20140518/aether-util-1.0.0.v20140518.pom", + "sha256": "5c8b507a80901fcdaef89f50c639176b516e8866c6bf07be1ab8ab9da5a4877f", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/aether/aether-util/1.0.0.v20140518/aether-util-1.0.0.v20140518.pom" + }, + "org.eclipse.aether:aether:pom:1.0.0.v20140518": { + "layout": "org/eclipse/aether/aether/1.0.0.v20140518/aether-1.0.0.v20140518.pom", + "sha256": "606b5fa03b171d8204aac0fbace11ee28e71175a0f869bd45f09c9319e7e88dc", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/aether/aether/1.0.0.v20140518/aether-1.0.0.v20140518.pom" + }, + "org.eclipse.jgit:org.eclipse.jgit-parent:pom:4.5.0.201609210915-r": { + "layout": "org/eclipse/jgit/org.eclipse.jgit-parent/4.5.0.201609210915-r/org.eclipse.jgit-parent-4.5.0.201609210915-r.pom", + "sha256": "54440a9bc18ec0481fc1a8a13f5aa5e84872880be717ecc875b4c9cb062591ea", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/jgit/org.eclipse.jgit-parent/4.5.0.201609210915-r/org.eclipse.jgit-parent-4.5.0.201609210915-r.pom" + }, + "org.eclipse.jgit:org.eclipse.jgit:jar:4.5.0.201609210915-r": { + "layout": "org/eclipse/jgit/org.eclipse.jgit/4.5.0.201609210915-r/org.eclipse.jgit-4.5.0.201609210915-r.jar", + "sha256": "e6739bf218062dd09c17d637e2ac7a98435e29e47100b38b1f2d926383f6cee4", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/jgit/org.eclipse.jgit/4.5.0.201609210915-r/org.eclipse.jgit-4.5.0.201609210915-r.jar" + }, + "org.eclipse.jgit:org.eclipse.jgit:pom:4.5.0.201609210915-r": { + "layout": "org/eclipse/jgit/org.eclipse.jgit/4.5.0.201609210915-r/org.eclipse.jgit-4.5.0.201609210915-r.pom", + "sha256": "32fe3db899b038c728ae49093afbba0f14deb3217c3f581869a9d115b9757880", + "url": 
"https://repo.maven.apache.org/maven2/org/eclipse/jgit/org.eclipse.jgit/4.5.0.201609210915-r/org.eclipse.jgit-4.5.0.201609210915-r.pom" + }, + "org.eclipse.rdf4j:rdf4j-client-parent:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-client-parent/2.3.2/rdf4j-client-parent-2.3.2.pom", + "sha256": "3643a92e5956871eb467e064bfe184b5c7690e404329a60bcccea4ba848bed5b", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-client-parent/2.3.2/rdf4j-client-parent-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-model:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-model/2.3.2/rdf4j-model-2.3.2.jar", + "sha256": "3743db4ea34aea9e71a184d8cd7a47c96f65befb091d3e4e8e88a10b762e9077", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-model/2.3.2/rdf4j-model-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-model:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-model/2.3.2/rdf4j-model-2.3.2.pom", + "sha256": "a65d7383aa52bc00b0a2424b076fe8146192baf8e76f6e13d20d7900e1103a3c", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-model/2.3.2/rdf4j-model-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-api:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-api/2.3.2/rdf4j-rio-api-2.3.2.jar", + "sha256": "63ecaf147f4795f53c25a7c34d617eac16bbd0ac6375b80fce9a947cc0e2a1b4", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-api/2.3.2/rdf4j-rio-api-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-api:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-api/2.3.2/rdf4j-rio-api-2.3.2.pom", + "sha256": "ec5bb027215d15c4d6219d404a8683e62b4b40433c903512a52dbafc5c6cfd50", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-api/2.3.2/rdf4j-rio-api-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-binary:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-binary/2.3.2/rdf4j-rio-binary-2.3.2.jar", + "sha256": "dff3fd9986c9a6109bea469831f80511f5c962d5757475609c2777c007726309", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-binary/2.3.2/rdf4j-rio-binary-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-binary:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-binary/2.3.2/rdf4j-rio-binary-2.3.2.pom", + "sha256": "fc7f3d0554ac86ce52883dd6c23ec94b7a785fdd589404d982073389937c69ef", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-binary/2.3.2/rdf4j-rio-binary-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-datatypes:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-datatypes/2.3.2/rdf4j-rio-datatypes-2.3.2.jar", + "sha256": "91347e39d3a9525da894912317490d5334561f30e32f30d18eeb822afa1b4185", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-datatypes/2.3.2/rdf4j-rio-datatypes-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-datatypes:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-datatypes/2.3.2/rdf4j-rio-datatypes-2.3.2.pom", + "sha256": "c36c7ced8ef5680843918c7c2cbe28e9a21474c49eb1e5b522f8e7a956568989", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-datatypes/2.3.2/rdf4j-rio-datatypes-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-jsonld:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-jsonld/2.3.2/rdf4j-rio-jsonld-2.3.2.jar", + "sha256": "a6f9e30a3457dd5aec2bfe11e9474d3245124bad90a2aa58deafac9a7538f5b7", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-jsonld/2.3.2/rdf4j-rio-jsonld-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-jsonld:pom:2.3.2": { + "layout": 
"org/eclipse/rdf4j/rdf4j-rio-jsonld/2.3.2/rdf4j-rio-jsonld-2.3.2.pom", + "sha256": "adf40187c4437bd2d45f9bd76eddbe53625e029c7951ce1f6d2e7bd026db9c2e", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-jsonld/2.3.2/rdf4j-rio-jsonld-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-languages:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-languages/2.3.2/rdf4j-rio-languages-2.3.2.jar", + "sha256": "86c7a299be3a7b549e7ed641842eb76ed7c2b155342a51eef179db07029223f1", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-languages/2.3.2/rdf4j-rio-languages-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-languages:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-languages/2.3.2/rdf4j-rio-languages-2.3.2.pom", + "sha256": "38179c96031d601c63f802c0b67a186791e97289bbf32f12998e355b60c3c8c1", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-languages/2.3.2/rdf4j-rio-languages-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-n3:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-n3/2.3.2/rdf4j-rio-n3-2.3.2.jar", + "sha256": "3fb7d7b81f5c0274da198c7acee061f05ea1f856062572d9020d64945ae12118", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-n3/2.3.2/rdf4j-rio-n3-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-n3:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-n3/2.3.2/rdf4j-rio-n3-2.3.2.pom", + "sha256": "6e43d9f24e60852520b19994377dc1409cb8dff99ba88620e6d54e0abe086ed5", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-n3/2.3.2/rdf4j-rio-n3-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-nquads:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-nquads/2.3.2/rdf4j-rio-nquads-2.3.2.jar", + "sha256": "1fcd903ee5d512eeb6c38b3646399ebaaefb2484a80eaac501935d950989f6b4", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-nquads/2.3.2/rdf4j-rio-nquads-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-nquads:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-nquads/2.3.2/rdf4j-rio-nquads-2.3.2.pom", + "sha256": "5b0010bd2625b1c2b41768ddc87964ab3b48f45bd8fee4e9b4d0a10c4798a53d", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-nquads/2.3.2/rdf4j-rio-nquads-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-ntriples:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-ntriples/2.3.2/rdf4j-rio-ntriples-2.3.2.jar", + "sha256": "803c83f6b6fcf8a578daec1b7912c030ae15923f2e0c20eddfb00e3b9fd1d770", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-ntriples/2.3.2/rdf4j-rio-ntriples-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-ntriples:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-ntriples/2.3.2/rdf4j-rio-ntriples-2.3.2.pom", + "sha256": "a43a2bf6d36105b88eb3c1438368a6d88166afb2bb794a55f3862e750ba935a8", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-ntriples/2.3.2/rdf4j-rio-ntriples-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-rdfjson:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-rdfjson/2.3.2/rdf4j-rio-rdfjson-2.3.2.jar", + "sha256": "50e449c6fc0b9f9fa1058ccc07931729c25b5b76c11651e1b9a0b1172ebb53f5", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-rdfjson/2.3.2/rdf4j-rio-rdfjson-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-rdfjson:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-rdfjson/2.3.2/rdf4j-rio-rdfjson-2.3.2.pom", + "sha256": "b8f0177e4cd5d7fd172d530d26d3d81d7c0a2945da350bba6cbf43f11d73cc7e", + "url": 
"https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-rdfjson/2.3.2/rdf4j-rio-rdfjson-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-rdfxml:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-rdfxml/2.3.2/rdf4j-rio-rdfxml-2.3.2.jar", + "sha256": "c3163d9ac8e7ecc8b11887952b8f86895fc850361408c2b9be3f642a67d24a1b", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-rdfxml/2.3.2/rdf4j-rio-rdfxml-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-rdfxml:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-rdfxml/2.3.2/rdf4j-rio-rdfxml-2.3.2.pom", + "sha256": "0844c4930e20e664589e3b6a9b25c238de0fff7d7c047a4a997905724e2de8be", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-rdfxml/2.3.2/rdf4j-rio-rdfxml-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-trig:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-trig/2.3.2/rdf4j-rio-trig-2.3.2.jar", + "sha256": "c140ff124e7c1c3388064f723486d38a98db3f8c47295ee46ed7502aae822a5f", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-trig/2.3.2/rdf4j-rio-trig-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-trig:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-trig/2.3.2/rdf4j-rio-trig-2.3.2.pom", + "sha256": "1ebd8d3c2bfaa618bb5bd43e172244a30acf06c176740f7d22065f0f45040586", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-trig/2.3.2/rdf4j-rio-trig-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-trix:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-trix/2.3.2/rdf4j-rio-trix-2.3.2.jar", + "sha256": "2aea0394601e5699c2b8b8155463770ed9ffcdbfcc306ee1b452a433ac6506ab", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-trix/2.3.2/rdf4j-rio-trix-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-trix:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-trix/2.3.2/rdf4j-rio-trix-2.3.2.pom", + "sha256": "2796c99600a534f360afe35d976c33ee9af353823e439087192ec8c9cb985e28", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-trix/2.3.2/rdf4j-rio-trix-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio-turtle:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-turtle/2.3.2/rdf4j-rio-turtle-2.3.2.jar", + "sha256": "21c5852401c370c51faf37de1c04ced64e497338a280c5ddd7482be80ac24c1b", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-turtle/2.3.2/rdf4j-rio-turtle-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-rio-turtle:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio-turtle/2.3.2/rdf4j-rio-turtle-2.3.2.pom", + "sha256": "8032a36c88e5143d4af375c19f914e946d556ce3bc57654e5f63a4c54d914836", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio-turtle/2.3.2/rdf4j-rio-turtle-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-rio:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-rio/2.3.2/rdf4j-rio-2.3.2.pom", + "sha256": "39c490b3603dbc77e32da29f2eb24b2c1e5fb3caec632fc0bf187751eed25cb1", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-rio/2.3.2/rdf4j-rio-2.3.2.pom" + }, + "org.eclipse.rdf4j:rdf4j-util:jar:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-util/2.3.2/rdf4j-util-2.3.2.jar", + "sha256": "1f04337201d3a8c23eb15af29c48d25b904e435e49fdc288aa42cf788b06745c", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-util/2.3.2/rdf4j-util-2.3.2.jar" + }, + "org.eclipse.rdf4j:rdf4j-util:pom:2.3.2": { + "layout": "org/eclipse/rdf4j/rdf4j-util/2.3.2/rdf4j-util-2.3.2.pom", + "sha256": 
"fb4ebdcc408d35bdffddda155b9ccdb7352804534aa413c68c805e35671a6654", + "url": "https://repo.maven.apache.org/maven2/org/eclipse/rdf4j/rdf4j-util/2.3.2/rdf4j-util-2.3.2.pom" + }, + "org.eluder.coveralls:coveralls-maven-plugin:jar:4.3.0": { + "layout": "org/eluder/coveralls/coveralls-maven-plugin/4.3.0/coveralls-maven-plugin-4.3.0.jar", + "sha256": "685f5ab0904fff3078e54f7dbad1eb6dc04507b00e7f8f13629051187c002273", + "url": "https://repo.maven.apache.org/maven2/org/eluder/coveralls/coveralls-maven-plugin/4.3.0/coveralls-maven-plugin-4.3.0.jar" + }, + "org.eluder.coveralls:coveralls-maven-plugin:pom:4.3.0": { + "layout": "org/eluder/coveralls/coveralls-maven-plugin/4.3.0/coveralls-maven-plugin-4.3.0.pom", + "sha256": "f81774cc3b876dfd035de11aebdf0e5a33f1c44b33d8d2e648f40eb3bdc8b210", + "url": "https://repo.maven.apache.org/maven2/org/eluder/coveralls/coveralls-maven-plugin/4.3.0/coveralls-maven-plugin-4.3.0.pom" + }, + "org.eluder:eluder-parent:pom:8": { + "layout": "org/eluder/eluder-parent/8/eluder-parent-8.pom", + "sha256": "bb5e5a46f27308ec534dd35f3a175daff44911fdcfdb2e6672eb5ac124b1936c", + "url": "https://repo.maven.apache.org/maven2/org/eluder/eluder-parent/8/eluder-parent-8.pom" + }, + "org.freemarker:freemarker:jar:2.3.20": { + "layout": "org/freemarker/freemarker/2.3.20/freemarker-2.3.20.jar", + "sha256": "10fc697ec84499aeba45b1a54afa8b730d2278693b3d9f1629950801e9344ed5", + "url": "https://repo.maven.apache.org/maven2/org/freemarker/freemarker/2.3.20/freemarker-2.3.20.jar" + }, + "org.freemarker:freemarker:pom:2.3.20": { + "layout": "org/freemarker/freemarker/2.3.20/freemarker-2.3.20.pom", + "sha256": "9307327dcc3d38ebd2de265416475acd852b6058347e8af9500cb773155ef79a", + "url": "https://repo.maven.apache.org/maven2/org/freemarker/freemarker/2.3.20/freemarker-2.3.20.pom" + }, + "org.fusesource.jansi:jansi-project:pom:1.18": { + "layout": "org/fusesource/jansi/jansi-project/1.18/jansi-project-1.18.pom", + "sha256": "02c836893cfd1f5459925a21635a5d2c41075442fb9673ad3cdaed573b9e800d", + "url": "https://repo.maven.apache.org/maven2/org/fusesource/jansi/jansi-project/1.18/jansi-project-1.18.pom" + }, + "org.fusesource.jansi:jansi:jar:1.18": { + "layout": "org/fusesource/jansi/jansi/1.18/jansi-1.18.jar", + "sha256": "109e64fc65767c7a1a3bd654709d76f107b0a3b39db32cbf11139e13a6f5229b", + "url": "https://repo.maven.apache.org/maven2/org/fusesource/jansi/jansi/1.18/jansi-1.18.jar" + }, + "org.fusesource.jansi:jansi:pom:1.18": { + "layout": "org/fusesource/jansi/jansi/1.18/jansi-1.18.pom", + "sha256": "dd8f619c2c2a4d7b6f81589428db770514528e59ce0532f0718fee33a265fb4b", + "url": "https://repo.maven.apache.org/maven2/org/fusesource/jansi/jansi/1.18/jansi-1.18.pom" + }, + "org.fusesource:fusesource-pom:pom:1.11": { + "layout": "org/fusesource/fusesource-pom/1.11/fusesource-pom-1.11.pom", + "sha256": "695db8335e33de3d403b3d1f0425970f3710a9db4a528ec460199055ef1e8ab8", + "url": "https://repo.maven.apache.org/maven2/org/fusesource/fusesource-pom/1.11/fusesource-pom-1.11.pom" + }, + "org.hamcrest:hamcrest-core:jar:1.3": { + "layout": "org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar", + "sha256": "66fdef91e9739348df7a096aa384a5685f4e875584cce89386a7a47251c4d8e9", + "url": "https://repo.maven.apache.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar" + }, + "org.hamcrest:hamcrest-core:pom:1.1": { + "layout": "org/hamcrest/hamcrest-core/1.1/hamcrest-core-1.1.pom", + "sha256": "397387f406c68ccb403f477ccbec1c818cfc6b8ff4fada5a33e26183a8417c83", + "url": 
"https://repo.maven.apache.org/maven2/org/hamcrest/hamcrest-core/1.1/hamcrest-core-1.1.pom" + }, + "org.hamcrest:hamcrest-core:pom:1.3": { + "layout": "org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.pom", + "sha256": "fde386a7905173a1b103de6ab820727584b50d0e32282e2797787c20a64ffa93", + "url": "https://repo.maven.apache.org/maven2/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.pom" + }, + "org.hamcrest:hamcrest-parent:pom:1.1": { + "layout": "org/hamcrest/hamcrest-parent/1.1/hamcrest-parent-1.1.pom", + "sha256": "14e6950a1a6298cbcb83cf9429cac415ff273167f6f15876859413b0d6c07e3a", + "url": "https://repo.maven.apache.org/maven2/org/hamcrest/hamcrest-parent/1.1/hamcrest-parent-1.1.pom" + }, + "org.hamcrest:hamcrest-parent:pom:1.3": { + "layout": "org/hamcrest/hamcrest-parent/1.3/hamcrest-parent-1.3.pom", + "sha256": "6d535f94efb663bdb682c9f27a50335394688009642ba7a9677504bc1be4129b", + "url": "https://repo.maven.apache.org/maven2/org/hamcrest/hamcrest-parent/1.3/hamcrest-parent-1.3.pom" + }, + "org.jacoco:jacoco-maven-plugin:jar:0.8.8": { + "layout": "org/jacoco/jacoco-maven-plugin/0.8.8/jacoco-maven-plugin-0.8.8.jar", + "sha256": "53903560c1814f49df8cd1c03fb5bf249d24d236215770bff747b2dfe6be406a", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/jacoco-maven-plugin/0.8.8/jacoco-maven-plugin-0.8.8.jar" + }, + "org.jacoco:jacoco-maven-plugin:pom:0.8.8": { + "layout": "org/jacoco/jacoco-maven-plugin/0.8.8/jacoco-maven-plugin-0.8.8.pom", + "sha256": "b80f26e1bc6cf4f84f9d8d7ada6275cc83efcfb1f1297967558804c5a1fea423", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/jacoco-maven-plugin/0.8.8/jacoco-maven-plugin-0.8.8.pom" + }, + "org.jacoco:org.jacoco.agent:jar:runtime:0.8.8": { + "layout": "org/jacoco/org.jacoco.agent/0.8.8/org.jacoco.agent-0.8.8-runtime.jar", + "sha256": "67de51e9ca1db044f3a3d10613518befb02e8eee1015f2ff6d56cfb9d4506546", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/org.jacoco.agent/0.8.8/org.jacoco.agent-0.8.8-runtime.jar" + }, + "org.jacoco:org.jacoco.agent:pom:0.8.8": { + "layout": "org/jacoco/org.jacoco.agent/0.8.8/org.jacoco.agent-0.8.8.pom", + "sha256": "7dd13c80aff315032983357c650a887d6fdb4c8a8870b207bb4802c49809e7b4", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/org.jacoco.agent/0.8.8/org.jacoco.agent-0.8.8.pom" + }, + "org.jacoco:org.jacoco.build:pom:0.8.8": { + "layout": "org/jacoco/org.jacoco.build/0.8.8/org.jacoco.build-0.8.8.pom", + "sha256": "f4ce0b1285fd24fc6c772f42857298315904dde8fd5677267a0fad5ff9ce2aef", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/org.jacoco.build/0.8.8/org.jacoco.build-0.8.8.pom" + }, + "org.jacoco:org.jacoco.core:jar:0.8.8": { + "layout": "org/jacoco/org.jacoco.core/0.8.8/org.jacoco.core-0.8.8.jar", + "sha256": "474c782f809d88924713dfdbf0acb79d330f904be576484803463d0465611643", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/org.jacoco.core/0.8.8/org.jacoco.core-0.8.8.jar" + }, + "org.jacoco:org.jacoco.core:pom:0.8.8": { + "layout": "org/jacoco/org.jacoco.core/0.8.8/org.jacoco.core-0.8.8.pom", + "sha256": "f5fab5a48df823b83c0ea35026032368cc9b81800efb257cc7a5928298fee225", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/org.jacoco.core/0.8.8/org.jacoco.core-0.8.8.pom" + }, + "org.jacoco:org.jacoco.report:jar:0.8.8": { + "layout": "org/jacoco/org.jacoco.report/0.8.8/org.jacoco.report-0.8.8.jar", + "sha256": "2c129110f3e3fcaa1f8179578ea3894586199cb0826be5c7790278084c9622a9", + "url": 
"https://repo.maven.apache.org/maven2/org/jacoco/org.jacoco.report/0.8.8/org.jacoco.report-0.8.8.jar" + }, + "org.jacoco:org.jacoco.report:pom:0.8.8": { + "layout": "org/jacoco/org.jacoco.report/0.8.8/org.jacoco.report-0.8.8.pom", + "sha256": "5213af2916bb2690be871917d82a0f9c2ba1e88b796a59343fc03df8ae138716", + "url": "https://repo.maven.apache.org/maven2/org/jacoco/org.jacoco.report/0.8.8/org.jacoco.report-0.8.8.pom" + }, + "org.jdom:jdom2:jar:2.0.6.1": { + "layout": "org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.jar", + "sha256": "0b20f45e3a0fd8f0d12cdc5316b06776e902b1365db00118876f9175c60f302c", + "url": "https://repo.maven.apache.org/maven2/org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.jar" + }, + "org.jdom:jdom2:pom:2.0.6.1": { + "layout": "org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.pom", + "sha256": "55795e1018b8ae647b937967cf810a99b08582c2374e7873c128734c8c914bf3", + "url": "https://repo.maven.apache.org/maven2/org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.pom" + }, + "org.jgrapht:jgrapht-core:jar:0.9.0": { + "layout": "org/jgrapht/jgrapht-core/0.9.0/jgrapht-core-0.9.0.jar", + "sha256": "3b2a25e4c2a332997b87c40093e4c692fe26df9824251472212264286aa0a4a4", + "url": "https://repo.maven.apache.org/maven2/org/jgrapht/jgrapht-core/0.9.0/jgrapht-core-0.9.0.jar" + }, + "org.jgrapht:jgrapht-core:pom:0.9.0": { + "layout": "org/jgrapht/jgrapht-core/0.9.0/jgrapht-core-0.9.0.pom", + "sha256": "401226246dd874516251a3564de1701b22303f212c06114888bdf7efc337b03f", + "url": "https://repo.maven.apache.org/maven2/org/jgrapht/jgrapht-core/0.9.0/jgrapht-core-0.9.0.pom" + }, + "org.jgrapht:jgrapht:pom:0.9.0": { + "layout": "org/jgrapht/jgrapht/0.9.0/jgrapht-0.9.0.pom", + "sha256": "1d2942ca82603ec469372815824db3fe513e21d6659c459c5c78899f26abf0ad", + "url": "https://repo.maven.apache.org/maven2/org/jgrapht/jgrapht/0.9.0/jgrapht-0.9.0.pom" + }, + "org.jline:jline-parent:pom:3.16.0": { + "layout": "org/jline/jline-parent/3.16.0/jline-parent-3.16.0.pom", + "sha256": "1694b9ee7478d0e95665351d672c13512aba05abc5e25a3a854ed98ffff4dd59", + "url": "https://repo.maven.apache.org/maven2/org/jline/jline-parent/3.16.0/jline-parent-3.16.0.pom" + }, + "org.jline:jline:jar:3.16.0": { + "layout": "org/jline/jline/3.16.0/jline-3.16.0.jar", + "sha256": "3a123696535d44cf5974a05b8c70a00c7846ac1dfff148cd6b22ba69c29e27ea", + "url": "https://repo.maven.apache.org/maven2/org/jline/jline/3.16.0/jline-3.16.0.jar" + }, + "org.jline:jline:pom:3.16.0": { + "layout": "org/jline/jline/3.16.0/jline-3.16.0.pom", + "sha256": "1cb2d39eef125c9e138883778f5ec9ab19c4f05424c67083d114addaee6baf84", + "url": "https://repo.maven.apache.org/maven2/org/jline/jline/3.16.0/jline-3.16.0.pom" + }, + "org.mockito:mockito-core:jar:2.28.2": { + "layout": "org/mockito/mockito-core/2.28.2/mockito-core-2.28.2.jar", + "sha256": "b0af36fed3a6c2147c0cd9028a1d814fd4f4e8196c539f2befddb61ca6ec9e27", + "url": "https://repo.maven.apache.org/maven2/org/mockito/mockito-core/2.28.2/mockito-core-2.28.2.jar" + }, + "org.mockito:mockito-core:pom:2.28.2": { + "layout": "org/mockito/mockito-core/2.28.2/mockito-core-2.28.2.pom", + "sha256": "b3bf322abfee6c054935d68ddbb785a90f7934a97a17fcd140a4e6ad58e59d53", + "url": "https://repo.maven.apache.org/maven2/org/mockito/mockito-core/2.28.2/mockito-core-2.28.2.pom" + }, + "org.nuiton.processor:nuiton-processor:jar:1.3": { + "layout": "org/nuiton/processor/nuiton-processor/1.3/nuiton-processor-1.3.jar", + "sha256": "94e6807e8eaed7396976254372fbbb5d64d21a2440f062ab8229900a0207161e", + "url": 
"https://repo.maven.apache.org/maven2/org/nuiton/processor/nuiton-processor/1.3/nuiton-processor-1.3.jar" + }, + "org.nuiton.processor:nuiton-processor:pom:1.3": { + "layout": "org/nuiton/processor/nuiton-processor/1.3/nuiton-processor-1.3.pom", + "sha256": "804c11c30a4649a1966262ebac341fb9ca49546684cb26c65b669bcbb3b947aa", + "url": "https://repo.maven.apache.org/maven2/org/nuiton/processor/nuiton-processor/1.3/nuiton-processor-1.3.pom" + }, + "org.nuiton:mavenpom4redmine:pom:3.4.4": { + "layout": "org/nuiton/mavenpom4redmine/3.4.4/mavenpom4redmine-3.4.4.pom", + "sha256": "a6ebddfc1c17329878d260192f753d872848511ecd313231e8859725ca258dc6", + "url": "https://repo.maven.apache.org/maven2/org/nuiton/mavenpom4redmine/3.4.4/mavenpom4redmine-3.4.4.pom" + }, + "org.nuiton:mavenpom4redmineAndCentral:pom:3.4.4": { + "layout": "org/nuiton/mavenpom4redmineAndCentral/3.4.4/mavenpom4redmineAndCentral-3.4.4.pom", + "sha256": "f528966f11be1e5f0f8583937b5d645753e9c2cb51e8fcd7fd4e7c8970cf96a5", + "url": "https://repo.maven.apache.org/maven2/org/nuiton/mavenpom4redmineAndCentral/3.4.4/mavenpom4redmineAndCentral-3.4.4.pom" + }, + "org.nuiton:mavenpom:pom:3.4.4": { + "layout": "org/nuiton/mavenpom/3.4.4/mavenpom-3.4.4.pom", + "sha256": "a138f0218a0b3d261cc08de0a688901207310f83b4221ee139b34897f3e32f49", + "url": "https://repo.maven.apache.org/maven2/org/nuiton/mavenpom/3.4.4/mavenpom-3.4.4.pom" + }, + "org.nuiton:processor:pom:1.3": { + "layout": "org/nuiton/processor/1.3/processor-1.3.pom", + "sha256": "a5b2ddec87de81e533749dc29712d8d612a5e145594c7fe3eb63dd6bc3b2b04b", + "url": "https://repo.maven.apache.org/maven2/org/nuiton/processor/1.3/processor-1.3.pom" + }, + "org.objenesis:objenesis-parent:pom:2.6": { + "layout": "org/objenesis/objenesis-parent/2.6/objenesis-parent-2.6.pom", + "sha256": "3825feca2a3c176400b063dec7c6b0643e2b5256bbbfd4e0a7c11e0dd0983baa", + "url": "https://repo.maven.apache.org/maven2/org/objenesis/objenesis-parent/2.6/objenesis-parent-2.6.pom" + }, + "org.objenesis:objenesis:jar:2.6": { + "layout": "org/objenesis/objenesis/2.6/objenesis-2.6.jar", + "sha256": "5e168368fbc250af3c79aa5fef0c3467a2d64e5a7bd74005f25d8399aeb0708d", + "url": "https://repo.maven.apache.org/maven2/org/objenesis/objenesis/2.6/objenesis-2.6.jar" + }, + "org.objenesis:objenesis:pom:2.6": { + "layout": "org/objenesis/objenesis/2.6/objenesis-2.6.pom", + "sha256": "4c1307909dc62df1bd91f075503f8bdef5ae445e13353f1752af9448bea1d3f1", + "url": "https://repo.maven.apache.org/maven2/org/objenesis/objenesis/2.6/objenesis-2.6.pom" + }, + "org.openrdf.sesame:sesame-core:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-core/2.7.16/sesame-core-2.7.16.pom", + "sha256": "0a091d76a324c9117af2ed3903d426afec4dc371be5038fc7d09d3b0221f1c2b", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-core/2.7.16/sesame-core-2.7.16.pom" + }, + "org.openrdf.sesame:sesame-model:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-model/2.7.16/sesame-model-2.7.16.jar", + "sha256": "fb74940f1082367b24860fa1c41b1e3cdeb553415b0ca3128dbd2fc4845664cc", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-model/2.7.16/sesame-model-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-model:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-model/2.7.16/sesame-model-2.7.16.pom", + "sha256": "b4f7c9f66e10454e416f9f0294032b94f11af71a8d948f4a93f9a498309bd108", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-model/2.7.16/sesame-model-2.7.16.pom" + }, + 
"org.openrdf.sesame:sesame-rio-api:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-api/2.7.16/sesame-rio-api-2.7.16.jar", + "sha256": "67edbe5359a8e956db2d435b920f3091fb345e2f61a9a54a7110ba5583c2159d", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-api/2.7.16/sesame-rio-api-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-rio-api:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-api/2.7.16/sesame-rio-api-2.7.16.pom", + "sha256": "cbcb3044f4720193023d43bd07a513d61bf7556bc6a74dd1073096268baef17f", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-api/2.7.16/sesame-rio-api-2.7.16.pom" + }, + "org.openrdf.sesame:sesame-rio-datatypes:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-datatypes/2.7.16/sesame-rio-datatypes-2.7.16.jar", + "sha256": "d0a16c24dcd57c2b9978aa7447bdfcd71231b86a4bc5820af64aa54a9b768d72", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-datatypes/2.7.16/sesame-rio-datatypes-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-rio-datatypes:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-datatypes/2.7.16/sesame-rio-datatypes-2.7.16.pom", + "sha256": "db161ec89e569f73e22f584edd425f3e0ebfce8f0a161cc0eca755e3df8eef0d", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-datatypes/2.7.16/sesame-rio-datatypes-2.7.16.pom" + }, + "org.openrdf.sesame:sesame-rio-languages:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-languages/2.7.16/sesame-rio-languages-2.7.16.jar", + "sha256": "afdf5f5e964bf6e623d6321d0277192584a95f07408756da8fcd28dbc05d52c7", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-languages/2.7.16/sesame-rio-languages-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-rio-languages:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-languages/2.7.16/sesame-rio-languages-2.7.16.pom", + "sha256": "3991484b099b9274a8717798de11526bba14034daa7ef194ff07d02c11d57a35", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-languages/2.7.16/sesame-rio-languages-2.7.16.pom" + }, + "org.openrdf.sesame:sesame-rio-ntriples:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-ntriples/2.7.16/sesame-rio-ntriples-2.7.16.jar", + "sha256": "a38eb1be6aa25abfb8a5ba4ae97089adc31d74114d7311eea14a62947e515a46", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-ntriples/2.7.16/sesame-rio-ntriples-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-rio-ntriples:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-ntriples/2.7.16/sesame-rio-ntriples-2.7.16.pom", + "sha256": "421679b4838c3d7b45cb49b94e0649b01caa1d7d0202f23af696ed861b2e1f0d", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-ntriples/2.7.16/sesame-rio-ntriples-2.7.16.pom" + }, + "org.openrdf.sesame:sesame-rio-rdfxml:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-rdfxml/2.7.16/sesame-rio-rdfxml-2.7.16.jar", + "sha256": "b594ee00645c538ce928b9e46dd69716e17731dff386aeedf1fa39dc639adefe", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-rdfxml/2.7.16/sesame-rio-rdfxml-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-rio-rdfxml:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-rdfxml/2.7.16/sesame-rio-rdfxml-2.7.16.pom", + "sha256": "21d890944cbd654ec02123e5c8f2df33b4e4d8eaec7fe695a46d8674a497251d", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-rdfxml/2.7.16/sesame-rio-rdfxml-2.7.16.pom" + }, + 
"org.openrdf.sesame:sesame-rio-turtle:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-turtle/2.7.16/sesame-rio-turtle-2.7.16.jar", + "sha256": "5980e4bc4e7c98fea08b55d3eefc2c45b9f78989e14803cecd81288b18a9e3fe", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-turtle/2.7.16/sesame-rio-turtle-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-rio-turtle:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio-turtle/2.7.16/sesame-rio-turtle-2.7.16.pom", + "sha256": "9d68bd5571a94bad13d2cbc08f4efe99876f3d9b8f32a11b2f5cf2a3a2a3ca83", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio-turtle/2.7.16/sesame-rio-turtle-2.7.16.pom" + }, + "org.openrdf.sesame:sesame-rio:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-rio/2.7.16/sesame-rio-2.7.16.pom", + "sha256": "b04805a49ebf0529f8859e33620ec9fca537ec3d2d91b97501a3ada4e9a2f63e", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-rio/2.7.16/sesame-rio-2.7.16.pom" + }, + "org.openrdf.sesame:sesame-util:jar:2.7.16": { + "layout": "org/openrdf/sesame/sesame-util/2.7.16/sesame-util-2.7.16.jar", + "sha256": "4cca4ec18c63d844fbf3afd52ab4f0408b848bd7d76d1ed87ef924207041c865", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-util/2.7.16/sesame-util-2.7.16.jar" + }, + "org.openrdf.sesame:sesame-util:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame-util/2.7.16/sesame-util-2.7.16.pom", + "sha256": "b8217bd6e32ac7cf5247b3e0dd99ad9a4224fe97ac58b14e4b49ebcaf333b08a", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame-util/2.7.16/sesame-util-2.7.16.pom" + }, + "org.openrdf.sesame:sesame:pom:2.7.16": { + "layout": "org/openrdf/sesame/sesame/2.7.16/sesame-2.7.16.pom", + "sha256": "c3aa12d8e13a29a6b98825e06c9531794841a5ffdc22e0cf38412463bb12dd65", + "url": "https://repo.maven.apache.org/maven2/org/openrdf/sesame/sesame/2.7.16/sesame-2.7.16.pom" + }, + "org.ow2.asm:asm-analysis:jar:9.2": { + "layout": "org/ow2/asm/asm-analysis/9.2/asm-analysis-9.2.jar", + "sha256": "878fbe521731c072d14d2d65b983b1beae6ad06fda0007b6a8bae81f73f433c4", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-analysis/9.2/asm-analysis-9.2.jar" + }, + "org.ow2.asm:asm-analysis:pom:9.2": { + "layout": "org/ow2/asm/asm-analysis/9.2/asm-analysis-9.2.pom", + "sha256": "773cc1a2bfc14c6c4a979c51a075c0234a0bf694fc3abe4facf454f37a145f1b", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-analysis/9.2/asm-analysis-9.2.pom" + }, + "org.ow2.asm:asm-commons:jar:9.2": { + "layout": "org/ow2/asm/asm-commons/9.2/asm-commons-9.2.jar", + "sha256": "be4ce53138a238bb522cd781cf91f3ba5ce2f6ca93ec62d46a162a127225e0a6", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-commons/9.2/asm-commons-9.2.jar" + }, + "org.ow2.asm:asm-commons:jar:9.4": { + "layout": "org/ow2/asm/asm-commons/9.4/asm-commons-9.4.jar", + "sha256": "0c128a9ec3f33c98959272f6d16cf14247b508f58951574bcdbd2b56d6326364", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-commons/9.4/asm-commons-9.4.jar" + }, + "org.ow2.asm:asm-commons:pom:9.2": { + "layout": "org/ow2/asm/asm-commons/9.2/asm-commons-9.2.pom", + "sha256": "02824e839f2a2f0e72959fdd30b4897240f05afc43de42d7ba0b18437601c070", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-commons/9.2/asm-commons-9.2.pom" + }, + "org.ow2.asm:asm-commons:pom:9.4": { + "layout": "org/ow2/asm/asm-commons/9.4/asm-commons-9.4.pom", + "sha256": "b42ca2abcf8811776a5ddc0290f2106d7f313f82c78b0ddccd5cce4c63a35179", + 
"url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-commons/9.4/asm-commons-9.4.pom" + }, + "org.ow2.asm:asm-parent:pom:6.0_BETA": { + "layout": "org/ow2/asm/asm-parent/6.0_BETA/asm-parent-6.0_BETA.pom", + "sha256": "6877adfe16143708322b98e4d23ab9c2a9761bec02d476a4600f39be877d6e09", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-parent/6.0_BETA/asm-parent-6.0_BETA.pom" + }, + "org.ow2.asm:asm-tree:jar:9.2": { + "layout": "org/ow2/asm/asm-tree/9.2/asm-tree-9.2.jar", + "sha256": "aabf9bd23091a4ebfc109c1f3ee7cf3e4b89f6ba2d3f51c5243f16b3cffae011", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-tree/9.2/asm-tree-9.2.jar" + }, + "org.ow2.asm:asm-tree:jar:9.4": { + "layout": "org/ow2/asm/asm-tree/9.4/asm-tree-9.4.jar", + "sha256": "c42d479cf24566a21eb20af7eeaeef4e86bdb4a886306cf72f483b65e75b2acf", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-tree/9.4/asm-tree-9.4.jar" + }, + "org.ow2.asm:asm-tree:pom:9.2": { + "layout": "org/ow2/asm/asm-tree/9.2/asm-tree-9.2.pom", + "sha256": "f61f3ebea5520ddf19f452b03c426c7231bdd8a81d7ac28765cb5271225ac378", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-tree/9.2/asm-tree-9.2.pom" + }, + "org.ow2.asm:asm-tree:pom:9.4": { + "layout": "org/ow2/asm/asm-tree/9.4/asm-tree-9.4.pom", + "sha256": "c7e9ef93bdd8ab363032ce5382fcc64d002d6c589c175233236cd298e51a3c16", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm-tree/9.4/asm-tree-9.4.pom" + }, + "org.ow2.asm:asm:jar:6.0_BETA": { + "layout": "org/ow2/asm/asm/6.0_BETA/asm-6.0_BETA.jar", + "sha256": "424252bafe280b3b58df8669d38aba56808acae15c4b4453e65a8b3fb67a6656", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/6.0_BETA/asm-6.0_BETA.jar" + }, + "org.ow2.asm:asm:jar:7.2": { + "layout": "org/ow2/asm/asm/7.2/asm-7.2.jar", + "sha256": "7e6cc9e92eb94d04e39356c6d8144ca058cda961c344a7f62166a405f3206672", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/7.2/asm-7.2.jar" + }, + "org.ow2.asm:asm:jar:9.2": { + "layout": "org/ow2/asm/asm/9.2/asm-9.2.jar", + "sha256": "b9d4fe4d71938df38839f0eca42aaaa64cf8b313d678da036f0cb3ca199b47f5", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/9.2/asm-9.2.jar" + }, + "org.ow2.asm:asm:jar:9.4": { + "layout": "org/ow2/asm/asm/9.4/asm-9.4.jar", + "sha256": "39d0e2b3dc45af65a09b097945750a94a126e052e124f93468443a1d0e15f381", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/9.4/asm-9.4.jar" + }, + "org.ow2.asm:asm:pom:6.0_BETA": { + "layout": "org/ow2/asm/asm/6.0_BETA/asm-6.0_BETA.pom", + "sha256": "4e41ae566a4ca9cd2b6e36a5bc71f05b7b8f6485df5c97fae9e07d79ce091f6b", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/6.0_BETA/asm-6.0_BETA.pom" + }, + "org.ow2.asm:asm:pom:7.2": { + "layout": "org/ow2/asm/asm/7.2/asm-7.2.pom", + "sha256": "e9e529afbd4bc699f6a3380855d27d13017c360fdb68547e06d1c3842d84e262", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/7.2/asm-7.2.pom" + }, + "org.ow2.asm:asm:pom:9.2": { + "layout": "org/ow2/asm/asm/9.2/asm-9.2.pom", + "sha256": "dfb12a1b224bf01be1fd604020466f894241bcb645dcce395edd8cd6f8a50df4", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/9.2/asm-9.2.pom" + }, + "org.ow2.asm:asm:pom:9.4": { + "layout": "org/ow2/asm/asm/9.4/asm-9.4.pom", + "sha256": "483751e48fb2d1f43c61ad3ab00ffa466edc0333d8fc2fdb5a26e95d32982394", + "url": "https://repo.maven.apache.org/maven2/org/ow2/asm/asm/9.4/asm-9.4.pom" + }, + "org.ow2:ow2:pom:1.3": { + "layout": "org/ow2/ow2/1.3/ow2-1.3.pom", + "sha256": 
"51215c67d2c068d8b7d2f6f80f51372a098075deccc448d4bdd7b987ba8328fb", + "url": "https://repo.maven.apache.org/maven2/org/ow2/ow2/1.3/ow2-1.3.pom" + }, + "org.ow2:ow2:pom:1.5": { + "layout": "org/ow2/ow2/1.5/ow2-1.5.pom", + "sha256": "0f8a1b116e760b8fe6389c51b84e4b07a70fc11082d4f936e453b583dd50b43b", + "url": "https://repo.maven.apache.org/maven2/org/ow2/ow2/1.5/ow2-1.5.pom" + }, + "org.ow2:ow2:pom:1.5.1": { + "layout": "org/ow2/ow2/1.5.1/ow2-1.5.1.pom", + "sha256": "321ddbb7ee6fe4f53dea6b4cd6db74154d6bfa42391c1f763b361b9f485acf05", + "url": "https://repo.maven.apache.org/maven2/org/ow2/ow2/1.5.1/ow2-1.5.1.pom" + }, + "org.slf4j:jcl-over-slf4j:jar:1.7.21": { + "layout": "org/slf4j/jcl-over-slf4j/1.7.21/jcl-over-slf4j-1.7.21.jar", + "sha256": "686b9dab357b7b665b969bbbf3dcdc67edd88ee9500699e893b5e70927be5e3f", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/jcl-over-slf4j/1.7.21/jcl-over-slf4j-1.7.21.jar" + }, + "org.slf4j:jcl-over-slf4j:jar:1.7.25": { + "layout": "org/slf4j/jcl-over-slf4j/1.7.25/jcl-over-slf4j-1.7.25.jar", + "sha256": "5e938457e79efcbfb3ab64bc29c43ec6c3b95fffcda3c155f4a86cc320c11e14", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/jcl-over-slf4j/1.7.25/jcl-over-slf4j-1.7.25.jar" + }, + "org.slf4j:jcl-over-slf4j:pom:1.5.6": { + "layout": "org/slf4j/jcl-over-slf4j/1.5.6/jcl-over-slf4j-1.5.6.pom", + "sha256": "d71d7748e68bb9cb7ad38b95d17c0466e31fc1f4d15bb1e635f3ebad34a38ff3", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/jcl-over-slf4j/1.5.6/jcl-over-slf4j-1.5.6.pom" + }, + "org.slf4j:jcl-over-slf4j:pom:1.7.21": { + "layout": "org/slf4j/jcl-over-slf4j/1.7.21/jcl-over-slf4j-1.7.21.pom", + "sha256": "2cbaf19a11eaefe2e7ef7d3b7c293bbc0c87974879cdc66420fa97bc779f55b2", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/jcl-over-slf4j/1.7.21/jcl-over-slf4j-1.7.21.pom" + }, + "org.slf4j:jcl-over-slf4j:pom:1.7.25": { + "layout": "org/slf4j/jcl-over-slf4j/1.7.25/jcl-over-slf4j-1.7.25.pom", + "sha256": "f318976d3d4bd3f36a9bab47af4b17eaf671603e3d7a92e6c67b2004462e0f2d", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/jcl-over-slf4j/1.7.25/jcl-over-slf4j-1.7.25.pom" + }, + "org.slf4j:slf4j-api:jar:1.7.21": { + "layout": "org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar", + "sha256": "1d5aeb6bd98b0fdd151269eae941c05f6468a791ea0f1e68d8e7fe518af3e7df", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar" + }, + "org.slf4j:slf4j-api:jar:1.7.25": { + "layout": "org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar", + "sha256": "18c4a0095d5c1da6b817592e767bb23d29dd2f560ad74df75ff3961dbde25b79", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar" + }, + "org.slf4j:slf4j-api:jar:1.7.28": { + "layout": "org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar", + "sha256": "fb6e4f67a2a4689e3e713584db17a5d1090c1ebe6eec30e9e0349a6ee118141e", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar" + }, + "org.slf4j:slf4j-api:jar:1.7.32": { + "layout": "org/slf4j/slf4j-api/1.7.32/slf4j-api-1.7.32.jar", + "sha256": "3624f8474c1af46d75f98bc097d7864a323c81b3808aa43689a6e1c601c027be", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.32/slf4j-api-1.7.32.jar" + }, + "org.slf4j:slf4j-api:jar:1.7.5": { + "layout": "org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar", + "sha256": "fe30825245d2336c859dc38d60c0fc5f3668dbf29cd586828d2b5667ec355b91", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar" + 
}, + "org.slf4j:slf4j-api:jar:1.7.7": { + "layout": "org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.jar", + "sha256": "69980c038ca1b131926561591617d9c25fabfc7b29828af91597ca8570cf35fe", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.jar" + }, + "org.slf4j:slf4j-api:pom:1.5.6": { + "layout": "org/slf4j/slf4j-api/1.5.6/slf4j-api-1.5.6.pom", + "sha256": "91b0a0d016b8c0cba1ddd8a5c59e5bf5c2a9b49b95577e8e38927a7fdff55ce8", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.5.6/slf4j-api-1.5.6.pom" + }, + "org.slf4j:slf4j-api:pom:1.6.1": { + "layout": "org/slf4j/slf4j-api/1.6.1/slf4j-api-1.6.1.pom", + "sha256": "069ba3837bdf8bdb4807648f1579fd43bc292b8d76a6f31b1574867dd93bca56", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.6.1/slf4j-api-1.6.1.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.0": { + "layout": "org/slf4j/slf4j-api/1.7.0/slf4j-api-1.7.0.pom", + "sha256": "d98851d71c708f3bf64f7343b1f1ff8c4720ee265b5f603ad2b1dc31d2acdef8", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.0/slf4j-api-1.7.0.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.1": { + "layout": "org/slf4j/slf4j-api/1.7.1/slf4j-api-1.7.1.pom", + "sha256": "795b974fb010024dbcdedaa18ae587947d04ca3549d961d16e7ecd273c2c1a85", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.1/slf4j-api-1.7.1.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.10": { + "layout": "org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.pom", + "sha256": "af9c5e8d2263422c74792ddd91b3cc1a24bd02b451b54cbb10cd6f2ba46c14b1", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.11": { + "layout": "org/slf4j/slf4j-api/1.7.11/slf4j-api-1.7.11.pom", + "sha256": "fc10dbe561cab3bf17b93f16628203544bfe26c1299705c7956df82a1034ffdb", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.11/slf4j-api-1.7.11.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.12": { + "layout": "org/slf4j/slf4j-api/1.7.12/slf4j-api-1.7.12.pom", + "sha256": "5e81ff69125e5ecddbbbee5cee28aa5d65cfcb366ed66501bc678eb9dd66a4fc", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.12/slf4j-api-1.7.12.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.13": { + "layout": "org/slf4j/slf4j-api/1.7.13/slf4j-api-1.7.13.pom", + "sha256": "5aab0ad8d0ee4933f57abdbedc8a3c2a1d6a5f7bd7135efbc407a29d8872abae", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.13/slf4j-api-1.7.13.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.14": { + "layout": "org/slf4j/slf4j-api/1.7.14/slf4j-api-1.7.14.pom", + "sha256": "91344d1ad9ca5cc3c838a071e49b5eef088fd053db091c3489a435065a5b02dc", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.14/slf4j-api-1.7.14.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.15": { + "layout": "org/slf4j/slf4j-api/1.7.15/slf4j-api-1.7.15.pom", + "sha256": "20f01dcf0712975bc3ac14c3666e20469a5389e97674c6eb45e2e6d0ab9ef300", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.15/slf4j-api-1.7.15.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.16": { + "layout": "org/slf4j/slf4j-api/1.7.16/slf4j-api-1.7.16.pom", + "sha256": "ac627333a3a1ea5d2880e9a099d805d240b12263b2b0a58295cac46732bc529b", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.16/slf4j-api-1.7.16.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.18": { + "layout": "org/slf4j/slf4j-api/1.7.18/slf4j-api-1.7.18.pom", + "sha256": "ac80a2f62aef13ce3d07d216e7ade8c8c3e56d45d1d015bab673d449195b6a9f", + "url": 
"https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.18/slf4j-api-1.7.18.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.19": { + "layout": "org/slf4j/slf4j-api/1.7.19/slf4j-api-1.7.19.pom", + "sha256": "92df29f5fb61808ca8cdf3fc8c74d7df80b24bc7b14d79a7e39d15112e509c15", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.19/slf4j-api-1.7.19.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.2": { + "layout": "org/slf4j/slf4j-api/1.7.2/slf4j-api-1.7.2.pom", + "sha256": "2eaca71afe0a1516f4abd8e9ff907838d268f38c81c3a542cce8d7f3b87c5d4c", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.2/slf4j-api-1.7.2.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.20": { + "layout": "org/slf4j/slf4j-api/1.7.20/slf4j-api-1.7.20.pom", + "sha256": "7b8b2a869b448c87a185aeae38e86f6e611677ee99d4ef8422098f5255ec5d35", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.20/slf4j-api-1.7.20.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.21": { + "layout": "org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.pom", + "sha256": "5f2bdf26ffa2dae84af2c9df8438532a6c703a5e9f7f998e9b1cd600fdd54fdc", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.22": { + "layout": "org/slf4j/slf4j-api/1.7.22/slf4j-api-1.7.22.pom", + "sha256": "71e0385f64d410a2ccb8cd31abff64d7b44241c4214a8a5ee3a51d8ba94f2da2", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.22/slf4j-api-1.7.22.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.23": { + "layout": "org/slf4j/slf4j-api/1.7.23/slf4j-api-1.7.23.pom", + "sha256": "d3527b9a8db91755b498c6326a605cf196b59589ae76ee823f5b774713e1e917", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.23/slf4j-api-1.7.23.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.24": { + "layout": "org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.pom", + "sha256": "d7a15266d22fadc1c81bbc3b62637801d116e484eb6c831217dc36c537be48d2", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.25": { + "layout": "org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.pom", + "sha256": "7cd9d7a0b5d93dfd461a148891b43509cf403a9c7f9fb49060d3554df1c81e1e", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.28": { + "layout": "org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.pom", + "sha256": "61f10feac576665b68caa6170cd423e8fb00055f1fad7ad9d7de2150e5f15caa", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.3": { + "layout": "org/slf4j/slf4j-api/1.7.3/slf4j-api-1.7.3.pom", + "sha256": "14b460d975c6c8e48c26688a449719522b970d30675a36246d727cf3ad4283e8", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.3/slf4j-api-1.7.3.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.32": { + "layout": "org/slf4j/slf4j-api/1.7.32/slf4j-api-1.7.32.pom", + "sha256": "001cde5b3c6ba91070425cfe9f2e695e4aeb8bc290a2d4cd96531127ab244fe5", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.32/slf4j-api-1.7.32.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.36": { + "layout": "org/slf4j/slf4j-api/1.7.36/slf4j-api-1.7.36.pom", + "sha256": "fb046a9c229437928bb11c2d27c8b5d773eb8a25e60cbd253d985210dedc2684", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.36/slf4j-api-1.7.36.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.4": { + "layout": 
"org/slf4j/slf4j-api/1.7.4/slf4j-api-1.7.4.pom", + "sha256": "850339608f1a2577539e58aa9e1421fd97c35438aaabf85c5789061766f4d7c1", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.4/slf4j-api-1.7.4.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.5": { + "layout": "org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.pom", + "sha256": "afaf8e74019b230d3f56fdd7c93fb1070c0dca34f3d2d5ab5dea9fc616bd5ca4", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.6": { + "layout": "org/slf4j/slf4j-api/1.7.6/slf4j-api-1.7.6.pom", + "sha256": "7a9e7276c1d353d2b2014bb4ee20446ef016a99e72d9b7b7ac3b380cbd0c3685", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.6/slf4j-api-1.7.6.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.7": { + "layout": "org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.pom", + "sha256": "353904a7a6c28304d0dcac80fad30c48cd898f6db03b93f05ad6c90dd42d98ad", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.8": { + "layout": "org/slf4j/slf4j-api/1.7.8/slf4j-api-1.7.8.pom", + "sha256": "6014130be2a2b7638b25c1d4a098c975311fc032df075f5279517fc9bb597c34", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.8/slf4j-api-1.7.8.pom" + }, + "org.slf4j:slf4j-api:pom:1.7.9": { + "layout": "org/slf4j/slf4j-api/1.7.9/slf4j-api-1.7.9.pom", + "sha256": "6a80b1821e5d6956cf78ec4c0ce3c640c7f1141edd4885fae3e0b060797cca01", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/1.7.9/slf4j-api-1.7.9.pom" + }, + "org.slf4j:slf4j-jdk14:pom:1.5.6": { + "layout": "org/slf4j/slf4j-jdk14/1.5.6/slf4j-jdk14-1.5.6.pom", + "sha256": "85f97344eeeed2714eda6f800aa712c1aa7405b0d7f98e207499363f82f37eec", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-jdk14/1.5.6/slf4j-jdk14-1.5.6.pom" + }, + "org.slf4j:slf4j-log4j12:jar:1.7.28": { + "layout": "org/slf4j/slf4j-log4j12/1.7.28/slf4j-log4j12-1.7.28.jar", + "sha256": "ad926e9a170d96519eab7553c94462d7849d55c4886c9b5d386782090e321fd6", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-log4j12/1.7.28/slf4j-log4j12-1.7.28.jar" + }, + "org.slf4j:slf4j-log4j12:pom:1.7.28": { + "layout": "org/slf4j/slf4j-log4j12/1.7.28/slf4j-log4j12-1.7.28.pom", + "sha256": "573ab7568bfd4b9a994958059d93c4bce396b90afbedc7bdcfd617fdf98d9716", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-log4j12/1.7.28/slf4j-log4j12-1.7.28.pom" + }, + "org.slf4j:slf4j-parent:pom:1.5.6": { + "layout": "org/slf4j/slf4j-parent/1.5.6/slf4j-parent-1.5.6.pom", + "sha256": "b9d17d6f915b389c7dd77f170d6fcc77f1c7d6c7362fefb146043d8412defddd", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.5.6/slf4j-parent-1.5.6.pom" + }, + "org.slf4j:slf4j-parent:pom:1.6.1": { + "layout": "org/slf4j/slf4j-parent/1.6.1/slf4j-parent-1.6.1.pom", + "sha256": "34d6e74c1f1658771b0ae6a97a454a3ad208e4edba3a2d4ba00550e6f45ff702", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.6.1/slf4j-parent-1.6.1.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.0": { + "layout": "org/slf4j/slf4j-parent/1.7.0/slf4j-parent-1.7.0.pom", + "sha256": "89365fe9d64ec6a12cd08d71cceb239ab738403e69bbd49e0e80aff3e6c91e64", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.0/slf4j-parent-1.7.0.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.1": { + "layout": "org/slf4j/slf4j-parent/1.7.1/slf4j-parent-1.7.1.pom", + "sha256": 
"749d92147c32c4d745c3f32fcecda9f17e0e8e5b2baaa6d5a3c992dad732edb3", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.1/slf4j-parent-1.7.1.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.10": { + "layout": "org/slf4j/slf4j-parent/1.7.10/slf4j-parent-1.7.10.pom", + "sha256": "1abee7f5182fb79b4926fb64658af8c3248ed6b374f9ac7da1fd9e8b9197e2ce", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.10/slf4j-parent-1.7.10.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.11": { + "layout": "org/slf4j/slf4j-parent/1.7.11/slf4j-parent-1.7.11.pom", + "sha256": "293d46a77ee9a506de56d78937b194bee49ec482172399c8ca2e29b1ea4d0b1d", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.11/slf4j-parent-1.7.11.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.12": { + "layout": "org/slf4j/slf4j-parent/1.7.12/slf4j-parent-1.7.12.pom", + "sha256": "46230ffafa0cfa4b7dc50f31b13cd6b468c37fe8bc32fe7f8418b180cfee921f", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.12/slf4j-parent-1.7.12.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.13": { + "layout": "org/slf4j/slf4j-parent/1.7.13/slf4j-parent-1.7.13.pom", + "sha256": "1205f1d1c1b7ab4c2cedf7a49d455083feead2983d4f1e66d9207450cdac6318", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.13/slf4j-parent-1.7.13.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.14": { + "layout": "org/slf4j/slf4j-parent/1.7.14/slf4j-parent-1.7.14.pom", + "sha256": "90c0cb09ff26f42fb86e3c827c23194b4b25ffefdaa2e0d86aee23077b290a9f", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.14/slf4j-parent-1.7.14.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.15": { + "layout": "org/slf4j/slf4j-parent/1.7.15/slf4j-parent-1.7.15.pom", + "sha256": "721584ed3580963208eb291753aa6d3156b4901c63ed1fd76bc5e78ceb9d11ad", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.15/slf4j-parent-1.7.15.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.16": { + "layout": "org/slf4j/slf4j-parent/1.7.16/slf4j-parent-1.7.16.pom", + "sha256": "4d89aff01688aee32594237f63ce2bef60d332d44ea696483af664c14732afde", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.16/slf4j-parent-1.7.16.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.18": { + "layout": "org/slf4j/slf4j-parent/1.7.18/slf4j-parent-1.7.18.pom", + "sha256": "55452947cfa57470dcff558baed441f0922f0dbb213b044ff6846968d6ec5a3c", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.18/slf4j-parent-1.7.18.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.19": { + "layout": "org/slf4j/slf4j-parent/1.7.19/slf4j-parent-1.7.19.pom", + "sha256": "75f0c9ba46b2ac29c3a3f41c2109f50d24ff0617d7c81afdaafbce18458665c1", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.19/slf4j-parent-1.7.19.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.2": { + "layout": "org/slf4j/slf4j-parent/1.7.2/slf4j-parent-1.7.2.pom", + "sha256": "1d8e084a6f2384ade42685332b52a0ece090478641dc14c0fa8c52e1e2984425", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.2/slf4j-parent-1.7.2.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.20": { + "layout": "org/slf4j/slf4j-parent/1.7.20/slf4j-parent-1.7.20.pom", + "sha256": "85392db1df1321125708d25080bacaeafc33a084bd71584367b23808a9c2a4c4", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.20/slf4j-parent-1.7.20.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.21": { + "layout": "org/slf4j/slf4j-parent/1.7.21/slf4j-parent-1.7.21.pom", + 
"sha256": "7aaa257f5ded516c93c07586c1ed139ed50ee5a4ff619e6f58bba17f49d13bae", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.21/slf4j-parent-1.7.21.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.22": { + "layout": "org/slf4j/slf4j-parent/1.7.22/slf4j-parent-1.7.22.pom", + "sha256": "c0628634dd85a2892e421533c54bc083302b6e77369bb90418ad5852db422425", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.22/slf4j-parent-1.7.22.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.23": { + "layout": "org/slf4j/slf4j-parent/1.7.23/slf4j-parent-1.7.23.pom", + "sha256": "db43711d353748269e4c28730731cd6dc44a5d9781996f34d95fe1e359d45ef1", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.23/slf4j-parent-1.7.23.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.24": { + "layout": "org/slf4j/slf4j-parent/1.7.24/slf4j-parent-1.7.24.pom", + "sha256": "36367d127a586d502b60bf54db2d7397ec94573620eff523873ec12a12e54839", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.24/slf4j-parent-1.7.24.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.25": { + "layout": "org/slf4j/slf4j-parent/1.7.25/slf4j-parent-1.7.25.pom", + "sha256": "18f5c52120db036e88d6136f8839c832d074bdda95c756c6f429249d2db54ac6", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.25/slf4j-parent-1.7.25.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.28": { + "layout": "org/slf4j/slf4j-parent/1.7.28/slf4j-parent-1.7.28.pom", + "sha256": "919b5f42dde33ace036865d1e2b292d98a0627417ff756e0287f4a56ad3e544e", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.28/slf4j-parent-1.7.28.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.3": { + "layout": "org/slf4j/slf4j-parent/1.7.3/slf4j-parent-1.7.3.pom", + "sha256": "6403f6016e1b0330a87fd655eda5841823e64f97cbee71570a64163c5b677c18", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.3/slf4j-parent-1.7.3.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.32": { + "layout": "org/slf4j/slf4j-parent/1.7.32/slf4j-parent-1.7.32.pom", + "sha256": "5ab349d0f4c7bc08ed0ef1f4d9386cb1940a2f4d6f152150e16dbbecc0b83c70", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.32/slf4j-parent-1.7.32.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.36": { + "layout": "org/slf4j/slf4j-parent/1.7.36/slf4j-parent-1.7.36.pom", + "sha256": "bb388d37fbcdd3cde64c3cede21838693218dc451f04040c5df360a78ed7e812", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.36/slf4j-parent-1.7.36.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.4": { + "layout": "org/slf4j/slf4j-parent/1.7.4/slf4j-parent-1.7.4.pom", + "sha256": "fced87c3204ff000a6de0da0757ca3ca033718fef4c9195c6cd40c6ab97c4802", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.4/slf4j-parent-1.7.4.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.5": { + "layout": "org/slf4j/slf4j-parent/1.7.5/slf4j-parent-1.7.5.pom", + "sha256": "c43bc5a022dbfd9de82be232dffe46208cbc7de12c14385b5da824e331e535bb", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.5/slf4j-parent-1.7.5.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.6": { + "layout": "org/slf4j/slf4j-parent/1.7.6/slf4j-parent-1.7.6.pom", + "sha256": "69d805262ede4eac3c0f15825c8b1343a6f741ad96892c3c4f04f196d84fe206", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.6/slf4j-parent-1.7.6.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.7": { + "layout": "org/slf4j/slf4j-parent/1.7.7/slf4j-parent-1.7.7.pom", + "sha256": 
"1dffae3ce768d0547e261c96898cf5d9d194bffd5600f597c9751cc6a73d33d4", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.7/slf4j-parent-1.7.7.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.8": { + "layout": "org/slf4j/slf4j-parent/1.7.8/slf4j-parent-1.7.8.pom", + "sha256": "e23b32b789a09a72938a8198c65ced598f74e59cb36948bac341a8c0b299d98a", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.8/slf4j-parent-1.7.8.pom" + }, + "org.slf4j:slf4j-parent:pom:1.7.9": { + "layout": "org/slf4j/slf4j-parent/1.7.9/slf4j-parent-1.7.9.pom", + "sha256": "3c3a4f5786c75b8df8ef1addce66ccb4bb8bd2ff874069e0e6c85d4b4576a748", + "url": "https://repo.maven.apache.org/maven2/org/slf4j/slf4j-parent/1.7.9/slf4j-parent-1.7.9.pom" + }, + "org.sonatype.aether:aether-api:jar:1.7": { + "layout": "org/sonatype/aether/aether-api/1.7/aether-api-1.7.jar", + "sha256": "1c5c5ac5e8f29aefc8faa051ffa14eccd85b9e20f4bb35dc82fba7d5da50d326", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-api/1.7/aether-api-1.7.jar" + }, + "org.sonatype.aether:aether-api:pom:1.7": { + "layout": "org/sonatype/aether/aether-api/1.7/aether-api-1.7.pom", + "sha256": "e855b04820e58822bda1ab448f7b29e2fccf363f1b2ca95c8c05f2d625b28928", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-api/1.7/aether-api-1.7.pom" + }, + "org.sonatype.aether:aether-impl:jar:1.7": { + "layout": "org/sonatype/aether/aether-impl/1.7/aether-impl-1.7.jar", + "sha256": "288149850d8d131763df4151f7e443fd2739e48510a6e4cfe49ca082c76130fa", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-impl/1.7/aether-impl-1.7.jar" + }, + "org.sonatype.aether:aether-impl:pom:1.7": { + "layout": "org/sonatype/aether/aether-impl/1.7/aether-impl-1.7.pom", + "sha256": "0cf0bc1966c54645ed9702538158cc4a363861905470991616f4dabd4030e851", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-impl/1.7/aether-impl-1.7.pom" + }, + "org.sonatype.aether:aether-parent:pom:1.7": { + "layout": "org/sonatype/aether/aether-parent/1.7/aether-parent-1.7.pom", + "sha256": "29004012161043936443d59574924e0406a2326f53943f02eca7944b33c169df", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-parent/1.7/aether-parent-1.7.pom" + }, + "org.sonatype.aether:aether-spi:jar:1.7": { + "layout": "org/sonatype/aether/aether-spi/1.7/aether-spi-1.7.jar", + "sha256": "f54a0a28ce3d62af0e1cfe41dde616f645c28e452e77f77b78bc36e74d5e1a69", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-spi/1.7/aether-spi-1.7.jar" + }, + "org.sonatype.aether:aether-spi:pom:1.7": { + "layout": "org/sonatype/aether/aether-spi/1.7/aether-spi-1.7.pom", + "sha256": "a5a8a19df914af051d29eeb4084189a118c8c301054df41472d9f180ddcc6747", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-spi/1.7/aether-spi-1.7.pom" + }, + "org.sonatype.aether:aether-util:jar:1.7": { + "layout": "org/sonatype/aether/aether-util/1.7/aether-util-1.7.jar", + "sha256": "ff690ffc550b7ada3a4b79ef4ca89bf002b24f43a13a35d10195c3bba63d7654", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-util/1.7/aether-util-1.7.jar" + }, + "org.sonatype.aether:aether-util:pom:1.7": { + "layout": "org/sonatype/aether/aether-util/1.7/aether-util-1.7.pom", + "sha256": "0342bdcbd23208534dde58819ddf937aabbe3d61a47231ffb06632fb47dd2657", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/aether/aether-util/1.7/aether-util-1.7.pom" + }, + "org.sonatype.forge:forge-parent:pom:10": 
{ + "layout": "org/sonatype/forge/forge-parent/10/forge-parent-10.pom", + "sha256": "c14fb9c32b59cc03251f609416db7c0cff01f811edcccb4f6a865d6e7046bd0b", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/forge/forge-parent/10/forge-parent-10.pom" + }, + "org.sonatype.forge:forge-parent:pom:3": { + "layout": "org/sonatype/forge/forge-parent/3/forge-parent-3.pom", + "sha256": "03263b68791fb11e7464ffcc2c3de7eaeae235de8c94827ce6407e0454d2aae9", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/forge/forge-parent/3/forge-parent-3.pom" + }, + "org.sonatype.forge:forge-parent:pom:4": { + "layout": "org/sonatype/forge/forge-parent/4/forge-parent-4.pom", + "sha256": "1838d132479005b4b7459b798e9d9915515090c288082fdcd86db0b10983a24c", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/forge/forge-parent/4/forge-parent-4.pom" + }, + "org.sonatype.forge:forge-parent:pom:5": { + "layout": "org/sonatype/forge/forge-parent/5/forge-parent-5.pom", + "sha256": "e56188aa8ce51278006aa90bc7e0f304a81e2f1219f462e7d21f262535cd2795", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/forge/forge-parent/5/forge-parent-5.pom" + }, + "org.sonatype.forge:forge-parent:pom:6": { + "layout": "org/sonatype/forge/forge-parent/6/forge-parent-6.pom", + "sha256": "9c5f7cd5226ac8c3798cb1f800c031f7dedc1606dc50dc29567877c8224459a7", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/forge/forge-parent/6/forge-parent-6.pom" + }, + "org.sonatype.oss:oss-parent:pom:5": { + "layout": "org/sonatype/oss/oss-parent/5/oss-parent-5.pom", + "sha256": "1678d4120a585d8a630131aeec4c524d928398583b7eab616ee7d5a87f520d3d", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/oss/oss-parent/5/oss-parent-5.pom" + }, + "org.sonatype.oss:oss-parent:pom:6": { + "layout": "org/sonatype/oss/oss-parent/6/oss-parent-6.pom", + "sha256": "b4306d13e8f5392458a1b30866f1cff161b3d2e6999a88d059eea3932c8a8499", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/oss/oss-parent/6/oss-parent-6.pom" + }, + "org.sonatype.oss:oss-parent:pom:7": { + "layout": "org/sonatype/oss/oss-parent/7/oss-parent-7.pom", + "sha256": "b51f8867c92b6a722499557fc3a1fdea77bdf9ef574722fe90ce436a29559454", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/oss/oss-parent/7/oss-parent-7.pom" + }, + "org.sonatype.oss:oss-parent:pom:9": { + "layout": "org/sonatype/oss/oss-parent/9/oss-parent-9.pom", + "sha256": "fb40265f982548212ff82e362e59732b2187ec6f0d80182885c14ef1f982827a", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/oss/oss-parent/9/oss-parent-9.pom" + }, + "org.sonatype.plexus:plexus-build-api:jar:0.0.4": { + "layout": "org/sonatype/plexus/plexus-build-api/0.0.4/plexus-build-api-0.0.4.jar", + "sha256": "d2d415ba26078a84e97816fd444361def86dec65a23b4278d95cfb1c285f2649", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/plexus/plexus-build-api/0.0.4/plexus-build-api-0.0.4.jar" + }, + "org.sonatype.plexus:plexus-build-api:pom:0.0.4": { + "layout": "org/sonatype/plexus/plexus-build-api/0.0.4/plexus-build-api-0.0.4.pom", + "sha256": "9bd824511766b9f512d7badbf24add39ece050c75e07d6cacc954517f49ea465", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/plexus/plexus-build-api/0.0.4/plexus-build-api-0.0.4.pom" + }, + "org.sonatype.plexus:plexus-cipher:jar:1.4": { + "layout": "org/sonatype/plexus/plexus-cipher/1.4/plexus-cipher-1.4.jar", + "sha256": "5a15fdba22669e0fdd06e10dcce6320879e1f7398fbc910cd0677b50672a78c4", + "url": 
"https://repo.maven.apache.org/maven2/org/sonatype/plexus/plexus-cipher/1.4/plexus-cipher-1.4.jar" + }, + "org.sonatype.plexus:plexus-cipher:pom:1.4": { + "layout": "org/sonatype/plexus/plexus-cipher/1.4/plexus-cipher-1.4.pom", + "sha256": "a63a2e23988cca7fac6c93886d6f0506fd26d88d7e8cc0cb89b9c6e0d6c994ad", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/plexus/plexus-cipher/1.4/plexus-cipher-1.4.pom" + }, + "org.sonatype.plexus:plexus-sec-dispatcher:jar:1.3": { + "layout": "org/sonatype/plexus/plexus-sec-dispatcher/1.3/plexus-sec-dispatcher-1.3.jar", + "sha256": "3b0559bb8432f28937efe6ca193ef54a8506d0075d73fd7406b9b116c6a11063", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/plexus/plexus-sec-dispatcher/1.3/plexus-sec-dispatcher-1.3.jar" + }, + "org.sonatype.plexus:plexus-sec-dispatcher:pom:1.3": { + "layout": "org/sonatype/plexus/plexus-sec-dispatcher/1.3/plexus-sec-dispatcher-1.3.pom", + "sha256": "d5e650c50ef6958c028ed024b59af04cf3d38e1453a77d542b6b484bc0f4ca0b", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/plexus/plexus-sec-dispatcher/1.3/plexus-sec-dispatcher-1.3.pom" + }, + "org.sonatype.sisu.inject:guice-bean:pom:1.4.2": { + "layout": "org/sonatype/sisu/inject/guice-bean/1.4.2/guice-bean-1.4.2.pom", + "sha256": "d2ee7efbcdc82206c69559548aef86a99add95378f03cc58b4d9696b3969c8bb", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/inject/guice-bean/1.4.2/guice-bean-1.4.2.pom" + }, + "org.sonatype.sisu.inject:guice-bean:pom:1.4.3.2": { + "layout": "org/sonatype/sisu/inject/guice-bean/1.4.3.2/guice-bean-1.4.3.2.pom", + "sha256": "30f0583957412e83d41f242e1acccea667441b4d2ef6a2373dd6e2cb9417cbed", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/inject/guice-bean/1.4.3.2/guice-bean-1.4.3.2.pom" + }, + "org.sonatype.sisu.inject:guice-parent:pom:2.9.2": { + "layout": "org/sonatype/sisu/inject/guice-parent/2.9.2/guice-parent-2.9.2.pom", + "sha256": "58b60eec5bdb7df503e8b61b96ff3655dfc66726cfa0d2e28ace213bcfe27690", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/inject/guice-parent/2.9.2/guice-parent-2.9.2.pom" + }, + "org.sonatype.sisu.inject:guice-plexus:pom:1.4.2": { + "layout": "org/sonatype/sisu/inject/guice-plexus/1.4.2/guice-plexus-1.4.2.pom", + "sha256": "13a66ca6e6ad1a186076513eea822db2c3c0e460a983a0a31f4d937de336ad98", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/inject/guice-plexus/1.4.2/guice-plexus-1.4.2.pom" + }, + "org.sonatype.sisu.inject:guice-plexus:pom:1.4.3.2": { + "layout": "org/sonatype/sisu/inject/guice-plexus/1.4.3.2/guice-plexus-1.4.3.2.pom", + "sha256": "c9ab6c98a083cafaecb31e5edb0eb2fd5a282776049613b3207779b1c0700c4b", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/inject/guice-plexus/1.4.3.2/guice-plexus-1.4.3.2.pom" + }, + "org.sonatype.sisu:sisu-guice:jar:no_aop:2.9.2": { + "layout": "org/sonatype/sisu/sisu-guice/2.9.2/sisu-guice-2.9.2-no_aop.jar", + "sha256": "2c8cb09fdbf5ae33c7fbf1634f14ab69eded9c6c501b4656136c32caecc2944b", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-guice/2.9.2/sisu-guice-2.9.2-no_aop.jar" + }, + "org.sonatype.sisu:sisu-guice:jar:noaop:2.1.7": { + "layout": "org/sonatype/sisu/sisu-guice/2.1.7/sisu-guice-2.1.7-noaop.jar", + "sha256": "240113a2f22fd1f0b182b32baecf0e7876b3a8e41f3c4da3335eeb9ffb24b9f4", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-guice/2.1.7/sisu-guice-2.1.7-noaop.jar" + }, + "org.sonatype.sisu:sisu-guice:pom:2.1.7": { + "layout": 
"org/sonatype/sisu/sisu-guice/2.1.7/sisu-guice-2.1.7.pom", + "sha256": "2b3f02f2d0ec3e95884f9ab415596ce627492469c2d8fd75e3fb00fb69532c44", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-guice/2.1.7/sisu-guice-2.1.7.pom" + }, + "org.sonatype.sisu:sisu-guice:pom:2.9.2": { + "layout": "org/sonatype/sisu/sisu-guice/2.9.2/sisu-guice-2.9.2.pom", + "sha256": "38e47a7b42e8b8b71890cbf29e8f686bc4372b261d258bcffdb5d99c8cfd1322", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-guice/2.9.2/sisu-guice-2.9.2.pom" + }, + "org.sonatype.sisu:sisu-inject-bean:jar:1.4.2": { + "layout": "org/sonatype/sisu/sisu-inject-bean/1.4.2/sisu-inject-bean-1.4.2.jar", + "sha256": "fb3160e1e3a7852b441016dbcc97a34e3cf4eeb8ceb9e82edf2729439858f080", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-bean/1.4.2/sisu-inject-bean-1.4.2.jar" + }, + "org.sonatype.sisu:sisu-inject-bean:jar:1.4.3.2": { + "layout": "org/sonatype/sisu/sisu-inject-bean/1.4.3.2/sisu-inject-bean-1.4.3.2.jar", + "sha256": "1c9e4ed96eb7c5ae9f9cae9eddda0e71f387dcb48ef87ba317ca1f31fca44d6b", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-bean/1.4.3.2/sisu-inject-bean-1.4.3.2.jar" + }, + "org.sonatype.sisu:sisu-inject-bean:pom:1.4.2": { + "layout": "org/sonatype/sisu/sisu-inject-bean/1.4.2/sisu-inject-bean-1.4.2.pom", + "sha256": "06d75dd6f2a0dc9ea6bf73a67491ba4790f92251c654bf4925511e5e4f48f1df", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-bean/1.4.2/sisu-inject-bean-1.4.2.pom" + }, + "org.sonatype.sisu:sisu-inject-bean:pom:1.4.3.2": { + "layout": "org/sonatype/sisu/sisu-inject-bean/1.4.3.2/sisu-inject-bean-1.4.3.2.pom", + "sha256": "441c5ddd03404d01df6cd18c1c4a6305f073f000bd2046e694d45545b9babf39", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-bean/1.4.3.2/sisu-inject-bean-1.4.3.2.pom" + }, + "org.sonatype.sisu:sisu-inject-plexus:jar:1.4.2": { + "layout": "org/sonatype/sisu/sisu-inject-plexus/1.4.2/sisu-inject-plexus-1.4.2.jar", + "sha256": "a65e27aefbe74102d73cd7e3c5c7637021d294a9e7f33132f3c782a76714d0a3", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-plexus/1.4.2/sisu-inject-plexus-1.4.2.jar" + }, + "org.sonatype.sisu:sisu-inject-plexus:jar:1.4.3.2": { + "layout": "org/sonatype/sisu/sisu-inject-plexus/1.4.3.2/sisu-inject-plexus-1.4.3.2.jar", + "sha256": "28ae3447e5dc7e3059b039bdff80d6e7e58c0b9af71c5bf19141bce2daae009f", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-plexus/1.4.3.2/sisu-inject-plexus-1.4.3.2.jar" + }, + "org.sonatype.sisu:sisu-inject-plexus:pom:1.4.2": { + "layout": "org/sonatype/sisu/sisu-inject-plexus/1.4.2/sisu-inject-plexus-1.4.2.pom", + "sha256": "e302200cf462cf1af9f3e870738253cdf90d7abc8279b9d3b507a5d0d3b9f289", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-plexus/1.4.2/sisu-inject-plexus-1.4.2.pom" + }, + "org.sonatype.sisu:sisu-inject-plexus:pom:1.4.3.2": { + "layout": "org/sonatype/sisu/sisu-inject-plexus/1.4.3.2/sisu-inject-plexus-1.4.3.2.pom", + "sha256": "27116ec8690e2e3a35c95bc9f00230fc755fc519b1cc116c641c0a836b8f86df", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject-plexus/1.4.3.2/sisu-inject-plexus-1.4.3.2.pom" + }, + "org.sonatype.sisu:sisu-inject:pom:1.4.2": { + "layout": "org/sonatype/sisu/sisu-inject/1.4.2/sisu-inject-1.4.2.pom", + "sha256": "a5991ead85259ba9f8c985d194aace3b069e14bcd8cde68fce928223714d3968", + "url": 
"https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject/1.4.2/sisu-inject-1.4.2.pom" + }, + "org.sonatype.sisu:sisu-inject:pom:1.4.3.2": { + "layout": "org/sonatype/sisu/sisu-inject/1.4.3.2/sisu-inject-1.4.3.2.pom", + "sha256": "8f39a38a7b8d8792b5826b0d7154fe489807101a77254724607e46dae342a32a", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-inject/1.4.3.2/sisu-inject-1.4.3.2.pom" + }, + "org.sonatype.sisu:sisu-parent:pom:1.4.2": { + "layout": "org/sonatype/sisu/sisu-parent/1.4.2/sisu-parent-1.4.2.pom", + "sha256": "abb04084d0885319fd0b372d77655f8feb8aa8bb091699fcd99b45798a9587d5", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-parent/1.4.2/sisu-parent-1.4.2.pom" + }, + "org.sonatype.sisu:sisu-parent:pom:1.4.3.2": { + "layout": "org/sonatype/sisu/sisu-parent/1.4.3.2/sisu-parent-1.4.3.2.pom", + "sha256": "405d910ae065e717c48de9d29fd602d5bc2e9834f54ed62c990eef57c93778ec", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/sisu/sisu-parent/1.4.3.2/sisu-parent-1.4.3.2.pom" + }, + "org.sonatype.spice:spice-parent:pom:10": { + "layout": "org/sonatype/spice/spice-parent/10/spice-parent-10.pom", + "sha256": "683c012c6bf5e1e31b232b3755c027ccd829692a09c8216652b414b09d4ae623", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/spice/spice-parent/10/spice-parent-10.pom" + }, + "org.sonatype.spice:spice-parent:pom:12": { + "layout": "org/sonatype/spice/spice-parent/12/spice-parent-12.pom", + "sha256": "21a19b26dbe5c38ddb5114cf4eadbf5ccb411bc6b128fdd5949b1ccb12f3683e", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/spice/spice-parent/12/spice-parent-12.pom" + }, + "org.sonatype.spice:spice-parent:pom:16": { + "layout": "org/sonatype/spice/spice-parent/16/spice-parent-16.pom", + "sha256": "258f43b5e805687302a5bb400856b972a573ec42a44f949641354a84b9243758", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/spice/spice-parent/16/spice-parent-16.pom" + }, + "org.sonatype.spice:spice-parent:pom:17": { + "layout": "org/sonatype/spice/spice-parent/17/spice-parent-17.pom", + "sha256": "9151f9a5b33ec36ee8778842fc56144fb0242d39cbcc42061b053b8909969bdf", + "url": "https://repo.maven.apache.org/maven2/org/sonatype/spice/spice-parent/17/spice-parent-17.pom" + }, + "org.tukaani:xz:jar:1.6": { + "layout": "org/tukaani/xz/1.6/xz-1.6.jar", + "sha256": "a594643d73cc01928cf6ca5ce100e094ea9d73af760a5d4fb6b75fa673ecec96", + "url": "https://repo.maven.apache.org/maven2/org/tukaani/xz/1.6/xz-1.6.jar" + }, + "org.tukaani:xz:pom:1.6": { + "layout": "org/tukaani/xz/1.6/xz-1.6.pom", + "sha256": "06843f984cf0ad3ecd4196861404ddc4af83ec37a1eb7a1773ab89db02b4020f", + "url": "https://repo.maven.apache.org/maven2/org/tukaani/xz/1.6/xz-1.6.pom" + }, + "org.vafer:jdependency:jar:2.8.0": { + "layout": "org/vafer/jdependency/2.8.0/jdependency-2.8.0.jar", + "sha256": "bfd2cc7e1bfc78aa83b44c0a54bf2cde38a4382ec24728af683d98dc6be78192", + "url": "https://repo.maven.apache.org/maven2/org/vafer/jdependency/2.8.0/jdependency-2.8.0.jar" + }, + "org.vafer:jdependency:pom:2.8.0": { + "layout": "org/vafer/jdependency/2.8.0/jdependency-2.8.0.pom", + "sha256": "101867f3f9e92657a2ef89a310b604d43d090c9b90aa3e0b052dcd16a3bbf32d", + "url": "https://repo.maven.apache.org/maven2/org/vafer/jdependency/2.8.0/jdependency-2.8.0.pom" + }, + "oro:oro:jar:2.0.7": { + "layout": "oro/oro/2.0.7/oro-2.0.7.jar", + "sha256": "36ae6fff1e3ace81739ab168ebc2374513b55965c7c6f7c2ebaeadc2f193f68b", + "url": "https://repo.maven.apache.org/maven2/oro/oro/2.0.7/oro-2.0.7.jar" + }, + 
"oro:oro:jar:2.0.8": { + "layout": "oro/oro/2.0.8/oro-2.0.8.jar", + "sha256": "e00ccdad5df7eb43fdee44232ef64602bf63807c2d133a7be83ba09fd49af26e", + "url": "https://repo.maven.apache.org/maven2/oro/oro/2.0.8/oro-2.0.8.jar" + }, + "oro:oro:pom:2.0.7": { + "layout": "oro/oro/2.0.7/oro-2.0.7.pom", + "sha256": "f230a1141e0db8f150a253c4b29071ad3a0d4791adba1f2a58fd8416286960a5", + "url": "https://repo.maven.apache.org/maven2/oro/oro/2.0.7/oro-2.0.7.pom" + }, + "oro:oro:pom:2.0.8": { + "layout": "oro/oro/2.0.8/oro-2.0.8.pom", + "sha256": "9aa9dfeb2e85e1d5e7932c87140697cecc2b0fadd933d679fd420a2e43831a82", + "url": "https://repo.maven.apache.org/maven2/oro/oro/2.0.8/oro-2.0.8.pom" + }, + "plexus:plexus-containers:pom:1.0.2": { + "layout": "plexus/plexus-containers/1.0.2/plexus-containers-1.0.2.pom", + "sha256": "131ed39a845f96761a46974a2d515235da6fa31eb78de8484466426a49504d16", + "url": "https://repo.maven.apache.org/maven2/plexus/plexus-containers/1.0.2/plexus-containers-1.0.2.pom" + }, + "plexus:plexus-root:pom:1.0.3": { + "layout": "plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom", + "sha256": "45363cc49c9419edf74f9f927deca5d6b08668ed985544165984aea8984a32c5", + "url": "https://repo.maven.apache.org/maven2/plexus/plexus-root/1.0.3/plexus-root-1.0.3.pom" + }, + "plexus:plexus-utils:pom:1.0.2": { + "layout": "plexus/plexus-utils/1.0.2/plexus-utils-1.0.2.pom", + "sha256": "55e55cf810205c43996609a89ae14e0979ec9ccd0fad21cee7dcd6112bf139b3", + "url": "https://repo.maven.apache.org/maven2/plexus/plexus-utils/1.0.2/plexus-utils-1.0.2.pom" + }, + "sslext:sslext:jar:1.2-0": { + "layout": "sslext/sslext/1.2-0/sslext-1.2-0.jar", + "sha256": "4ec193f85bf3c5e84be4ef79fe1e8e71493b317858735cfe062c4c54f818c312", + "url": "https://repo.maven.apache.org/maven2/sslext/sslext/1.2-0/sslext-1.2-0.jar" + }, + "sslext:sslext:pom:1.2-0": { + "layout": "sslext/sslext/1.2-0/sslext-1.2-0.pom", + "sha256": "75929e166762dcef294281f216b2bdcf866bbbc7835a59be1e68bf7032de016a", + "url": "https://repo.maven.apache.org/maven2/sslext/sslext/1.2-0/sslext-1.2-0.pom" + }, + "velocity:velocity-dep:jar:1.4": { + "layout": "velocity/velocity-dep/1.4/velocity-dep-1.4.jar", + "sha256": "ab2aa30ec476779ae0a9a4cc299009ecb40468c175de296ab6a866bd1249597b", + "url": "https://repo.maven.apache.org/maven2/velocity/velocity-dep/1.4/velocity-dep-1.4.jar" + }, + "velocity:velocity-dep:pom:1.4": { + "layout": "velocity/velocity-dep/1.4/velocity-dep-1.4.pom", + "sha256": "1aedc28ee9805a7182a30a881514cef848eb1e7e7f9d5157d59ef0fa0c052eb2", + "url": "https://repo.maven.apache.org/maven2/velocity/velocity-dep/1.4/velocity-dep-1.4.pom" + }, + "velocity:velocity:jar:1.4": { + "layout": "velocity/velocity/1.4/velocity-1.4.jar", + "sha256": "ff9ea1a3ad582e58a40a7bf744661101aa1c1dcb9f4de6a64722ff5d09380853", + "url": "https://repo.maven.apache.org/maven2/velocity/velocity/1.4/velocity-1.4.jar" + }, + "velocity:velocity:pom:1.4": { + "layout": "velocity/velocity/1.4/velocity-1.4.pom", + "sha256": "ac4804914f9f061bbedce3a647d2892243e1501718bc5cd0e50f687b787f0f6b", + "url": "https://repo.maven.apache.org/maven2/velocity/velocity/1.4/velocity-1.4.pom" + }, + "xerces:xercesImpl:jar:2.9.1": { + "layout": "xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.jar", + "sha256": "6ae540a7c85c814ac64bea48016b3a6f45c95d4765f547fcc0053dc36c94ed5c", + "url": "https://repo.maven.apache.org/maven2/xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.jar" + }, + "xerces:xercesImpl:pom:2.9.1": { + "layout": "xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.pom", + "sha256": 
"246900758da9d89b2dd857569fc39c78318304816681dca82b84b9e26f40ce47", + "url": "https://repo.maven.apache.org/maven2/xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.pom" + }, + "xml-apis:xml-apis:jar:1.0.b2": { + "layout": "xml-apis/xml-apis/1.0.b2/xml-apis-1.0.b2.jar", + "sha256": "8232f3482c346d843e5e3fb361055771c1acc105b6d8a189eb9018c55948cf9f", + "url": "https://repo.maven.apache.org/maven2/xml-apis/xml-apis/1.0.b2/xml-apis-1.0.b2.jar" + }, + "xml-apis:xml-apis:jar:1.3.04": { + "layout": "xml-apis/xml-apis/1.3.04/xml-apis-1.3.04.jar", + "sha256": "d404aa881eb9c5f7a4fb546e84ea11506cd417a72b5972e88eff17f43f9f8a64", + "url": "https://repo.maven.apache.org/maven2/xml-apis/xml-apis/1.3.04/xml-apis-1.3.04.jar" + }, + "xml-apis:xml-apis:pom:1.0.b2": { + "layout": "xml-apis/xml-apis/1.0.b2/xml-apis-1.0.b2.pom", + "sha256": "7bc38e7a0f8ca20b0caed607e00cbb144dc8d006ebec4aa193f55dcf391bad50", + "url": "https://repo.maven.apache.org/maven2/xml-apis/xml-apis/1.0.b2/xml-apis-1.0.b2.pom" + }, + "xml-apis:xml-apis:pom:1.3.04": { + "layout": "xml-apis/xml-apis/1.3.04/xml-apis-1.3.04.pom", + "sha256": "35a1fd49d44b41c616d48ca99097a32efa2b64e1b3739fbac6fbf36e3c3b57b1", + "url": "https://repo.maven.apache.org/maven2/xml-apis/xml-apis/1.3.04/xml-apis-1.3.04.pom" + }, + "xml-apis:xml-apis:pom:2.0.2": { + "layout": "xml-apis/xml-apis/2.0.2/xml-apis-2.0.2.pom", + "sha256": "a902d962402c02f4c1a57c5a2303a608a8ac3aef5cd145e1980563447603d606", + "url": "https://repo.maven.apache.org/maven2/xml-apis/xml-apis/2.0.2/xml-apis-2.0.2.pom" + }, + "xmlunit:xmlunit:pom:1.5": { + "layout": "xmlunit/xmlunit/1.5/xmlunit-1.5.pom", + "sha256": "092e64f9d645761c83d9a11969f0233cb7ccf0fa7cf2804437595ec9ca113a1e", + "url": "https://repo.maven.apache.org/maven2/xmlunit/xmlunit/1.5/xmlunit-1.5.pom" + } + } +} diff --git a/nix/default.nix b/nix/default.nix new file mode 100644 index 000000000..12c398a98 --- /dev/null +++ b/nix/default.nix @@ -0,0 +1,14 @@ +{ + getJdk, + gitignoreSource, +}: final: prev: let + jdk = getJdk final.pkgs; + maven = prev.maven.override {inherit jdk;}; +in rec { + kognac = final.pkgs.callPackage ./pkgs/kognac {}; + trident = final.pkgs.callPackage ./pkgs/trident {}; + vlog = final.pkgs.callPackage ./pkgs/vlog {inherit jdk maven;}; + rulewerk = final.pkgs.callPackage ./pkgs/rulewerk {inherit jdk maven gitignoreSource;}; + vlog-debug = final.pkgs.enableDebugging vlog; + rulewerk-debug = final.pkgs.enableDebugging (rulewerk.override {vlog = vlog-debug;}); +} diff --git a/nix/pkgs/kognac/default.nix b/nix/pkgs/kognac/default.nix new file mode 100644 index 000000000..4bda10c39 --- /dev/null +++ b/nix/pkgs/kognac/default.nix @@ -0,0 +1,51 @@ +{ + pkgs, + cacert, + cmake, + git, + lib, + lz4, + sparsehash, + stdenv, + zlib, + ... +}: +stdenv.mkDerivation { + pname = "kognac"; + version = "unstable-2022-08-07"; + src = pkgs.fetchFromGitHub { + owner = "karmaresearch"; + repo = "kognac"; + rev = "ec961644647e2b545cfb859148cde3dff94d317e"; + sha256 = "uliMzYkcaIf3TR2WQkM4o07M3dGF0a4/GYlWCljTlQo="; + }; + + buildInputs = [zlib sparsehash lz4]; + nativeBuildInputs = [cmake git cacert]; + + cmakeFlags = ["-DCMAKE_CXX_FLAGS=-w" "-DCMAKE_SKIP_RPATH=1"]; + # this patch forces CMake to prefer our provided lz4 library. 
+ patches = [./patches/kognac-lz4.patch]; + + installPhase = '' + runHook preInstall + + mkdir -p $out/bin + cp ./kognac_exec $out/bin + + mkdir -p $out/lib + cp ./libkognac-core.so $out/lib/ + + mkdir -p $out/share/include + cp -R $src/include/kognac/ $out/share/include + cp -R $src/include/zstr/ $out/share/include + + runHook postInstall + ''; + + meta = with lib; { + description = "A library handling compressed storage of RDF triples"; + license = licenses.asl20; + homepage = "https://github.com/karmaresearch/kognac"; + }; +} diff --git a/nix/pkgs/kognac/patches/kognac-lz4.patch b/nix/pkgs/kognac/patches/kognac-lz4.patch new file mode 100644 index 000000000..9f10ffbfb --- /dev/null +++ b/nix/pkgs/kognac/patches/kognac-lz4.patch @@ -0,0 +1,56 @@ +From 5c2da49c2228b16130db7980614fa691003389cb Mon Sep 17 00:00:00 2001 +From: Maximilian Marx +Date: Fri, 2 Sep 2022 01:35:35 +0200 +Subject: [PATCH] Patch lz4 + +--- + CMakeLists.txt | 33 +++------------------------------ + 1 file changed, 3 insertions(+), 30 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index f59ff5f..41fe741 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -100,36 +100,9 @@ ENDIF() + + #LZ4 + # we need it statically included, so download it, not only if it cannot be found! +-# find_library(lz4 lz4) +-# find_path (lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. I'm going to download it from the GIT repository ...") +-message("Downloading lz4, static version required") +-IF (DIST AND ${CMAKE_SYSTEM_NAME} STREQUAL "Darwin") +- set(MOREFLAGS "-fPIC -arch x86_64 -arch arm64") +-ELSE() +- set(MOREFLAGS "-fPIC") +-ENDIF() +- +- ExternalProject_Add(git-lz4 +- DOWNLOAD_COMMAND git clone https://github.com/Cyan4973/lz4.git +- DOWNLOAD_DIR external +- SOURCE_DIR external/lz4/ +- CONFIGURE_COMMAND "" +- BUILD_IN_SOURCE 1 +- BUILD_COMMAND make -C lib lib MOREFLAGS=${MOREFLAGS} +- INSTALL_COMMAND "" +- ) +- ExternalProject_Get_Property(git-lz4 SOURCE_DIR) +- include_directories(${SOURCE_DIR}/lib/) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${CMAKE_BINARY_DIR}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +- add_dependencies(lz4 git-lz4) +- add_dependencies(kognac-o lz4) +-#ELSE() +-# include_directories(lz4h) +-#message("Found LZ4, lz4=${lz4}") +-#ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path (lz4h lz4.h) ++include_directories(lz4h) + + #standard include + include_directories(include/) +-- +2.36.2 + diff --git a/nix/pkgs/rulewerk/default.nix b/nix/pkgs/rulewerk/default.nix new file mode 100644 index 000000000..fa20faacf --- /dev/null +++ b/nix/pkgs/rulewerk/default.nix @@ -0,0 +1,139 @@ +{ + pkgs, + buildMavenRepositoryFromLockFile, + gitignoreSource, + curl, + jdk, + lib, + lz4, + makeWrapper, + maven, + sparsehash, + stdenv, + vlog, +}: let + rulewerk-dependencies = buildMavenRepositoryFromLockFile {file = ../../../mvn2nix-lock.json;}; +in + stdenv.mkDerivation rec { + pname = "rulewerk"; + version = "unstable-latest"; + src = gitignoreSource ../../..; + + modules = [ + "core" + "vlog" + "rdf" + "owlapi" + "graal" + "parser" + "commands" + "examples" + "client" + ]; + + buildInputs = [makeWrapper lz4 curl sparsehash]; + nativeBuildInputs = [maven]; + + # this prepares a local maven repository with all the dependencies + # and our local `vlog-java` jar. 
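+ # The local repository is consumed strictly offline: both Maven invocations below pass --offline and -Dmaven.repo.local=$out/lib, so the build works inside the network-isolated Nix sandbox.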
+ preBuild = '' + mkdir -p $out/lib/ + + # provide a local `vlog-java` jar. This will be installed below. + mkdir -p rulewerk-vlog/lib/ + cp ${vlog}/share/java/jvlog.jar rulewerk-vlog/lib/jvlog-local.jar + + # create a local maven repository with all the dependencies. + # Note that we are copying symbolic links, so this will not use much space. + cp -PR ${rulewerk-dependencies}/* $out/lib/ + # make the local repository writable. + chmod -R +w $out/lib/ + + # maven needs the metadata files to resolve version ranges, + # but `buildMavenRepositoryFromLockFile` does not provide them. + # Hack around this by generating the necessary metadata files, + # seemingly all for dependencies of owlapi. + + cat > $out/lib/com/google/guava/guava/maven-metadata-central.xml << EOF + <?xml version="1.0" encoding="UTF-8"?> + <metadata> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + <versioning> + <versions> + <version>22.0</version> + </versions> + </versioning> + </metadata> + EOF + + cat > $out/lib/com/google/code/findbugs/jsr305/maven-metadata-central.xml << EOF + <?xml version="1.0" encoding="UTF-8"?> + <metadata> + <groupId>com.google.code.findbugs</groupId> + <artifactId>jsr305</artifactId> + <versioning> + <versions> + <version>3.0.2</version> + </versions> + </versioning> + </metadata> + EOF + + cat > $out/lib/org/slf4j/slf4j-api/maven-metadata-central.xml << EOF + <?xml version="1.0" encoding="UTF-8"?> + <metadata> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + <versioning> + <versions> + <version>1.7.25</version> + <version>1.7.32</version> + </versions> + </versioning> + </metadata> + EOF + + # install our local `vlog-java` jar. + mvn --offline --no-transfer-progress initialize -Pdevelopment -Dmaven.repo.local=$out/lib + ''; + + # Actually build the rulewerk packages. Skip tests; they are run + # as part of `checkPhase`, which we don't need to specify here, + # since the default is to invoke `mvn verify`. + buildPhase = '' + runHook preBuild + + mvn package -Pclient --offline -Dmaven.repo.local=$out/lib -DskipTests + + runHook postBuild + ''; + + # Collect built jars into the local repository, and provide + # executables for launching rulewerk and maven. + installPhase = '' + runHook preInstall + + # find the version number from the generated artifacts + vers=$(basename ${pname}-core/target/${pname}-core-*.jar | cut -d'-' -f3- | sed -e 's/.jar$//') + + mkdir -p $out/bin $out/share/java + find $out/lib -type f -regex '.+\(\.lastUpdated\|resolver-status\.properties\|_remote\.repositories\|maven-metadata-local\.xml\)' -delete + for module in ${toString modules} + do + cp ${pname}-$module/target/${pname}-$module-$vers.jar $out/share/java + done + + cp rulewerk-client/target/standalone-rulewerk-client-$vers.jar $out/share/java + makeWrapper ${jdk}/bin/java $out/bin/${pname} --add-flags "-jar $out/share/java/standalone-rulewerk-client-$vers.jar" + makeWrapper ${maven}/bin/mvn $out/bin/mvn --add-flags "-DdependenciesFromNix.repo=$out/lib" + + runHook postInstall + ''; + + meta = with lib; { + description = "A Java toolkit for reasoning with existential rules"; + license = licenses.asl20; + homepage = "https://github.com/knowsys/rulewerk"; + }; + } diff --git a/nix/pkgs/trident/default.nix b/nix/pkgs/trident/default.nix new file mode 100644 index 000000000..9e5d4754a --- /dev/null +++ b/nix/pkgs/trident/default.nix @@ -0,0 +1,60 @@ +{ + pkgs, + cacert, + cmake, + git, + kognac, + lib, + lz4, + sparsehash, + stdenv, + zlib, + ...
+}: +stdenv.mkDerivation { + pname = "trident"; + version = "unstable-2022-11-25"; + src = pkgs.fetchFromGitHub { + owner = "karmaresearch"; + repo = "trident"; + rev = "6665f4465451478119721337f65b128f868f2362"; + sha256 = "kcITwU1dVbB/sov7ZzkknSczLtTxWD9HfyFSIOOx9ak="; + }; + + buildInputs = [zlib sparsehash lz4]; + nativeBuildInputs = [cmake git cacert]; + + cmakeFlags = [ + "-DSPARQL=1" + "-DCMAKE_CXX_FLAGS=-w" + "-DCMAKE_SKIP_RPATH=1" + "-DKOGNAC_LIB=${kognac}/lib" + "-DKOGNAC_INC=${kognac}/share/include" + ]; + # this patch forces CMake to prefer our provided lz4 library. + patches = [./patches/trident-lz4.patch]; + + installPhase = '' + runHook preInstall + + mkdir -p $out/bin + cp ./trident $out/bin + + mkdir -p $out/lib + cp ./libtrident-core.so $out/lib/ + cp ./libtrident-sparql.so $out/lib/ + + mkdir -p $out/share/include + cp -R $src/include/trident $out/share/include + cp -R $src/include/layers $out/share/include + cp -R $src/rdf3x/include $out/share/ + + runHook postInstall + ''; + + meta = with lib; { + description = "A read-only RDF triple store"; + license = licenses.asl20; + homepage = "https://github.com/karmaresearch/trident"; + }; +} diff --git a/nix/pkgs/trident/patches/trident-lz4.patch b/nix/pkgs/trident/patches/trident-lz4.patch new file mode 100644 index 000000000..45282b65c --- /dev/null +++ b/nix/pkgs/trident/patches/trident-lz4.patch @@ -0,0 +1,56 @@ +From cb04a5beea80a0a17a19dfc46165430d1fdee6bf Mon Sep 17 00:00:00 2001 +From: Maximilian Marx +Date: Fri, 2 Sep 2022 01:37:02 +0200 +Subject: [PATCH] Patch lz4 + +Signed-off-by: Maximilian Marx +--- + CMakeLists.txt | 32 +++----------------------------- + 1 file changed, 3 insertions(+), 29 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 1340859..cfd943f 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -129,35 +129,9 @@ IF (${sparsehash} STREQUAL "sparsehash-NOTFOUND") + ENDIF() + + #LZ4 +-# find_library(lz4 lz4) +-# find_path(lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. I'm going to use the version from kognac") +- message("I'm going to use LZ4 the version from kognac") +- include_directories(${KOGNAC_LIB}/external/lz4/lib/) +- LINK_DIRECTORIES(${KOGNAC_LIB}/external/lz4/lib) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${KOGNAC_LIB}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +-# ELSE() +-# include_directories(lz4h) +-# message("-- Found LZ4") +-# ENDIF() +- +-#LZ4 +-# find_library(lz4 lz4) +-# find_path (lz4h lz4.h) +-# IF ((${lz4h} STREQUAL "lz4h-NOTFOUND")) +- # message("Could not find LZ4. 
I'm going to use the version from kognac") +- # message("using the lz4 version from kognac") +- # set(LZ4_LIB "${KOGNAC_LIB}/external/lz4/lib") +- # include_directories(${LZ4_LIB}) +- # LINK_DIRECTORIES(${LZ4_LIB}) +- # add_library(lz4 STATIC IMPORTED) +- # set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION "${LZ4_LIB}/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}") +- # ELSE() +- # include_directories(lz4h) +- # message("-- Found LZ4") +-# ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path(lz4h lz4.h) ++include_directories(lz4h) + + #Create the core library + include_directories(include/ rdf3x/include rapidjson/include) +-- +2.36.2 + diff --git a/nix/pkgs/vlog/default.nix b/nix/pkgs/vlog/default.nix new file mode 100644 index 000000000..ab1f51f5f --- /dev/null +++ b/nix/pkgs/vlog/default.nix @@ -0,0 +1,69 @@ +{ + pkgs, + buildMavenRepositoryFromLockFile, + cacert, + cmake, + curl, + git, + jdk, + kognac, + lib, + lz4, + maven, + sparsehash, + stdenv, + trident, + zlib, + ... +}: let + rulewerk-dependencies = buildMavenRepositoryFromLockFile {file = ../../../mvn2nix-lock.json;}; +in + stdenv.mkDerivation rec { + pname = "vlog"; + version = "unstable-2022-11-25"; + src = pkgs.fetchFromGitHub { + owner = "karmaresearch"; + repo = "vlog"; + # rev = "v${version}"; + # 'rev' and 'sha256' point to the latest VLog master branch tag/commit we want to test + rev = "ca63a3c6b32b0c4e5c099b645ff3d51a89212c76"; + sha256 = "uyOSE01zc+D5Fqrex/fUespBKZgh+vDaAN/vE3ZW3RY="; + }; + + buildInputs = [kognac trident sparsehash jdk curl lz4]; + nativeBuildInputs = [cmake git cacert maven]; + + cmakeFlags = [ + "-DJAVA=1" + "-DSPARQL=1" + "-DCMAKE_CXX_FLAGS=-w" + "-DCMAKE_SKIP_RPATH=1" + "-DKOGNAC_LIB=${kognac}/lib" + "-DKOGNAC_INC=${kognac}/share/include" + "-DTRIDENT_LIB=${trident}/lib" + "-DTRIDENT_INC=${trident}/share/include" + ]; + # this patch forces CMake to prefer our provided lz4 library. + patches = [./patches/vlog-lz4.patch]; + + postInstall = '' + mkdir -p $out/bin + cp ./vlog $out/bin + + mkdir -p $out/lib + cp ./libvlog-core.so $out/lib/ + + mkdir -p $out/share/java + # strip timestamps and other non-reproducible information from the jar + mvn --offline --no-transfer-progress io.github.zlika:reproducible-build-maven-plugin:0.16:strip-jar \ + -Dreproducible.includes=./jvlog.jar \ + -Dmaven.repo.local=${rulewerk-dependencies} + cp ./jvlog.jar $out/share/java + ''; + + meta = with lib; { + description = "A reasoner for Datalog and Existential Rules"; + license = licenses.asl20; + homepage = "https://github.crom/karmaresearch/vlog"; + }; + } diff --git a/nix/pkgs/vlog/patches/vlog-lz4.patch b/nix/pkgs/vlog/patches/vlog-lz4.patch new file mode 100644 index 000000000..7b0db6096 --- /dev/null +++ b/nix/pkgs/vlog/patches/vlog-lz4.patch @@ -0,0 +1,41 @@ +From bef7c75789879a55479a2084a7089550628d0d48 Mon Sep 17 00:00:00 2001 +From: Maximilian Marx +Date: Fri, 2 Sep 2022 01:37:23 +0200 +Subject: [PATCH] Patch lz4 + +Signed-off-by: Maximilian Marx +--- + CMakeLists.txt | 17 +++-------------- + 1 file changed, 3 insertions(+), 14 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 289b2f3..9cc598c 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -127,20 +127,9 @@ IF (${sparsehash} STREQUAL "sparsehash-NOTFOUND") + include_directories(${KOGNAC_LIB}/external/sparsehash/src/) + ENDIF() + +-#LZ4 +-# find_library(lz4 lz4) +-# find_path(lz4h lz4.h) +-# IF (${lz4h} STREQUAL "lz4h-NOTFOUND") +-# message("Could not find LZ4. 
I'm going to use the version from kognac") +- message("I'm going to use LZ4 the version from kognac") +- include_directories(${KOGNAC_LIB}/external/lz4/lib/) +- LINK_DIRECTORIES(${KOGNAC_LIB}/external/lz4/lib) +- add_library(lz4 STATIC IMPORTED) +- set_property(TARGET lz4 PROPERTY IMPORTED_LOCATION ${KOGNAC_LIB}/external/lz4/lib/${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) +-# ELSE() +-# include_directories(lz4h) +-# message("-- Found LZ4") +-# ENDIF() ++find_library(lz4 ${CMAKE_STATIC_LIBRARY_PREFIX}lz4${CMAKE_STATIC_LIBRARY_SUFFIX}) ++find_path(lz4h lz4.h) ++include_directories(lz4h) + + IF (JAVA) + find_package(Java REQUIRED) +-- +2.36.2 + diff --git a/pom.xml b/pom.xml index 79d6b321d..671e4f985 100644 --- a/pom.xml +++ b/pom.xml @@ -1,23 +1,31 @@ - + 4.0.0 - org.semanticweb.vlog4j - vlog4j-parent - 0.0.1 + org.semanticweb.rulewerk + rulewerk-parent + 0.10.0-SNAPSHOT pom - VLog4j + Rulewerk A Java library for working with the VLog rule engine - https://github.com/mkroetzsch/vlog4j + https://github.com/knowsys/rulewerk - vlog4j-core - vlog4j-rdf - vlog4j-examples - vlog4j-owlapi + + rulewerk-core + rulewerk-vlog + rulewerk-rdf + rulewerk-owlapi + rulewerk-graal + rulewerk-parser + rulewerk-commands + rulewerk-examples + rulewerk-client + rulewerk-integrationtests + coverage @@ -31,7 +39,7 @@ - VLog4j Developers + Rulewerk Developers @@ -50,27 +58,41 @@ David Carral david.carral@tu-dresden.de + + maximilian + Maximilian Marx + maximilian.marx@tu-dresden.de + + + larry + Larry González + larry.gonzalez@tu-dresden.de + + + ali + Ali Elhalawati + ali.elhalawati@tu-dresden.de + UTF-8 - 2.1.100 - 4.12 - 1.10.19 - 1.7.10 - 3.7 + 4.13.2 + 2.28.2 + 1.7.28 + 3.9 1.5 - 3.0.0 - 4.5.1 + 3.1.1 + 5.1.11 2.7.16 + 1.3.1 + 4.0.4 + 3.4.1 + 3.16.0 + 1.18 - - org.eclipse.jdt - org.eclipse.jdt.annotation - ${eclipse.jdt.annotation.version} - junit junit @@ -125,12 +147,16 @@ src/main/java src/test/java + true + + java + - org.eclipse.m2e lifecycle-mapping @@ -188,11 +214,20 @@ - + + org.jacoco + jacoco-maven-plugin + 0.8.8 + + + org.apache.maven.plugins + maven-help-plugin + 3.3.0 + @@ -201,7 +236,7 @@ license-maven-plugin - org.apache.maven.plugins maven-compiler-plugin @@ -209,50 +244,132 @@ 1.8 1.8 + + -Xlint:deprecation + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M5 + + ${surefireArgLine} + 1 + true + + + + org.apache.maven.plugins + maven-failsafe-plugin + 3.0.0-M5 + + + + integration-test + verify + + + + org.eluder.coveralls coveralls-maven-plugin 4.3.0 + + + coverage/target/site/jacoco-aggregate/jacoco.xml + + + + + + javax.xml.bind + jaxb-api + 2.3.1 + + - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 + org.jacoco + jacoco-maven-plugin + + + prepare-agent + + prepare-agent + + + surefireArgLine + + + + default-cli + + report + + test + + + ${project.reporting.outputDirectory}/jacoco-ut + + + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError* + + + + - xml - - 256m - - true + + + **/javacc/JavaCCParser* + **/javacc/JavaCCParserConstants* + **/javacc/JavaCCParserTokenManager* + **/javacc/JavaCharStream* + **/javacc/ParseException* + **/javacc/SimpleCharStream* + **/javacc/Token* + **/javacc/TokenMgrError* + - org.apache.maven.plugins maven-javadoc-plugin ${maven.javadoc.version} - VLog4j homepage]]> + 1.8 + + Rulewerk homepage]]> + org.apache.maven.plugins 
maven-scm-publish-plugin - 3.0.0 + 3.1.0 ${project.build.directory}/scmpublish Publishing javadoc for ${project.artifactId}:${project.version} ${project.reporting.outputDirectory}/apidocs - scm:git:https://github.com/mkroetzsch/vlog4j.git + scm:git:https://${GITHUB_USER}:${GITHUB_TOKEN}@github.com/knowsys/rulewerk.git gh-pages @@ -287,12 +404,18 @@ jar + + false + aggregate-javadoc-jar aggregate-jar + + false + @@ -313,12 +436,71 @@ + + development + + + + + org.jacoco + jacoco-maven-plugin + + + prepare-agent + none + + + + + + + + java-9 + + [9,) + + + ${java.home}/bin/javadoc + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven.javadoc.version} + + --no-module-directories + + + + + + + dependenciesFromNix + + + dependenciesFromNix.repo + + + + + dependencies-from-nix-store + file://${dependenciesFromNix.repo} + + + + + dependencies-from-nix-store + file://${dependenciesFromNix.repo} + + + - https://github.com/mkroetzsch/vlog4j.git - scm:git:https://github.com/mkroetzsch/vlog4j.git - scm:git:https://github.com/mkroetzsch/vlog4j.git + https://github.com/knowsys/rulewerk.git + scm:git:https://github.com/knowsys/rulewerk.git + scm:git:https://github.com/knowsys/rulewerk.git - + \ No newline at end of file diff --git a/vlog4j-examples/LICENSE.txt b/rulewerk-client/LICENSE.txt similarity index 100% rename from vlog4j-examples/LICENSE.txt rename to rulewerk-client/LICENSE.txt diff --git a/rulewerk-client/pom.xml b/rulewerk-client/pom.xml new file mode 100644 index 000000000..892d5ad1d --- /dev/null +++ b/rulewerk-client/pom.xml @@ -0,0 +1,116 @@ + + + 4.0.0 + + org.semanticweb.rulewerk + rulewerk-parent + 0.10.0-SNAPSHOT + + + rulewerk-client + jar + + Rulewerk Client + Stand-alone Rulewerk application + + + UTF-8 + + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-vlog + ${project.version} + + + ${project.groupId} + rulewerk-commands + ${project.version} + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + + + info.picocli + picocli + ${picoli.version} + + + org.apache.maven.plugins + maven-shade-plugin + ${shade.version} + + + + + org.jline + jline + ${jline.version} + + + org.fusesource.jansi + jansi + ${jansi.version} + + + + + client + + + + org.apache.maven.plugins + maven-shade-plugin + ${shade.version} + + + package + + shade + + + standalone-rulewerk-client-${project.version} + + + org.semanticweb.rulewerk.client.picocli.Main + + + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + + + + + + diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java new file mode 100644 index 000000000..4b7d94e48 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/ClientUtils.java @@ -0,0 +1,124 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Iterator; + +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; + +/** + * Utility class for interacting with the Rulewerk client. + * + * @author dragoste + * + */ +public final class ClientUtils { + + /** + * Private constructor. This is a utility class. Therefore, it is best practice + * to do the following: (1) Make the class final, (2) make its constructor + * private, (3) make all its fields and methods static. This prevents the + * classes instantiation and inheritance. + */ + private ClientUtils() { + + } + + /** + * Defines how messages should be logged. This method can be modified to + * restrict the logging messages that are shown on the console or to change + * their formatting. See the documentation of Log4J for details on how to do + * this. + * + * Note: The VLog C++ backend performs its own logging. The log-level for this + * can be configured using + * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. + * It is also possible to specify a separate log file for this part of the logs. + */ + public static void configureLogging() { + // Create the appender that will write log messages to the console. + final ConsoleAppender consoleAppender = new ConsoleAppender(); + // Define the pattern of log messages. + // Insert the string "%c{1}:%L" to also show class name and line. + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + consoleAppender.setLayout(new PatternLayout(pattern)); + // Change to Level.ERROR for fewer messages: + consoleAppender.setThreshold(Level.INFO); + + consoleAppender.activateOptions(); + Logger.getRootLogger().addAppender(consoleAppender); + } + + /** + * Prints out the answers given by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + */ + public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { + System.out.println("Answers to query " + queryAtom + " :"); + try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { + answers.forEachRemaining(answer -> System.out.println(" - " + answer)); + + System.out.println("Query answers are: " + answers.getCorrectness()); + } + System.out.println(); + } + + /** + * Returns the number of answers returned by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + * + * @return number of answers to the given query + */ + public static int getQueryAnswerCount(final PositiveLiteral queryAtom, final Reasoner reasoner) { + try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { + return iteratorSize(answers); + } + } + + /** + * Returns the size of an iterator. 
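+	 * Note that counting consumes the given iterator.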
+ * + * @FIXME This is an inefficient way of counting results. It should be done at a + * lower level instead + * @param Iterator to iterate over + * @return number of elements in iterator + */ + private static int iteratorSize(final Iterator iterator) { + int size = 0; + for (; iterator.hasNext(); ++size) { + iterator.next(); + } + return size; + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java new file mode 100644 index 000000000..0193bc7da --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/Main.java @@ -0,0 +1,111 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.io.PrintStream; + +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; +import org.semanticweb.rulewerk.client.shell.DefaultShellConfiguration; +import org.semanticweb.rulewerk.client.shell.InteractiveShellClient; + +import picocli.CommandLine; +import picocli.CommandLine.Command; + +/** + * Class with main method that is a command with subcommands {@code shell} + * (default) and {@code materialize}. + * + * @author Irina Dragoste + * + */ +@Command(name = "", description = "A command line client for Rulewerk.", subcommands = { InteractiveShellClient.class, + RulewerkClientMaterialize.class }) +public class Main { + + public static String INTERACTIVE_SHELL_COMMAND = "shell"; + public static String COMMAND_LINE_CLIENT_COMMAND = "materialize"; + public static String HELP_COMMAND = "help"; + + /** + * Launches the client application for Rulewerk. The functionality depends on + * the given command-line args ({@code args}): + *
+	 * <ul>
+	 * <li>empty args ("") or argument "shell" launch an interactive
+	 * shell.</li>
+	 * <li>argument "materialize" can be used with different options to complete
+	 * several materialization and querying tasks from the command line.</li>
+	 * <li>argument "help" prints usage information.</li>
+	 * </ul>
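+	 *
+	 * For example, a typical invocation of the standalone jar built by this
+	 * module (the rule file and query here are purely illustrative) might be:
+	 * {@code java -jar standalone-rulewerk-client-0.10.0-SNAPSHOT.jar
+	 * materialize --rule-file rules.rls --query "p(?X)"}.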
+ * + * @param args + * + * @throws IOException + */ + public static void main(final String[] args) throws IOException { + configureLogging(); + + if (args.length == 0 || (args.length > 0 && INTERACTIVE_SHELL_COMMAND.equals(args[0]))) { + new InteractiveShellClient().launchShell(new DefaultShellConfiguration()); + } else { + if (COMMAND_LINE_CLIENT_COMMAND.equals(args[0])) { + final CommandLine commandline = new CommandLine(new RulewerkClientMaterialize()); + commandline.execute(args); + } else { + displayHelp(args, System.out); + } + } + } + + static void displayHelp(final String[] args, final PrintStream printStream) { + if (!HELP_COMMAND.equals(args[0])) { + printStream.println("Invalid command."); + } + + if (HELP_COMMAND.equals(args[0]) && args.length > 1 && COMMAND_LINE_CLIENT_COMMAND.equals(args[1])) { + (new CommandLine(new RulewerkClientMaterialize())).usage(printStream); + } else { + (new CommandLine(new Main())).usage(printStream); + } + } + + /** + * Configures {@link Logger} settings. Messages are logged to the console. Log + * level is set to {@link Level#FATAL}. + */ + public static void configureLogging() { + // Create the appender that will write log messages to the console. + final ConsoleAppender consoleAppender = new ConsoleAppender(); + // Define the pattern of log messages. + // Insert the string "%c{1}:%L" to also show class name and line. + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + consoleAppender.setLayout(new PatternLayout(pattern)); + // Change to Level.ERROR for fewer messages: + consoleAppender.setThreshold(Level.FATAL); + + consoleAppender.activateOptions(); + Logger.getRootLogger().addAppender(consoleAppender); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java new file mode 100644 index 000000000..de472be40 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResults.java @@ -0,0 +1,91 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import picocli.CommandLine.Option; + +/** + * Helper class to print query results. + * + * @author Larry Gonzalez + * + */ +public class PrintQueryResults { + + static final String configurationErrorMessage = "Configuration Error: @code{--print-query-result-size} and @code{--print-query-result} are mutually exclusive. Set only one to true."; + + /** + * If true, RulewerkClient will print the size of the query result. Mutually + * exclusive with {@code --print-complete-query-result} + * + * @default true + */ + @Option(names = "--print-query-result-size", description = "Boolean. If true, RulewerkClient will print the size of the query result. 
True by default.") + private boolean sizeOnly = true; + + /** + * If true, RulewerkClient will print the query result in stdout. Mutually + * exclusive with {@code --print-query-result-size} + * + * @default false + */ + @Option(names = "--print-complete-query-result", description = "Boolean. If true, RulewerkClient will print the query result in stdout. False by default.") + private boolean complete = false; + + public PrintQueryResults() { + } + + public PrintQueryResults(final boolean sizeOnly, final boolean complete) { + this.sizeOnly = sizeOnly; + this.complete = complete; + } + + /** + * Check correct configuration of the class. {@code --print-query-result-size} + * and {@code --print-query-result} are mutually exclusive. + * + * @return {@code true} if configuration is valid. + */ + public boolean isValid() { + return !this.sizeOnly || !this.complete; + } + + public boolean isSizeOnly() { + return this.sizeOnly; + } + + public void setSizeOnly(final boolean sizeOnly) { + this.sizeOnly = sizeOnly; + } + + public boolean isComplete() { + return this.complete; + } + + public void setComplete(final boolean complete) { + this.complete = complete; + } + + void printConfiguration() { + System.out.println(" --print-query-result-size: " + this.sizeOnly); + System.out.println(" --print-complete-query-result: " + this.complete); + } +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java new file mode 100644 index 000000000..ba3d2ccdc --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/RulewerkClientMaterialize.java @@ -0,0 +1,246 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import picocli.CommandLine.ArgGroup; +import picocli.CommandLine.Command; +import picocli.CommandLine.Option; + +/** + * Class to implement a command to execute full materialization. 
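+ * The command parses the given rule files into a knowledge base, runs the
+ * chase in a VLog-based reasoner, and then prints, counts, or saves the
+ * answers to the given queries, depending on the configured options.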
+ * + * @author Larry Gonzalez + * + */ +@Command(name = "materialize", description = "Execute the chase and store the literal's extensions") +public class RulewerkClientMaterialize implements Runnable { + + private final KnowledgeBase kb = new KnowledgeBase(); + private final List queries = new ArrayList<>(); + + @Option(names = "--rule-file", description = "Rule file(s) in {@link https://github.com/knowsys/rulewerk/wiki/Rule-syntax-grammar} syntax", required = true) + private final List ruleFiles = new ArrayList<>(); + +// TODO +// Support graal rule files +// @Option(names = "--graal-rule-file", description = "Rule file(s) in graal syntax", required = true) +// private List graalRuleFiles = new ArrayList<>(); + + @Option(names = "--log-level", description = "Log level of VLog (c++ library). One of: DEBUG, INFO, WARNING (default), ERROR.", required = false) + private LogLevel logLevel = LogLevel.WARNING; + + @Option(names = "--log-file", description = "Log file of VLog (c++ library). VLog will log to the default system output by default", required = false) + private String logFile; + + @Option(names = "--chase-algorithm", description = "Chase algorithm. RESTRICTED_CHASE (default) or SKOLEM_CHASE.", required = false) + private Algorithm chaseAlgorithm = Algorithm.RESTRICTED_CHASE; + + @Option(names = "--timeout", description = "Timeout in seconds. Infinite by default", required = false) + private int timeout = 0; + + @Option(names = "--query", description = "Positive not-ground Literals to query after materialization in rls syntax. RulewerkClient will print the size of its extension", required = true) + private List queryStrings = new ArrayList<>(); + + @ArgGroup(exclusive = false) + private final PrintQueryResults printQueryResults = new PrintQueryResults(); + + @ArgGroup(exclusive = false) + private final SaveQueryResults saveQueryResults = new SaveQueryResults(); + + // TODO + // @ArgGroup(exclusive = false) + // private SaveModel saveModel = new SaveModel(); + + @Override + public void run() { + ClientUtils.configureLogging(); + + /* Validate configuration */ + this.validateConfiguration(); + + /* Configure rules */ + this.configureRules(); + + /* Configure queries */ + this.configureQueries(); + + /* Print configuration */ + this.printConfiguration(); + + try (Reasoner reasoner = new VLogReasoner(this.kb)) { + + this.materialize(reasoner); + // TODO if (saveModel.saveModel) { this.saveModel(); } + + this.answerQueries(reasoner); + } + System.out.println("Process completed."); + } + + private void validateConfiguration() { + if (!this.printQueryResults.isValid()) { + this.printErrorMessageAndExit(PrintQueryResults.configurationErrorMessage); + } + if (!this.saveQueryResults.isConfigurationValid()) { + this.printErrorMessageAndExit(SaveQueryResults.configurationErrorMessage); + } + if (this.saveQueryResults.isSaveResults() && !this.saveQueryResults.isDirectoryValid()) { + this.printErrorMessageAndExit(SaveQueryResults.wrongDirectoryErrorMessage); + } + // TODO + // if (!saveModel.isConfigurationValid()) { + // printMessageAndExit(SaveModel.configurationErrorMessage); + // } + // if (saveModel.isSaveResults() && !saveModel.isDirectoryValid()) { + // printMessageAndExit(SaveModel.wrongDirectoryErrorMessage); + // } + } + + private void configureRules() { + for (final String ruleFile : this.ruleFiles) { + try { + RuleParser.parseInto(this.kb, new FileInputStream(ruleFile)); + } catch (final FileNotFoundException e1) { + this.printErrorMessageAndExit("File not found: " + 
ruleFile + "\n " + e1.getMessage()); + } catch (final ParsingException e2) { + this.printErrorMessageAndExit("Failed to parse rule file: " + ruleFile + "\n " + e2.getMessage()); + } + } + } + + private void configureQueries() { + for (final String queryString : this.queryStrings) { + try { + final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + this.queries.add(query); + } catch (final ParsingException e) { + System.err.println("Failed to parse query: \"\"\"" + queryString + "\"\"\"."); + System.err.println(e.getMessage()); + System.err.println("The query was skipped. Continuing ..."); + } + } + } + + private void materialize(final Reasoner reasoner) { + // logFile + reasoner.setLogFile(this.logFile); + // logLevel + reasoner.setLogLevel(this.logLevel); + // chaseAlgorithm + reasoner.setAlgorithm(this.chaseAlgorithm); + // timeout + if (this.timeout > 0) { + reasoner.setReasoningTimeout(this.timeout); + } + + System.out.println("Executing the chase ..."); + try { + reasoner.reason(); + } catch (final IOException e) { + this.printErrorMessageAndExit( + "Something went wrong during reasoning. Please check the reasoner log file.\n" + e.getMessage()); + } + + } + + // TODO private void saveModel() {...} + + private void answerQueries(final Reasoner reasoner) { + if (!this.queries.isEmpty()) { + System.out.println("Answering queries ..."); + for (final PositiveLiteral query : this.queries) { + if (this.saveQueryResults.isSaveResults()) { + // Save the query results + this.doSaveQueryResults(reasoner, query); + } + + if (this.printQueryResults.isSizeOnly()) { + // print number of facts in results + this.doPrintResults(reasoner, query); + } else if (this.printQueryResults.isComplete()) { + // print facts + ClientUtils.printOutQueryAnswers(query, reasoner); + } + } + } + } + + private void printConfiguration() { + System.out.println("Configuration:"); + + for (final String ruleFile : this.ruleFiles) { + System.out.println(" --rule-file: " + ruleFile); + } + + for (final PositiveLiteral query : this.queries) { + System.out.println(" --query: " + query); + } + + System.out.println(" --log-file: " + this.logFile); + System.out.println(" --log-level: " + this.logLevel); + System.out.println(" --chase-algorithm: " + this.chaseAlgorithm); + System.out.println(" --timeout: " + ((this.timeout > 0) ? 
this.timeout : "none")); + + /* Print what to do with the result */ + this.printQueryResults.printConfiguration(); + this.saveQueryResults.printConfiguration(); + // TODO saveModel.printConfiguration(); + } + + private void doSaveQueryResults(final Reasoner reasoner, final PositiveLiteral query) { + this.saveQueryResults.mkdir(); + try { + reasoner.exportQueryAnswersToCsv(query, this.queryOputputPath(query), true); + } catch (final IOException e) { + System.err.println("Can't save query: \"\"\"" + query + "\"\"\"."); + System.err.println(e.getMessage()); + } + } + + private void doPrintResults(final Reasoner reasoner, final PositiveLiteral query) { + System.out.println("Number of query answers in " + query + ": " + reasoner.countQueryAnswers(query).getCount()); + } + + private String queryOputputPath(final PositiveLiteral query) { + return this.saveQueryResults.getOutputQueryResultDirectory() + "/" + query + ".csv"; + } + + private void printErrorMessageAndExit(final String message) { + System.err.println(message); + System.out.println("Exiting the program."); + System.exit(1); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java new file mode 100644 index 000000000..03f98eff1 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveModel.java @@ -0,0 +1,118 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; + +import picocli.CommandLine.Option; + +/** + * Helper class to save the resulting model of the materialization process. + * + * @author Larry Gonzalez + * + */ +public class SaveModel { + + public static final String DEFAULT_OUTPUT_DIR_NAME = "model"; + + static final String configurationErrorMessage = "Configuration Error: If @code{--save-model} is true, then a non empty @code{--output-model-directory} is required."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-model-directory}. Please check the path."; + + /** + * If true, RulewerkClient will save the model in {@code --output-model-directory} + * + * @default false + */ + @Option(names = "--save-model", description = "Boolean. If true, RulewerkClient will save the model into --output-model-directory. False by default.") + private boolean saveModel = false; + + /** + * Directory to store the model. Used only if {@code --store-model} is true. + * + * @default "model" + */ + @Option(names = "--output-model-directory", description = "Directory to store the model. Used only if --store-model is true. 
\"" + + DEFAULT_OUTPUT_DIR_NAME + "\" by default.") + private String outputModelDirectory = DEFAULT_OUTPUT_DIR_NAME; + + public SaveModel() { + } + + public SaveModel(final boolean saveModel, final String outputDir) { + this.saveModel = saveModel; + this.outputModelDirectory = outputDir; + } + + /** + * Check correct configuration of the class. If {@code --save-model} is true, + * then a non-empty {@code --output-model-directory} is required. + * + * @return {@code true} if configuration is valid. + */ + public boolean isConfigurationValid() { + return !this.saveModel || ((this.outputModelDirectory != null) && !this.outputModelDirectory.isEmpty()); + } + + /** + * Check that the path to store the model is either non-existing or a directory. + * + * @return {@code true} if conditions are satisfied. + */ + public boolean isDirectoryValid() { + final File file = new File(this.outputModelDirectory); + return !file.exists() || file.isDirectory(); + } + + /** + * Create directory to store the model + */ + void mkdir() { + if (this.saveModel) { + final File file = new File(this.outputModelDirectory); + if (!file.exists()) { + file.mkdirs(); + } + } + } + + public void printConfiguration() { + System.out.println(" --save-model: " + this.saveModel); + System.out.println(" --output-model-directory: " + this.outputModelDirectory); + } + + public boolean isSaveModel() { + return this.saveModel; + } + + public void setSaveModel(final boolean saveModel) { + this.saveModel = saveModel; + } + + public String getOutputModelDirectory() { + return this.outputModelDirectory; + } + + public void setOutputModelDirectory(final String outputModelDirectory) { + this.outputModelDirectory = outputModelDirectory; + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java new file mode 100644 index 000000000..1f84bb15a --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResults.java @@ -0,0 +1,122 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; + +import picocli.CommandLine.Option; + +/** + * Helper class to save query results. + * + * @author Larry Gonzalez + * + */ +public class SaveQueryResults { + public static final String DEFAULT_OUTPUT_DIR_NAME = "query-results"; + + static final String configurationErrorMessage = "Configuration Error: If @code{--save-query-results} is true, then a non empty @code{--output-query-result-directory} is required."; + static final String wrongDirectoryErrorMessage = "Configuration Error: wrong @code{--output-query-result-directory}. 
Please check the path."; + + /** + * If true, RulewerkClient will save the query result in + * {@code --output-query-result-directory} + * + * @default false + */ + @Option(names = "--save-query-results", description = "Boolean. If true, RulewerkClient will save the query result into --output-query-result-directory. False by default.") + private boolean saveResults = false; + + /** + * Directory to store the model. Used only if {@code --save-query-results} is + * true + * + * @default query-results + */ + @Option(names = "--output-query-result-directory", description = "Directory to store the model. Used only if --save-query-results is true. \"" + + DEFAULT_OUTPUT_DIR_NAME + "\" by default.") + private String outputQueryResultDirectory = DEFAULT_OUTPUT_DIR_NAME; + + public SaveQueryResults() { + } + + public SaveQueryResults(final boolean saveResults, final String outputDir) { + this.saveResults = saveResults; + this.outputQueryResultDirectory = outputDir; + } + + /** + * Check correct configuration of the class. If {@code --save-query-results} is + * true, then a non-empty {@code --output-query-result-directory} is required. + * + * @return {@code true} if configuration is valid. + */ + public boolean isConfigurationValid() { + return !this.saveResults + || ((this.outputQueryResultDirectory != null) && !this.outputQueryResultDirectory.isEmpty()); + } + + /** + * Check that the path to store the query results is either non-existing or a + * directory. + * + * @return {@code true} if conditions are satisfied. + */ + public boolean isDirectoryValid() { + final File file = new File(this.outputQueryResultDirectory); + return !file.exists() || file.isDirectory(); + } + + public boolean isSaveResults() { + return this.saveResults; + } + + public void setSaveResults(final boolean saveResults) { + this.saveResults = saveResults; + } + + public String getOutputQueryResultDirectory() { + return this.outputQueryResultDirectory; + } + + public void setOutputQueryResultDirectory(final String outputQueryResultDirectory) { + this.outputQueryResultDirectory = outputQueryResultDirectory; + } + + /** + * Create directory to store query results if not present. It assumes that + * configuration and directory are valid. + */ + void mkdir() { + if (this.saveResults) { + final File file = new File(this.outputQueryResultDirectory); + if (!file.exists()) { + file.mkdirs(); + } + } + } + + void printConfiguration() { + System.out.println(" --save-query-results: " + this.saveResults); + System.out.println(" --output-query-result-directory: " + this.outputQueryResultDirectory); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java new file mode 100644 index 000000000..5a82c72e6 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfiguration.java @@ -0,0 +1,130 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.jline.builtins.Completers; +import org.jline.builtins.Completers.TreeCompleter; +import org.jline.builtins.Completers.TreeCompleter.Node; +import org.jline.reader.LineReader; +import org.jline.reader.LineReaderBuilder; +import org.jline.reader.impl.completer.StringsCompleter; +import org.jline.terminal.Terminal; +import org.jline.terminal.TerminalBuilder; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.ClearCommandInterpreter; +import org.semanticweb.rulewerk.commands.ExportCommandInterpreter; +import org.semanticweb.rulewerk.commands.LoadCommandInterpreter; + +/** + * An implementation of {@link ShellConfiguration} with custom styling and + * completion for recognized commands. + * + * @author Irina Dragoste + * + */ +public class DefaultShellConfiguration implements ShellConfiguration { + + public static final String PROMPT_STRING = "rulewerk> "; + + @Override + public LineReader buildLineReader(final Terminal terminal, final Collection registeredCommands) { + final LineReaderBuilder lineReaderBuilder = this.getDefaultLineReaderConfiguration(LineReaderBuilder.builder()); + + lineReaderBuilder.terminal(terminal); + lineReaderBuilder.completer(this.buildCompleter(registeredCommands)); + + return lineReaderBuilder.build(); + } + + LineReaderBuilder getDefaultLineReaderConfiguration(final LineReaderBuilder lineReaderBuilder) { + + lineReaderBuilder.appName("Rulewerk Shell"); + /* + * This allows completion on an empty buffer, rather than inserting a tab + */ + lineReaderBuilder.option(LineReader.Option.INSERT_TAB, false); + lineReaderBuilder.option(LineReader.Option.AUTO_FRESH_LINE, true); + lineReaderBuilder.option(LineReader.Option.USE_FORWARD_SLASH, true); + + return lineReaderBuilder; + } + + TreeCompleter buildCompleter(final Collection registeredCommands) { + final Node fileNameCompleterNode = TreeCompleter.node(new Completers.FileNameCompleter()); + + final List nodes = new ArrayList<>(); + registeredCommands.stream().map(command -> "@" + command).forEach(commandName -> { + if (commandName.equals("@load")) { + nodes.add(TreeCompleter.node(commandName, fileNameCompleterNode)); + + final StringsCompleter taskOptionsCompleter = new StringsCompleter(LoadCommandInterpreter.TASK_OWL, + LoadCommandInterpreter.TASK_RDF, LoadCommandInterpreter.TASK_RLS); + nodes.add(TreeCompleter.node(commandName, + TreeCompleter.node(taskOptionsCompleter, fileNameCompleterNode))); + } else if (commandName.equals("@export")) { + final StringsCompleter taskOptionsCompleter = new StringsCompleter( + ExportCommandInterpreter.TASK_INFERENCES, ExportCommandInterpreter.TASK_KB + ); + nodes.add(TreeCompleter.node(commandName, + TreeCompleter.node(taskOptionsCompleter, fileNameCompleterNode))); + } else if (commandName.equals("@clear")) { + final StringsCompleter taskOptionsCompleter = new StringsCompleter(ClearCommandInterpreter.TASK_ALL, + ClearCommandInterpreter.TASK_INFERENCES, 
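+						// remaining task names offered for @clear completion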
ClearCommandInterpreter.TASK_FACTS, + ClearCommandInterpreter.TASK_PREFIXES, ClearCommandInterpreter.TASK_RULES, + ClearCommandInterpreter.TASK_SOURCES); + nodes.add(TreeCompleter.node(commandName, TreeCompleter.node(taskOptionsCompleter))); + } else if (commandName.equals("@help")) { + nodes.add( + TreeCompleter.node(commandName, TreeCompleter.node(new StringsCompleter(registeredCommands)))); + } else { + nodes.add(TreeCompleter.node(commandName)); + } + }); + return new TreeCompleter(nodes); + + } + + @Override + public Terminal buildTerminal() throws IOException { + return this.getDefaultTerminalConfiguration(TerminalBuilder.builder()).build(); + } + + TerminalBuilder getDefaultTerminalConfiguration(final TerminalBuilder terminalBuilder) { + return terminalBuilder.dumb(true).jansi(true).jna(false).system(true); + } + + @Override + public String buildPrompt(final Terminal terminal) { + return this.getDefaultPromptStyle().toAnsi(terminal); + } + + AttributedString getDefaultPromptStyle() { + final AttributedStyle promptStyle = AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW); + return new AttributedString(PROMPT_STRING, promptStyle); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java new file mode 100644 index 000000000..e74a7a189 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClient.java @@ -0,0 +1,75 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +import picocli.CommandLine.Command; + +/** + * Class for executing the default {@code shell} command, which launches an + * interactive shell. + * + * @author Irina Dragoste + * + */ +@Command(name = "shell", description = "Launch an interactive shell for Rulewerk. The default command.") +public class InteractiveShellClient +{ + + /** + * Builds and launches an interactive shell, which accepts commands for running + * Rulewerk tasks using VLog Reasosner. + * + * @param configuration for shell I/O resources + * @throws IOException if {@link Terminal} cannot be built. 
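+	 *
+	 * Note that this method only returns once the user exits the shell.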
+ */ + public void launchShell(final ShellConfiguration configuration) throws IOException { + + final Terminal terminal = configuration.buildTerminal(); + + try (Interpreter interpreter = this.initializeInterpreter(terminal)) { + final Shell shell = new Shell(interpreter); + + final LineReader lineReader = configuration.buildLineReader(terminal, shell.getCommands()); + final String prompt = configuration.buildPrompt(terminal); + + shell.run(lineReader, prompt); + } + } + + Interpreter initializeInterpreter(final Terminal terminal) { + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, + (knowledgeBase) -> new VLogReasoner(knowledgeBase), new TerminalStyledPrinter(terminal), + parserConfiguration); + + return interpreter; + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java new file mode 100644 index 000000000..103d6abb1 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/Shell.java @@ -0,0 +1,163 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Set; + +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter.ExitCommandName; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class Shell { + + private final Interpreter interpreter; + + private boolean running; + + public Shell(final Interpreter interpreter) { + this.interpreter = interpreter; + + this.registerExitCommand(); + } + + private void registerExitCommand() { + final CommandInterpreter exitCommandInterpreter = new ExitCommandInterpreter(this); + for (final ExitCommandName exitCommandName : ExitCommandName.values()) { + this.interpreter.registerCommandInterpreter(exitCommandName.toString(), exitCommandInterpreter); + } + } + + public void run(final LineReader lineReader, final String prompt) { + this.printWelcome(); + + this.running = true; + while (this.running) { + this.runCommand(lineReader, prompt); + } + this.interpreter.printSection("Exiting Rulewerk shell ... 
bye.\n\n"); + } + + Command runCommand(final LineReader lineReader, final String prompt) { + Command command = null; + try { + command = this.readCommand(lineReader, prompt); + } catch (final Exception e) { + this.interpreter.printNormal("Unexpected error: " + e.getMessage() + "\n"); + e.printStackTrace(); + } + + if (command != null) { + try { + this.interpreter.runCommand(command); + } catch (final CommandExecutionException e) { + this.interpreter.printNormal("Error: " + e.getMessage() + "\n"); + } + } + return command; + } + + /** + * Reads a command from the prompt and returns a corresponding {@link Command} + * object. If no command should be executed, null is returned. Some effort is + * made to interpret mistyped commands by adding @ and . before and after the + * input, if forgotten. + * + * @param prompt + * + * @return command or null + */ + public Command readCommand(final LineReader lineReader, final String prompt) { + String readLine; + try { + readLine = lineReader.readLine(prompt); + } catch (final UserInterruptException e) { + if (e.getPartialLine().isEmpty()) { + // Exit request from user CTRL+C + return ExitCommandInterpreter.EXIT_COMMAND; + } else { + return null; // used as empty command + } + } catch (final EndOfFileException e) { + // Exit request from user CTRL+D + return ExitCommandInterpreter.EXIT_COMMAND; + } + + readLine = this.processReadLine(readLine); + if (readLine.isEmpty()) { + return null; + } + + try { + return this.interpreter.parseCommand(readLine); + } catch (final ParsingException e) { + this.interpreter.printNormal("Error: " + e.getMessage() + "\n" + e.getCause().getMessage() + "\n"); + return null; + } + } + + String processReadLine(final String readLine) { + String result = readLine.trim(); + if (!result.isEmpty()) { + if (result.charAt(0) != '@') { + result = "@" + result; + } + if (result.charAt(result.length() - 1) != '.') { + result = result + " ."; + } + } + return result; + } + + public void exitShell() { + this.running = false; + } + + private void printWelcome() { + this.interpreter.printNormal("\n"); + this.interpreter.printSection("Welcome to the Rulewerk interactive shell.\n"); + this.interpreter.printNormal("For further information, type "); + this.interpreter.printCode("@help."); + this.interpreter.printNormal(" To quit, type "); + this.interpreter.printCode("@exit.\n"); + this.interpreter.printNormal("\n"); + } + + boolean isRunning() { + return this.running; + } + + /** + * Getter for the shell commands. + * + * @return the names of the commands that are recognized by this shell. + */ + public Set getCommands() { + return this.interpreter.getRegisteredCommands(); + } + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java new file mode 100644 index 000000000..fc9e42e04 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/ShellConfiguration.java @@ -0,0 +1,65 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.Collection; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; + +/** + * Interface for providing I/O resources for an interactive shell: terminal, + * terminal prompt, and line reader + * + * @author Irina Dragoste + * + */ +public interface ShellConfiguration { + + /** + * Provides a line reader that reads user input from the given terminal. The + * line reader offers tab-completion for the given list of command names. + * + * @param terminal terminal to read from. + * @param commands list of command names recognized by the interactive shell. + * @return a line reader for interacting with the shell terminal. + */ + LineReader buildLineReader(Terminal terminal, Collection commands); + + /** + * Provides an I/O terminal for the interactive shell. + * + * @return the interactive shell terminal. + * @throws IOException when the terminal cannot be built + */ + Terminal buildTerminal() throws IOException; + + /** + * Provides the prompt text (with colour and style) to be displayed on the given + * terminal. + * + * @param terminal terminal for the prompt to be displayed on + * @return the prompt text with embedded style. + */ + String buildPrompt(Terminal terminal); + +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java new file mode 100644 index 000000000..4bf7e91ea --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/TerminalStyledPrinter.java @@ -0,0 +1,85 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.semanticweb.rulewerk.commands.StyledPrinter; + +/** + * StyledPrinter that uses the {@link PrintWriter} of a {@link Terminal} and has + * various styling. + * + * @author Irina Dragoste + * + */ +public class TerminalStyledPrinter implements StyledPrinter { + + final Terminal terminal; + + /** + * Constructor providing a terminal for the StyledPrinter to write to. 
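+	 * All styled strings are written through the terminal's writer and
+	 * flushed immediately.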
+ * + * @param terminal the terminal to write to + */ + public TerminalStyledPrinter(final Terminal terminal) { + this.terminal = terminal; + } + + @Override + public void printNormal(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT); + } + + @Override + public void printSection(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.bold()); + } + + @Override + public void printEmph(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.bold()); + } + + @Override + public void printCode(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + } + + @Override + public void printImportant(final String string) { + this.printStyled(string, AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + } + + @Override + public PrintWriter getWriter() { + return this.terminal.writer(); + } + + private void printStyled(final String string, final AttributedStyle attributedStyle) { + final AttributedString attributedString = new AttributedString(string, attributedStyle); + this.getWriter().print(attributedString.toAnsi(this.terminal)); + this.getWriter().flush(); + } +} diff --git a/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java new file mode 100644 index 000000000..ceb284828 --- /dev/null +++ b/rulewerk-client/src/main/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreter.java @@ -0,0 +1,78 @@ +package org.semanticweb.rulewerk.client.shell.commands; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.ArrayList; + +import org.semanticweb.rulewerk.client.shell.Shell; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; + +/** + * Interpreter for the command to exit an interactive shell + * + * @author Irina Dragoste + * + */ +public class ExitCommandInterpreter implements CommandInterpreter { + + public static final Command EXIT_COMMAND = new Command(ExitCommandName.exit.toString(), new ArrayList<>(0)); + + /** + * Command names used for requesting exiting an interactive shell + * + * @author Irina Dragoste + * + */ + public static enum ExitCommandName { + exit; + } + + final Shell shell; + + /** + * Constructor that provides the interactive shell from which exit is requested + * + * @param shell interactive shell to exit from + */ + public ExitCommandInterpreter(final Shell shell) { + this.shell = shell; + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " .\n"); + } + + @Override + public String getSynopsis() { + return "exit Rulewerk shell"; + } + + @Override + public void run(final Command command, final org.semanticweb.rulewerk.commands.Interpreter interpreter) + throws CommandExecutionException { + this.shell.exitShell(); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java new file mode 100644 index 000000000..f1845c573 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/PrintQueryResultsTest.java @@ -0,0 +1,119 @@ +package org.semanticweb.rulewerk.client.picocli; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +public class PrintQueryResultsTest { + + private static final PrintQueryResults sizeTrueCompleteTrue = new PrintQueryResults(); + private static final PrintQueryResults sizeTrueCompleteFalse = new PrintQueryResults(); + private static final PrintQueryResults sizeFalseCompleteTrue = new PrintQueryResults(false, true); + private static final PrintQueryResults sizeFalseCompleteFalse = new PrintQueryResults(); + + static { + sizeTrueCompleteTrue.setComplete(true); + sizeFalseCompleteFalse.setSizeOnly(false); + } + + @Test + public void isValid_sizeTrueCompleteFalse_valid() { + // default configuration + assertTrue(sizeTrueCompleteFalse.isValid()); + } + + @Test + public void isValid_sizeTrueCompleteTrue_notValid() { + assertFalse(sizeTrueCompleteTrue.isValid()); + } + + @Test + public void isValid_sizeFalseCompleteTrue_valid() { + assertTrue(sizeFalseCompleteTrue.isValid()); + } + + @Test + public void isValid_sizeFalseCompleteFalse_valid() { + assertTrue(sizeFalseCompleteFalse.isValid()); + } + + @Test + public void isSizeOnly_sizeFalseCompleteTrue() { + assertFalse(sizeFalseCompleteTrue.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeTrueCompleteTrue() { + assertTrue(sizeTrueCompleteTrue.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeTrueCompleteFalse() { + assertTrue(sizeTrueCompleteFalse.isSizeOnly()); + } + + @Test + public void isSizeOnly_sizeFalseCompleteFalse() { + assertFalse(sizeFalseCompleteFalse.isSizeOnly()); + } + + @Test + public void isComplete_sizeTrueCompleteFalse() { + assertFalse(sizeTrueCompleteFalse.isComplete()); + } + + @Test + public void isComplete_sizeTrueCompleteTrue() { + assertTrue(sizeTrueCompleteTrue.isComplete()); + } + + @Test + public void isComplete_sizeFalseCompleteTrue() { + assertTrue(sizeFalseCompleteTrue.isComplete()); + } + + @Test + public void isComplete_sizeFalseCompleteFalse() { + assertFalse(sizeFalseCompleteFalse.isComplete()); + } + + @Test + public void setSizeOnly_and_isSizeOnly() { + PrintQueryResults prq = new PrintQueryResults(); + prq.setSizeOnly(false); + assertFalse(prq.isSizeOnly()); + prq.setSizeOnly(true); + assertTrue(prq.isSizeOnly()); + } + + @Test + public void setComplete_and_isComplete() { + PrintQueryResults prq = new PrintQueryResults(); + prq.setComplete(false); + assertFalse(prq.isComplete()); + prq.setComplete(true); + assertTrue(prq.isComplete()); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java new file mode 100644 index 000000000..09a780f0c --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveModelTest.java @@ -0,0 +1,213 @@ +package org.semanticweb.rulewerk.client.picocli; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; + +import org.apache.commons.lang3.StringUtils; +import org.junit.Rule; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +public class SaveModelTest { + + private final static SaveModel saveTrueDefaultDir = new SaveModel(); + private final static SaveModel saveTrueEmptyDir = new SaveModel(true, ""); + private final static SaveModel saveTrueNullDir = new SaveModel(true, null); + private final static SaveModel saveFalseDefaultDir = new SaveModel(); + private final static SaveModel saveFalseEmptyDir = new SaveModel(false, ""); + private final static SaveModel saveFalseNullDir = new SaveModel(false, null); + + static { + saveTrueDefaultDir.setSaveModel(true); + saveFalseDefaultDir.setSaveModel(false); + } + + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); + + @Test + public void isConfigurationValid_saveTrueDefaultDir_valid() { + assertTrue(saveTrueDefaultDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveTrueEmptyDir_nonValid() { + assertFalse(saveTrueEmptyDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveTrueNullDir_nonValid() { + assertFalse(saveTrueNullDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveFalseDefaultDir_valid() { + assertTrue(saveFalseDefaultDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveFalseEmptyDir_valid() { + assertTrue(saveFalseEmptyDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveFalseNullDir_valid() { + assertTrue(saveFalseNullDir.isConfigurationValid()); + } + + @Test + public void isDirectoryValid_nonExistingDirectory_valid() throws IOException { + File nonExistingDirectory = tempFolder.newFolder("folderPath"); + nonExistingDirectory.delete(); + SaveModel temp = new SaveModel(true, nonExistingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); + } + + @Test + public void isDirectoryValid_existingDirectory_valid() throws IOException { + File existingDirectory = tempFolder.newFolder("folderPath"); + existingDirectory.mkdir(); + SaveModel temp = new SaveModel(true, existingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); + } + + @Test + public void isDirectoryValid_existingFile_nonValid() throws IOException { + File existingFile = tempFolder.newFile("filePath"); + existingFile.createNewFile(); + SaveModel temp = new SaveModel(true, existingFile.getAbsolutePath()); + assertFalse(temp.isDirectoryValid()); + } + + @Test + public void mkdir_saveTrueNonExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.delete(); + SaveModel temp = new SaveModel(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); + } + + @Test + public void mkdir_saveTrueExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.mkdirs(); + SaveModel temp = new SaveModel(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); + } + + @Test + public void mkdir_saveFalse() throws IOException 
{ + File folder = tempFolder.newFile("validNonExistingFolder"); + folder.delete(); + SaveModel temp = new SaveModel(false, folder.getAbsolutePath()); + temp.mkdir(); + assertFalse(folder.exists()); + } + + @Test + public void isSaveModel_saveTrueDefaultDir() { + assertTrue(saveTrueDefaultDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveTrueDefaultDir() { + assertEquals(SaveModel.DEFAULT_OUTPUT_DIR_NAME, saveTrueDefaultDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveTrueEmptyDir() { + assertTrue(saveTrueEmptyDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveTrueEmptyDir() { + assertEquals(StringUtils.EMPTY, saveTrueEmptyDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveTrueNullDir() { + assertTrue(saveTrueNullDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveTrueNullDir() { + assertNull(saveTrueNullDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveFalseDefaultDir() { + assertFalse(saveFalseDefaultDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveFalseDefaultDir() { + assertEquals(SaveModel.DEFAULT_OUTPUT_DIR_NAME, saveFalseDefaultDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveFalseEmptyDir() { + assertFalse(saveFalseEmptyDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveFalseEmptyDir() { + assertEquals(StringUtils.EMPTY, saveFalseEmptyDir.getOutputModelDirectory()); + } + + @Test + public void isSaveModel_saveFalseNullDir() { + assertFalse(saveFalseNullDir.isSaveModel()); + } + + @Test + public void getOutputModelDirectory_saveFalseNullDir() { + assertNull(saveFalseNullDir.getOutputModelDirectory()); + } + + @Test + public void setSaveModel_and_isSaveModel() { + SaveModel sm = new SaveModel(); + sm.setSaveModel(true); + assertTrue(sm.isSaveModel()); + sm.setSaveModel(false); + assertFalse(sm.isSaveModel()); + } + + @Test + public void setOutputModelDirectory_and_getOutputModelDirectory() { + SaveModel sm = new SaveModel(); + sm.setOutputModelDirectory(""); + assertEquals("", sm.getOutputModelDirectory()); + sm.setOutputModelDirectory(null); + assertNull(sm.getOutputModelDirectory()); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java new file mode 100644 index 000000000..608b10438 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/picocli/SaveQueryResultsTest.java @@ -0,0 +1,158 @@ +package org.semanticweb.rulewerk.client.picocli; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; + +import org.junit.Rule; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +public class SaveQueryResultsTest { + + private static final SaveQueryResults saveTrueDefaultDir = new SaveQueryResults(true, + SaveQueryResults.DEFAULT_OUTPUT_DIR_NAME); + private static final SaveQueryResults saveTrueEmptyDir = new SaveQueryResults(true, ""); + private static final SaveQueryResults saveTrueNullDir = new SaveQueryResults(true, null); + private static final SaveQueryResults saveFalseDefaultDir = new SaveQueryResults(); + private static final SaveQueryResults saveFalseEmptyDir = new SaveQueryResults(false, ""); + private static final SaveQueryResults saveFalseNullDir = new SaveQueryResults(false, null); + + @Rule + public TemporaryFolder tempFolder = new TemporaryFolder(); + + @Test + public void isConfigurationValid_saveTrueDefaultDir_valid() { + assertTrue(saveTrueDefaultDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveTrueEmptyDir_notValid() { + assertFalse(saveTrueEmptyDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveTrueNullDir_notValid() { + assertFalse(saveTrueNullDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveFalseDefaultDir_valid() { + assertTrue(saveFalseDefaultDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveFalseEmptyDir_valid() { + assertTrue(saveFalseEmptyDir.isConfigurationValid()); + } + + @Test + public void isConfigurationValid_saveFalseNullDir_valid() { + assertTrue(saveFalseNullDir.isConfigurationValid()); + } + + @Test + public void isDirectoryValid_nonExistingDirectory_valid() throws IOException { + File nonExistingDirectory = tempFolder.newFolder("folderPath"); + nonExistingDirectory.delete(); + SaveQueryResults temp = new SaveQueryResults(true, nonExistingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); + } + + @Test + public void isDirectoryValid_existingDirectory_valid() throws IOException { + File existingDirectory = tempFolder.newFolder("folderPath"); + existingDirectory.mkdir(); + SaveQueryResults temp = new SaveQueryResults(true, existingDirectory.getAbsolutePath()); + assertTrue(temp.isDirectoryValid()); + } + + @Test + public void isDirectoryValid_existingFile_nonValid() throws IOException { + File existingFile = tempFolder.newFile("filePath"); + existingFile.createNewFile(); + SaveQueryResults temp = new SaveQueryResults(true, existingFile.getAbsolutePath()); + assertFalse(temp.isDirectoryValid()); + } + + @Test + public void mkdir_saveTrueNonExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.delete(); + SaveQueryResults temp = new SaveQueryResults(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); + } + + @Test + public void mkdir_saveTrueExistingDirectory() throws IOException { + File subDirectory = tempFolder.newFolder("folderPath", "subFolder"); + subDirectory.mkdirs(); + SaveQueryResults temp = new SaveQueryResults(true, subDirectory.getAbsolutePath()); + temp.mkdir(); + assertTrue(subDirectory.isDirectory()); + } + + @Test + public void mkdir_saveFalse() throws IOException { + File folder = tempFolder.newFile("validNonExistingFolder"); + folder.delete(); + SaveQueryResults temp = new SaveQueryResults(false, folder.getAbsolutePath()); + temp.mkdir(); + 
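+		// the test expects mkdir() to be a no-op when saving of query results is disabled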
assertFalse(folder.exists());
+	}
+
+	@Test
+	public void isSaveResults_saveFalseDefaultDir() {
+		assertFalse(saveFalseDefaultDir.isSaveResults());
+	}
+
+	@Test
+	public void getOutputQueryResultDirectory_saveFalseDefaultDir() {
+		assertEquals(SaveQueryResults.DEFAULT_OUTPUT_DIR_NAME, saveFalseDefaultDir.getOutputQueryResultDirectory());
+	}
+
+	@Test
+	public void setSaveResults_and_isSaveResults() {
+		SaveQueryResults srq = new SaveQueryResults();
+		srq.setSaveResults(true);
+		assertTrue(srq.isSaveResults());
+		srq.setSaveResults(false);
+		assertFalse(srq.isSaveResults());
+	}
+
+	@Test
+	public void setOutputQueryResultDirectory_and_getOutputQueryResultDirectory() {
+		SaveQueryResults srq = new SaveQueryResults();
+		srq.setOutputQueryResultDirectory("");
+		assertEquals("", srq.getOutputQueryResultDirectory());
+		srq.setOutputQueryResultDirectory(null);
+		assertNull(srq.getOutputQueryResultDirectory());
+	}
+
+}
diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java
new file mode 100644
index 000000000..a1f0bf7f5
--- /dev/null
+++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/DefaultShellConfigurationTest.java
@@ -0,0 +1,313 @@
+package org.semanticweb.rulewerk.client.shell;
+
+/*-
+ * #%L
+ * Rulewerk Client
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.jline.builtins.Completers.TreeCompleter;
+import org.jline.reader.Candidate;
+import org.jline.reader.LineReader;
+import org.jline.reader.ParsedLine;
+import org.jline.terminal.Terminal;
+import org.jline.utils.AttributedString;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.mockito.Mockito;
+import org.semanticweb.rulewerk.commands.ClearCommandInterpreter;
+import org.semanticweb.rulewerk.commands.ExportCommandInterpreter;
+import org.semanticweb.rulewerk.commands.LoadCommandInterpreter;
+
+public class DefaultShellConfigurationTest {
+
+	@Rule
+	public TemporaryFolder folder = new TemporaryFolder(new File("."));
+
+	public static final List<String> SHELL_COMMANDS = Arrays.asList("help", "load", "assert", "retract", "addsource",
+			"delsource", "setprefix", "clear",
+			"reason", "query", "export", "showkb", "exit");
+
+	@Test
+	public void buildPromptProvider() {
+		final AttributedString promptProvider = new DefaultShellConfiguration().getDefaultPromptStyle();
+		assertEquals("rulewerk> ", promptProvider.toString());
+	}
+
+	@Test
+	public void buildPrompt() {
+		final Terminal terminal = Mockito.mock(Terminal.class);
+		Mockito.when(terminal.getType()).thenReturn(Terminal.TYPE_DUMB);
+		final String string = new DefaultShellConfiguration().buildPrompt(terminal);
+		assertTrue(string.length() >= 10);
+	}
+
+	@Test
+	public void buildCompleterEmptyLine() {
+		final ArrayList<String> readWords = new ArrayList<>();
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+		final Set<String> expectedCandidates = SHELL_COMMANDS.stream().map(c -> "@" + c).collect(Collectors.toSet());
+		assertEquals(expectedCandidates, candidates);
+	}
+
+	@Test
+	public void buildCompleterHelp() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@help");
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+		final Set<String> expectedCandidates = new HashSet<>(SHELL_COMMANDS);
+		assertEquals(expectedCandidates, candidates);
+	}
+
+	@Test
+	public void buildCompleterLoad_emptyLine() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@load");
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		assertFalse(candidates.isEmpty());
+		final String tempFolderName = this.folder.getRoot().getName();
+		assertTrue(candidates.contains(tempFolderName));
+
+		assertTrue(candidates.contains(LoadCommandInterpreter.TASK_OWL));
+		assertTrue(candidates.contains(LoadCommandInterpreter.TASK_RDF));
+		assertTrue(candidates.contains(LoadCommandInterpreter.TASK_RLS));
+	}
+
+	@Test
+	public void buildCompleterLoad_task_OWL() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@load");
+		readWords.add(LoadCommandInterpreter.TASK_OWL);
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		assertFalse(candidates.isEmpty());
+		final String tempFolderName = this.folder.getRoot().getName();
+		assertTrue(candidates.contains(tempFolderName));
+
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS));
+	}
+
+	@Test
+	public void buildCompleterLoad_task_RDF() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@load");
+		readWords.add(LoadCommandInterpreter.TASK_RDF);
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		assertFalse(candidates.isEmpty());
+		final String tempFolderName = this.folder.getRoot().getName();
+		assertTrue(candidates.contains(tempFolderName));
+
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS));
+	}
+
+	@Test
+	public void buildCompleterLoad_task_RLS() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@load");
+		readWords.add(LoadCommandInterpreter.TASK_RLS);
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		assertFalse(candidates.isEmpty());
+		final String tempFolderName = this.folder.getRoot().getName();
+		assertTrue(candidates.contains(tempFolderName));
+
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS));
+	}
+
+	@Test
+	public void buildCompleterLoad_file() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@load");
+		final String tempFolderName = this.folder.getRoot().getName();
+		readWords.add(tempFolderName);
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_OWL));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RDF));
+		assertFalse(candidates.contains(LoadCommandInterpreter.TASK_RLS));
+	}
+
+	@Test
+	public void buildCompleterExport_emptyLine() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@export");
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		final HashSet<String> expectedCandidates = new HashSet<>();
+		expectedCandidates.add(ExportCommandInterpreter.TASK_INFERENCES);
+		expectedCandidates.add(ExportCommandInterpreter.TASK_KB);
+
+		assertEquals(expectedCandidates, candidates);
+	}
+
+	@Test
+	public void buildCompleterExport_task_INFERENCES() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@export");
+		readWords.add(ExportCommandInterpreter.TASK_INFERENCES);
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		final String tempFolderName = this.folder.getRoot().getName();
+		assertTrue(candidates.contains(tempFolderName));
+
+		assertFalse(candidates.contains(ExportCommandInterpreter.TASK_INFERENCES));
+		assertFalse(candidates.contains(ExportCommandInterpreter.TASK_KB));
+	}
+
+	@Test
+	public void buildCompleterExport_unknown() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@export");
+		readWords.add("unknown");
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+		assertTrue(candidates.isEmpty());
+	}
+
+	@Test
+	public void buildCompleterExport_task_KB() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@export");
+		readWords.add(ExportCommandInterpreter.TASK_KB);
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		final String tempFolderName = this.folder.getRoot().getName();
+		assertTrue(candidates.contains(tempFolderName));
+
+		assertFalse(candidates.contains(ExportCommandInterpreter.TASK_INFERENCES));
+		assertFalse(candidates.contains(ExportCommandInterpreter.TASK_KB));
+	}
+
+	@Test
+	public void buildCompleterClear_emptyLine() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@clear");
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+
+		final HashSet<String> expectedCandidates = new HashSet<>();
+		expectedCandidates.add(ClearCommandInterpreter.TASK_ALL);
+		expectedCandidates.add(ClearCommandInterpreter.TASK_FACTS);
+		expectedCandidates.add(ClearCommandInterpreter.TASK_INFERENCES);
+		expectedCandidates.add(ClearCommandInterpreter.TASK_PREFIXES);
+		expectedCandidates.add(ClearCommandInterpreter.TASK_RULES);
+		expectedCandidates.add(ClearCommandInterpreter.TASK_SOURCES);
+
+		assertEquals(expectedCandidates, candidates);
+	}
+
+	@Test
+	public void buildCompleterClear_unknown() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@clear");
+		readWords.add("unknown");
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+		assertTrue(candidates.isEmpty());
+	}
+
+	@Test
+	public void buildCompleterClear_task_ALL() {
+		final ArrayList<String> readWords = new ArrayList<>();
+		readWords.add("@clear");
+		readWords.add(ClearCommandInterpreter.TASK_ALL);
+
+		final Set<String> candidates = this.getCompleterCandidates(readWords, "");
+		assertTrue(candidates.isEmpty());
+	}
+
+	private Set<String> getCompleterCandidates(final ArrayList<String> readWords, final String wordToComplete) {
+		final List<Candidate> candidates = new ArrayList<>();
+
+		final TreeCompleter completer = new DefaultShellConfiguration().buildCompleter(SHELL_COMMANDS);
+		final LineReader reader = Mockito.mock(LineReader.class);
+
+		final ParsedLine parsedLine = this.makeParsedLine(readWords, wordToComplete);
+		completer.complete(reader, parsedLine, candidates);
+		return candidates.stream().map(c -> c.value()).collect(Collectors.toSet());
+	}
+
+	private ParsedLine makeParsedLine(final List<String> readWords, final String wordToComplete) {
+		final ParsedLine parsedLine = new ParsedLine() {
+
+			@Override
+			public List<String> words() {
+				return readWords;
+			}
+
+			@Override
+			public int wordIndex() {
+				return readWords.size();
+			}
+
+			@Override
+			public int wordCursor() {
+				return this.word().length();
+			}
+
+			@Override
+			public String word() {
+				return wordToComplete;
+			}
+
+			@Override
+			public String line() {
+				// Only used by PipelineCompleter
+				return null;
+			}
+
+			@Override
+			public int cursor() {
+				return this.line().length();
+			}
+		};
+		return parsedLine;
+	}
+
+}
diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java
new file mode 100644
index 000000000..5d328751f
--- /dev/null
+++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/InteractiveShellClientTest.java
@@ -0,0 +1,82 @@
+package org.semanticweb.rulewerk.client.shell;
+
+/*-
+ * #%L
+ * Rulewerk Client
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; + +import org.jline.reader.LineReader; +import org.jline.terminal.Terminal; +import org.jline.terminal.impl.DumbTerminal; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; + +public class InteractiveShellClientTest { + + @Test + public void initializeInterpreter() { + final Terminal terminal = Mockito.mock(Terminal.class); + final PrintWriter writer = Mockito.mock(PrintWriter.class); + Mockito.when(terminal.writer()).thenReturn(writer); + + final InteractiveShellClient interactiveShell = new InteractiveShellClient(); + final Interpreter interpreter = interactiveShell.initializeInterpreter(terminal); + + assertTrue(interpreter.getParserConfiguration() instanceof DefaultParserConfiguration); + assertTrue(interpreter.getKnowledgeBase().getStatements().isEmpty()); + assertEquals(writer, interpreter.getWriter()); + } + + @Test + public void run_mockConfiguration() throws IOException { + final ShellConfiguration configuration = Mockito.mock(ShellConfiguration.class); + final Terminal terminal = Mockito.mock(DumbTerminal.class); + final StringWriter output = new StringWriter(); + final PrintWriter printWriter = new PrintWriter(output); + Mockito.when(terminal.writer()).thenReturn(printWriter); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine("prompt")).thenReturn("help", "exit"); + + Mockito.when(configuration.buildTerminal()).thenReturn(terminal); + Mockito.when(configuration.buildPrompt(terminal)).thenReturn("prompt"); + Mockito.when(configuration.buildLineReader(Mockito.eq(terminal), ArgumentMatchers.anyCollection())) + .thenReturn(lineReader); + + final InteractiveShellClient shellClient = new InteractiveShellClient(); + shellClient.launchShell(configuration); + + assertTrue(output.toString().contains("Welcome to the Rulewerk interactive shell.")); + + assertTrue(output.toString().contains("Available commands:")); + + assertTrue(output.toString().contains("Exiting Rulewerk")); + } + + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java new file mode 100644 index 000000000..8c3e81448 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTest.java @@ -0,0 +1,327 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.StringWriter; +import java.io.Writer; + +import org.jline.reader.EndOfFileException; +import org.jline.reader.LineReader; +import org.jline.reader.UserInterruptException; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ShellTest { + + private final String prompt = "myPrompt"; + + @Test + public void processReadLine_blank() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" "); + assertEquals("", processedReadLine); + } + + @Test + public void processReadLine_startsWithAt() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @ "); + assertEquals("@ .", processedReadLine); + } + + @Test + public void processReadLine_endsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" . "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_startsWithAtEndsWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" @. "); + assertEquals("@.", processedReadLine); + } + + @Test + public void processReadLine_doesNotStartWithAt_DoesNotEndWithStop() { + final Shell shell = new Shell(Mockito.mock(Interpreter.class)); + final String processedReadLine = shell.processReadLine(" .@ "); + assertEquals("@.@ .", processedReadLine); + } + + @Test + public void readCommand_blank() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn(" "); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + // TODO test exceptions have not been thrown + } + + @Test + public void readCommand_unknown() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("unknown"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + + Mockito.verify(interpreterSpy).parseCommand("@unknown ."); + assertEquals("unknown", command.getName()); + assertTrue(command.getArguments().isEmpty()); + + // TODO test Parsing exception has not been thrown + } + + @Test + public void readCommand_parsingException() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter 
interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("@"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + + Mockito.verify(interpreterSpy).parseCommand("@ ."); + assertNull(command); + + // TODO test Parsing exception has been thrown + assertTrue(stringWriter.toString().startsWith("Error: failed to parse command")); + } + + @Test + public void readCommand_exit() throws CommandExecutionException, ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + + final StringWriter stringWriter = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(stringWriter); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + Mockito.when(lineReaderMock.readLine(this.prompt)).thenReturn("exit"); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + Mockito.verify(interpreterSpy).parseCommand("@exit ."); + + // TODO test Parsing exception has not been thrown + } + + @Test + public void readCommand_interruptRequest_CTRLC_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException("")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLC_nonEmptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(new UserInterruptException(" ")).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + assertNull(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void readCommand_interruptRequest_CTRLD_emptyPartialLine() throws ParsingException { + final LineReader lineReaderMock = Mockito.mock(LineReader.class); + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + + Mockito.doThrow(EndOfFileException.class).when(lineReaderMock).readLine(this.prompt); + + final Command command = shell.readCommand(lineReaderMock, this.prompt); + ShellTestUtils.testIsExitCommand(command); + + Mockito.verify(interpreterMock, Mockito.never()).parseCommand(Mockito.anyString()); + } + + @Test + public void run_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("exit"); + + shell.run(lineReader, this.prompt); + + 
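+		// the @exit command should have stopped the shell's read-eval-print loop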
assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertEquals(7, lines.length); + } + + @Test + public void run_empty_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("", "exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertEquals(7, lines.length); + } + + @Test + public void run_help_exit() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("help", "exit"); + + shell.run(lineReader, this.prompt); + + assertFalse(shell.isRunning()); + + this.testPrintWelcome(interpreterSpy); + + Mockito.verify(interpreterSpy, Mockito.times(2)).runCommand(Mockito.any(Command.class)); + + this.testPrintExit(interpreterSpy); + + final String[] lines = writer.toString().split("\r\n|\r|\n"); + assertTrue(lines.length > 7); + } + + @Test + public void runCommand_unknown() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + Mockito.when(lineReader.readLine(this.prompt)).thenReturn("unknown", "exit"); + + final Command command = shell.runCommand(lineReader, this.prompt); + assertNotNull(command); + assertEquals("unknown", command.getName()); + + Mockito.verify(interpreterSpy).runCommand(Mockito.any(Command.class)); + + final String printedResult = writer.toString(); + assertTrue(printedResult.startsWith("Error: ")); + } + + @Test + public void runCommand_exceptionDuringReading() throws CommandExecutionException { + final Writer writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + final Interpreter interpreterSpy = Mockito.spy(interpreter); + final Shell shell = new Shell(interpreterSpy); + + final LineReader lineReader = Mockito.mock(LineReader.class); + final RuntimeException exception = Mockito.mock(RuntimeException.class); + Mockito.when(exception.getMessage()) + .thenReturn("This exception is thrown intentionally as part of a unit test"); + + Mockito.when(lineReader.readLine(this.prompt)).thenThrow(exception); + + final Command command = shell.runCommand(lineReader, this.prompt); + assertNull(command); + + Mockito.verify(interpreterSpy, Mockito.never()).runCommand(Mockito.any(Command.class)); + + final String printedResult = 
writer.toString(); + assertTrue(printedResult.startsWith("Unexpected error: " + exception.getMessage())); + + Mockito.verify(exception).printStackTrace(); + } + + public void testPrintWelcome(final Interpreter interpreterSpy) { + Mockito.verify(interpreterSpy, Mockito.times(2)).printNormal("\n"); + Mockito.verify(interpreterSpy).printSection("Welcome to the Rulewerk interactive shell.\n"); + Mockito.verify(interpreterSpy).printNormal("For further information, type "); + Mockito.verify(interpreterSpy).printCode("@help."); + Mockito.verify(interpreterSpy).printNormal(" To quit, type "); + Mockito.verify(interpreterSpy).printCode("@exit.\n"); + } + + public void testPrintExit(final Interpreter interpreterSpy) { + Mockito.verify(interpreterSpy).printSection("Exiting Rulewerk shell ... bye.\n\n"); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java new file mode 100644 index 000000000..49d1a7250 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/ShellTestUtils.java @@ -0,0 +1,67 @@ +package org.semanticweb.rulewerk.client.shell; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.PrintWriter; +import java.io.Writer; + +import org.jline.terminal.Terminal; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.commands.ExitCommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; + +public final class ShellTestUtils { + + private ShellTestUtils() { + } + + public static Interpreter getMockInterpreter(final Writer writer) { + final Terminal terminalMock = Mockito.mock(Terminal.class); + final PrintWriter printWriter = new PrintWriter(writer); + Mockito.when(terminalMock.writer()).thenReturn(printWriter); + + return getMockInterpreter(terminalMock); + } + + public static Interpreter getMockInterpreter(final Terminal terminal) { + final TerminalStyledPrinter terminalStyledPrinter = new TerminalStyledPrinter(terminal); + + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + final Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, terminalStyledPrinter, parserConfiguration); + } + + public static void testIsExitCommand(final Command command) { + assertEquals(ExitCommandInterpreter.EXIT_COMMAND.getName(), command.getName()); + assertTrue(command.getArguments().isEmpty()); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java new file mode 100644 index 000000000..0f0ff0253 --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/TerminalStylePrinterTest.java @@ -0,0 +1,83 @@ +package org.semanticweb.rulewerk.client.shell; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.PrintWriter; + +import org.jline.terminal.Terminal; +import org.jline.utils.AttributedString; +import org.jline.utils.AttributedStyle; +import org.junit.Test; +import org.mockito.Mockito; + +public class TerminalStylePrinterTest { + final Terminal terminal; + final PrintWriter writer; + final TerminalStyledPrinter terminalStyledPrinter; + + public static final String TEST_STRING = "test"; + + public TerminalStylePrinterTest() { + this.writer = Mockito.mock(PrintWriter.class); + this.terminal = Mockito.mock(Terminal.class); + Mockito.when(this.terminal.writer()).thenReturn(this.writer); + + this.terminalStyledPrinter = new TerminalStyledPrinter(this.terminal); + + } + + @Test + public void testPrintNormal() { + this.terminalStyledPrinter.printNormal(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT); + } + + @Test + public void testPrintSection() { + this.terminalStyledPrinter.printSection(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.bold()); + } + + @Test + public void testPrintEmph() { + this.terminalStyledPrinter.printEmph(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.bold()); + } + + @Test + public void testPrintCode() { + this.terminalStyledPrinter.printCode(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.foreground(AttributedStyle.YELLOW).bold()); + } + + @Test + public void testPrintImportant() { + this.terminalStyledPrinter.printImportant(TEST_STRING); + this.testPrintStyledExpected(AttributedStyle.DEFAULT.foreground(AttributedStyle.RED)); + } + + private void testPrintStyledExpected(final AttributedStyle expectedStyle) { + final AttributedString expectedAttributedString = new AttributedString(TEST_STRING, expectedStyle); + Mockito.verify(this.writer).print(expectedAttributedString.toAnsi(this.terminal)); + Mockito.verify(this.writer).flush(); + } + +} diff --git a/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java new file mode 100644 index 000000000..2a2fd0baf --- /dev/null +++ b/rulewerk-client/src/test/java/org/semanticweb/rulewerk/client/shell/commands/ExitCommandInterpreterTest.java @@ -0,0 +1,78 @@ +package org.semanticweb.rulewerk.client.shell.commands; + +import static org.junit.Assert.assertEquals; + +/*- + * #%L + * Rulewerk Client + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.StringWriter; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.client.shell.Shell; +import org.semanticweb.rulewerk.client.shell.ShellTestUtils; +import org.semanticweb.rulewerk.commands.CommandExecutionException; +import org.semanticweb.rulewerk.commands.CommandInterpreter; +import org.semanticweb.rulewerk.commands.Interpreter; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ExitCommandInterpreterTest { + + @Test + public void exitShell_succeeds() throws CommandExecutionException { + final Interpreter interpreterMock = Mockito.mock(Interpreter.class); + final Shell shell = new Shell(interpreterMock); + final Shell shellSpy = Mockito.spy(shell); + final ExitCommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellSpy); + + commandInterpreter.run(Mockito.mock(Command.class), interpreterMock); + + Mockito.verify(shellSpy).exitShell(); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + final Shell shellMock = Mockito.mock(Shell.class); + final ExitCommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + + final StringWriter writer = new StringWriter(); + final Interpreter interpreter = ShellTestUtils.getMockInterpreter(writer); + + final Interpreter interpreterSpy = Mockito.spy(interpreter); + commandInterpreter.printHelp("commandname", interpreterSpy); + + Mockito.verify(interpreterSpy).printNormal("Usage: @commandname .\n"); + + final String result = writer.toString(); + assertEquals("Usage: @commandname .\n", result); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + final Shell shellMock = Mockito.mock(Shell.class); + final CommandInterpreter commandInterpreter = new ExitCommandInterpreter(shellMock); + final String synopsis = commandInterpreter.getSynopsis(); + assertTrue(synopsis.length() < 70); + } + +} diff --git a/vlog4j-owlapi/LICENSE.txt b/rulewerk-commands/LICENSE.txt similarity index 100% rename from vlog4j-owlapi/LICENSE.txt rename to rulewerk-commands/LICENSE.txt diff --git a/rulewerk-commands/pom.xml b/rulewerk-commands/pom.xml new file mode 100644 index 000000000..9f036e2dd --- /dev/null +++ b/rulewerk-commands/pom.xml @@ -0,0 +1,41 @@ + + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.10.0-SNAPSHOT + + + rulewerk-commands + jar + + Rulewerk command execution support + API for interpreting shell commands to control Rulewerk + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + ${project.groupId} + rulewerk-owlapi + ${project.version} + + + ${project.groupId} + rulewerk-rdf + ${project.version} + + + diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java new file mode 100644 index 000000000..004023a65 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreter.java @@ -0,0 +1,94 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class AddSourceCommandInterpreter implements CommandInterpreter { + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + final String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + final PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + "source declaration"); + + final Predicate predicate = extractPredicate(predicateDeclaration); + final DataSource dataSource = extractDataSource(sourceDeclaration, interpreter); + + if (dataSource.getRequiredArity().isPresent()) { + final Integer requiredArity = dataSource.getRequiredArity().get(); + if (predicate.getArity() != requiredArity) { + throw new CommandExecutionException("Invalid arity " + predicate.getArity() + " for data source, " + + "expected " + requiredArity + "."); + } + } + + interpreter.getKnowledgeBase().addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " []: .\n" + + " [] : the name of the predicate and its arity\n" + + " : a fact specifying a source declaration\n\n" + + "Note that every predicate can have multiple sources.\n"); + } + + @Override + public String getSynopsis() { + return "define a new external data source for a predicate"; + } + + static Predicate extractPredicate(final String predicateDeclaration) throws CommandExecutionException { + String predicateName; + int arity; + try { + final int openBracket = predicateDeclaration.indexOf('['); + final int closeBracket = predicateDeclaration.indexOf(']'); + predicateName = predicateDeclaration.substring(0, openBracket); + final String arityString = predicateDeclaration.substring(openBracket + 1, closeBracket); + arity = Integer.parseInt(arityString); + } catch (IndexOutOfBoundsException | NumberFormatException e) { + throw new CommandExecutionException( + "Predicate declaration must have the format \"predicateName[number]\" but was \"" + + predicateDeclaration + "\"."); + } + return Expressions.makePredicate(predicateName, arity); + } + + static DataSource extractDataSource(final PositiveLiteral sourceDeclaration, final Interpreter interpreter) + throws CommandExecutionException { + try { + return interpreter.getParserConfiguration() + .parseDataSourceSpecificPartOfDataSourceDeclaration(sourceDeclaration); + } catch (final ParsingException e) { + throw new 
CommandExecutionException("Could not parse source declaration: " + e.getMessage()); + } + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java new file mode 100644 index 000000000..bf194e079 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreter.java @@ -0,0 +1,71 @@ +package org.semanticweb.rulewerk.commands; + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class AssertCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + interpreter.getKnowledgeBase().addStatement(fact); + factCount++; + } else if (argument.fromRule().isPresent()) { + interpreter.getKnowledgeBase().addStatement(argument.fromRule().get()); + ruleCount++; + } else { + throw new CommandExecutionException( + "Only facts and rules can be asserted. 
Encountered " + argument.toString()); + } + } + + interpreter.printNormal("Asserted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " (<fact or rule>)+ .\n" + " <fact or rule>: statement(s) to be added to the knowledge base\n" + "Reasoning needs to be invoked after finishing addition of statements.\n"); + } + + @Override + public String getSynopsis() { + return "add facts and rules to the knowledge base"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java new file mode 100644 index 000000000..484d90c41 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreter.java @@ -0,0 +1,90 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Rule; + +public class ClearCommandInterpreter implements CommandInterpreter { + + public static final String TASK_ALL = "ALL"; + public static final String TASK_INFERENCES = "INF"; + public static final String TASK_FACTS = "FACTS"; + public static final String TASK_RULES = "RULES"; + public static final String TASK_SOURCES = "DATASOURCES"; + public static final String TASK_PREFIXES = "PREFIXES"; + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 1); + final String task = Interpreter.extractNameArgument(command, 0, "task"); + if (TASK_ALL.equals(task)) { + interpreter.clearReasonerAndKnowledgeBase(); + interpreter.printNormal("Knowledge base has been cleared; reasoner has been completely reset.\n"); + } else if (TASK_INFERENCES.equals(task)) { + interpreter.getReasoner().resetReasoner(); + interpreter.printNormal("Reasoner has been reset.\n"); + } else if (TASK_FACTS.equals(task)) { + for (final Fact fact : interpreter.getKnowledgeBase().getFacts()) { + interpreter.getKnowledgeBase().removeStatement(fact); + } + interpreter.printNormal("All facts have been removed from the knowledge base.\n"); + } else if (TASK_RULES.equals(task)) { + for (final Rule rule : interpreter.getKnowledgeBase().getRules()) { + interpreter.getKnowledgeBase().removeStatement(rule); + } + interpreter.printNormal("All rules have been removed from the knowledge base.\n"); + } else if (TASK_SOURCES.equals(task)) { + for (final DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() +
.getDataSourceDeclarations()) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + } + interpreter.printNormal("All datasource declarations have been removed from the knowledge base.\n"); + } else if (TASK_PREFIXES.equals(task)) { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().clear(); + interpreter.printNormal("All prefixes and the base namespace have been removed from the knowledge base.\n"); + } else { + throw new CommandExecutionException( + "Task \"" + task + "\" not supported; should be one of: " + TASK_ALL + ", " + TASK_INFERENCES + + ", " + TASK_FACTS + ", " + TASK_RULES + ", " + TASK_SOURCES + ", " + TASK_PREFIXES); + } + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " TASK .\n" // + " TASK: what to reset, possible values:\n" // + " ALL: empty knowledge base and completely reset reasoner\n" // + " INF: reset reasoner to clear all loaded data and inferences\n" // + " FACTS: remove all facts from knowledge base\n" // + " RULES: remove all rules from knowledge base\n" // + " DATASOURCES: remove all data source declarations from knowledge base\n" // + " PREFIXES: undeclare all prefixes and base namespace\n"); + } + + @Override + public String getSynopsis() { + return "discard (parts of) the knowledge base or computed inferences"; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/VLog4jException.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java similarity index 58% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/VLog4jException.java rename to rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java index f7d0fca18..9d4fcce4b 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/VLog4jException.java +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandExecutionException.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.reasoner.exceptions; +package org.semanticweb.rulewerk.commands; /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,23 +20,20 @@ * #L% */ -/** - * Top-level checked exception for VLog4j system.
- * @author Irina Dragoste - * - */ -public class VLog4jException extends Exception { +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; + +public class CommandExecutionException extends RulewerkException { /** - * generated serial version UID + * Generated serial version UID */ - private static final long serialVersionUID = 8305375071519734590L; + private static final long serialVersionUID = 1479091500621334935L; - public VLog4jException(String message, Throwable cause) { + public CommandExecutionException(String message, Throwable cause) { super(message, cause); } - public VLog4jException(String message) { + public CommandExecutionException(String message) { super(message); } diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java new file mode 100644 index 000000000..ff2c1f154 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/CommandInterpreter.java @@ -0,0 +1,57 @@ +package org.semanticweb.rulewerk.commands; + +import org.semanticweb.rulewerk.core.model.api.Command; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for classes that interpret (execute) specific commands. + * + * @author Markus Kroetzsch + * + */ +public interface CommandInterpreter { + + /** + * Execute the commands in the context of the given reasoner and output stream. + * + * @param command command to be interpreted + * @param interpreter surrounding interpreter that provides the execution + * context + */ + void run(Command command, Interpreter interpreter) throws CommandExecutionException; + + /** + * Prints a text that describes command use and parameters, using the given + * command name. The output should start with a "Usage:" line, followed by + * single-space-indented parameter descriptions, and it should end with a + * newline. + */ + void printHelp(String commandName, Interpreter interpreter); + + /** + * Returns a short line describing the purpose of the command. + * + * @return short command synopsis + */ + String getSynopsis(); + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java new file mode 100644 index 000000000..30455240f --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreter.java @@ -0,0 +1,94 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.io.Writer; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class ExportCommandInterpreter implements CommandInterpreter { + + public static final String TASK_KB = "KB"; + public static final String TASK_INFERENCES = "INFERENCES"; + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + + final String task = Interpreter.extractNameArgument(command, 0, "task"); + final String fileName = Interpreter.extractStringArgument(command, 1, "filename"); + + if (TASK_KB.equals(task)) { + this.exportKb(interpreter, fileName); + } else if (TASK_INFERENCES.equals(task)) { + this.exportInferences(interpreter, fileName); + } else { + throw new CommandExecutionException( + "Unknown task " + task + ". Should be " + TASK_KB + " or " + TASK_INFERENCES); + } + + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " TASK \"file\" .\n" // + " TASK: what to export; can be KB or INFERENCES\n" // + " \"file\": path to export file (suggested extension: .rls), enclosed in quotes\n"); + } + + @Override + public String getSynopsis() { + return "export knowledge base or inferences to a Rulewerk file"; + } + + private void exportInferences(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + final Timer timer = new Timer("export"); + Correctness correctness; + try (Writer writer = interpreter.getFileWriter(fileName)) { + timer.start(); + correctness = interpreter.getReasoner().writeInferences(writer); + timer.stop(); + } catch (final IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + + interpreter.printNormal("Exported all inferences in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time)."); + interpreter.printNormal(" This result is " + correctness + ".\n"); + } + + private void exportKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + final Timer timer = new Timer("export"); + try (Writer writer = interpreter.getFileWriter(fileName)) { + timer.start(); + interpreter.getKnowledgeBase().writeKnowledgeBase(writer); + timer.stop(); + } catch (final IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + interpreter.printNormal("Exported knowledge base in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java new file mode 100644 index 000000000..7707137e2 --- /dev/null +++
b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreter.java @@ -0,0 +1,70 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.TermType; + +public class HelpCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 0) { + int maxLength = 0; + for (String commandName : interpreter.commandInterpreters.keySet()) { + maxLength = (commandName.length() > maxLength) ? commandName.length() : maxLength; + } + final int padLength = maxLength + 1; + + interpreter.printSection("Available commands:\n"); + interpreter.commandInterpreters.forEach((commandName, commandForName) -> { + interpreter.printCode(" @" + String.format("%1$-" + padLength + "s", commandName)); + interpreter.printNormal(": " + commandForName.getSynopsis() + "\n"); + }); + interpreter.printNormal("\nFor more information on any command, use "); + interpreter.printCode("@" + command.getName() + " [command name].\n"); + } else if (command.getArguments().size() == 1 && command.getArguments().get(0).fromTerm().isPresent() + && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + String helpCommand = command.getArguments().get(0).fromTerm().get().getName(); + if (interpreter.commandInterpreters.containsKey(helpCommand)) { + interpreter.printCode("@" + helpCommand); + interpreter.printNormal(": " + interpreter.commandInterpreters.get(helpCommand).getSynopsis() + "\n"); + interpreter.commandInterpreters.get(helpCommand).printHelp(helpCommand, interpreter); + } else { + interpreter.printNormal("Command '" + helpCommand + "' not known.\n"); + } + } else { + printHelp(command.getName(), interpreter); + } + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " [command name] .\n" // + + "\t command name: command to get detailed help for\n"); + } + + @Override + public String getSynopsis() { + return "print help on available commands"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java new file mode 100644 index 000000000..1be04b71b --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/Interpreter.java @@ -0,0 +1,298 @@ +package org.semanticweb.rulewerk.commands; + +import java.io.BufferedWriter; +import java.io.ByteArrayInputStream; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import 
java.io.OutputStreamWriter; +import java.io.Writer; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.nio.charset.StandardCharsets; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; +import org.semanticweb.rulewerk.parser.javacc.ParseException; +import org.semanticweb.rulewerk.parser.javacc.TokenMgrError; + +public class Interpreter implements AutoCloseable { + + @FunctionalInterface + public interface ReasonerProvider { + public Reasoner reasoner(KnowledgeBase knowledgeBase); + } + + @FunctionalInterface + public interface KnowledgeBaseProvider { + public KnowledgeBase knowledgeBase(); + } + + public static final KnowledgeBaseProvider EMPTY_KNOWLEDGE_BASE_PROVIDER = new KnowledgeBaseProvider() { + @Override + public KnowledgeBase knowledgeBase() { + return new KnowledgeBase(); + } + }; + + final ReasonerProvider reasonerProvider; + final KnowledgeBaseProvider knowledgeBaseProvider; + + Reasoner reasoner = null; + final StyledPrinter printer; + final ParserConfiguration parserConfiguration; + + final LinkedHashMap<String, CommandInterpreter> commandInterpreters = new LinkedHashMap<>(); + + public Interpreter(final KnowledgeBaseProvider knowledgeBaseProvider, final ReasonerProvider reasonerProvider, + final StyledPrinter printer, final ParserConfiguration parserConfiguration) { + this.knowledgeBaseProvider = knowledgeBaseProvider; + this.reasonerProvider = reasonerProvider; + this.clearReasonerAndKnowledgeBase(); + this.printer = printer; + this.parserConfiguration = parserConfiguration; + this.registerDefaultCommandInterpreters(); + } + + public void registerCommandInterpreter(final String command, final CommandInterpreter commandInterpreter) { + this.commandInterpreters.put(command, commandInterpreter); + } + + public Set<String> getRegisteredCommands() { + return this.commandInterpreters.keySet(); + } + + public void runCommands(final List<Command> commands) throws CommandExecutionException { + for (final Command command : commands) { + this.runCommand(command); + } + } + + public void runCommand(final Command command) throws CommandExecutionException { + if (this.commandInterpreters.containsKey(command.getName())) { + try {
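+ // dispatch to the interpreter registered for this command name; any exception it throws is wrapped as a CommandExecutionException below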
this.commandInterpreters.get(command.getName()).run(command, this); + } catch (final Exception e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } else { + throw new CommandExecutionException("Unknown command '" + command.getName() + "'"); + } + } + + public Command parseCommand(final String commandString) throws ParsingException { + final InputStream inputStream = new ByteArrayInputStream(commandString.getBytes(StandardCharsets.UTF_8)); + final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8"); + localParser.setParserConfiguration(this.parserConfiguration); + + // Copy prefixes from KB: + try { + localParser.getPrefixDeclarationRegistry().setBaseIri(this.reasoner.getKnowledgeBase().getBaseIri()); + for (final Entry<String, String> prefix : this.reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()) { + localParser.getPrefixDeclarationRegistry().setPrefixIri(prefix.getKey(), prefix.getValue()); + } + } catch (final PrefixDeclarationException e) { // unlikely! + throw new RuntimeException(e); + } + + Command result; + try { + result = localParser.command(); + localParser.ensureEndOfInput(); + } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) { + throw new ParsingException("failed to parse command \"\"\"" + commandString + "\"\"\"", e); + } + return result; + } + + public Reasoner getReasoner() { + return this.reasoner; + } + + public KnowledgeBase getKnowledgeBase() { + return this.reasoner.getKnowledgeBase(); + } + + public ParserConfiguration getParserConfiguration() { + return this.parserConfiguration; + } + + public Writer getWriter() { + return this.printer.getWriter(); + } + + public void printNormal(final String string) { + this.printer.printNormal(string); + } + + public void printSection(final String string) { + this.printer.printSection(string); + } + + public void printEmph(final String string) { + this.printer.printEmph(string); + } + + public void printCode(final String string) { + this.printer.printCode(string); + } + + public void printImportant(final String string) { + this.printer.printImportant(string); + } + + private void registerDefaultCommandInterpreters() { + this.registerCommandInterpreter("help", new HelpCommandInterpreter()); + this.registerCommandInterpreter("load", new LoadCommandInterpreter()); + this.registerCommandInterpreter("assert", new AssertCommandInterpreter()); + this.registerCommandInterpreter("retract", new RetractCommandInterpreter()); + this.registerCommandInterpreter("addsource", new AddSourceCommandInterpreter()); + this.registerCommandInterpreter("delsource", new RemoveSourceCommandInterpreter()); + this.registerCommandInterpreter("setprefix", new SetPrefixCommandInterpreter()); + this.registerCommandInterpreter("clear", new ClearCommandInterpreter()); + this.registerCommandInterpreter("reason", new ReasonCommandInterpreter()); + this.registerCommandInterpreter("query", new QueryCommandInterpreter()); + this.registerCommandInterpreter("export", new ExportCommandInterpreter()); + this.registerCommandInterpreter("showkb", new ShowKbCommandInterpreter()); + } + + /** + * Validate that the correct number of arguments was passed to a command.
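+ * For example, the interpreter for the {@code @clear} command expects a single task argument and therefore calls {@code validateArgumentCount(command, 1)}.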
+ * + * @param command Command to validate + * @param number expected number of parameters + * @throws CommandExecutionException if the number is not correct + */ + public static void validateArgumentCount(final Command command, final int number) throws CommandExecutionException { + if (command.getArguments().size() != number) { + throw new CommandExecutionException("This command requires exactly " + number + " argument(s), but " + + command.getArguments().size() + " were given."); + } + } + + private static CommandExecutionException getArgumentTypeError(final int index, final String expectedType, + final String parameterName) { + return new CommandExecutionException( + "Argument at position " + index + " needs to be of type " + expectedType + " (" + parameterName + ")."); + } + + public static String extractStringArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + try { + return Terms.extractString(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "string", parameterName))); + } catch (final IllegalArgumentException | IndexOutOfBoundsException e) { + throw getArgumentTypeError(index, "string", parameterName); + } + } + + public static String extractNameArgument(final Command command, final int index, final String parameterName) + throws CommandExecutionException { + try { + return Terms.extractName(command.getArguments().get(index).fromTerm() + .orElseThrow(() -> getArgumentTypeError(index, "constant", parameterName))); + } catch (final IllegalArgumentException | IndexOutOfBoundsException e) { + throw getArgumentTypeError(index, "constant", parameterName); + } + } + + public static PositiveLiteral extractPositiveLiteralArgument(final Command command, final int index, + final String parameterName) throws CommandExecutionException { + try { + return command.getArguments().get(index).fromPositiveLiteral() + .orElseThrow(() -> getArgumentTypeError(index, "literal", parameterName)); + } catch (final IndexOutOfBoundsException e) { + throw getArgumentTypeError(index, "literal", parameterName); + } + } + + /** + * Returns a Writer to write to the specified file. + * + * @param fileName name of the file to write to + * @return a UTF-8 writer for the given file + * @throws FileNotFoundException if the file cannot be opened for writing + */ + public Writer getFileWriter(final String fileName) throws FileNotFoundException { + return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), StandardCharsets.UTF_8)); + } + + /** + * Returns an InputStream to read from the specified file. + * + * @param fileName name of the file to read from + * @return an input stream for the given file + * @throws FileNotFoundException if the file cannot be found or opened + */ + public InputStream getFileInputStream(final String fileName) throws FileNotFoundException { + return new FileInputStream(fileName); + } + + /** + * Completely resets the reasoner and knowledge base. All inferences and + * statements are cleared. + */ + public void clearReasonerAndKnowledgeBase() { + this.closeReasoner(); + this.reasoner = this.reasonerProvider.reasoner(this.knowledgeBaseProvider.knowledgeBase()); + this.reasoner.setLogLevel(LogLevel.ERROR); + try { + this.reasoner.reason(); + } catch (final IOException e) { + throw new RulewerkRuntimeException("Failed to initialise reasoner: " + e.getMessage(), e); + } + } + + /** + * Frees all resources, especially those associated with reasoning. + */ + @Override + public void close() { + this.closeReasoner(); + } + + /** + * Closes and discards the internal {@link Reasoner}.
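+ * Has no effect if no reasoner is currently present.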
+ */ + private void closeReasoner() { + if (this.reasoner != null) { + this.reasoner.close(); + this.reasoner = null; + } + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java new file mode 100644 index 000000000..c8a2950c6 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreter.java @@ -0,0 +1,226 @@ +package org.semanticweb.rulewerk.commands; + +import java.io.File; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.openrdf.model.Model; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; + +/** + * Interpreter for the load command. 
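+ * <p> + * Depending on the first argument, input is loaded as a Rulewerk rule file (RULES, the default), an OWL ontology (OWL), or an RDF document (RDF), e.g. {@code @load "kb.rls" .} or {@code @load OWL "ontology.owl" .} (the file names here are merely illustrative).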
+ * + * @author Markus Kroetzsch + * + */ +public class LoadCommandInterpreter implements CommandInterpreter { + + public static final String TASK_RLS = "RULES"; + public static final String TASK_OWL = "OWL"; + public static final String TASK_RDF = "RDF"; + + static final String PREDICATE_ABOX = "ABOX"; + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + String task; + int pos = 0; + if (command.getArguments().size() > 0 && command.getArguments().get(0).fromTerm().isPresent() + && command.getArguments().get(0).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + task = Interpreter.extractNameArgument(command, 0, "task"); + pos++; + } else { + task = TASK_RLS; + } + + final String fileName = Interpreter.extractStringArgument(command, pos, "filename"); + pos++; + + String rdfTriplePredicate = RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; + if (TASK_RDF.equals(task) && command.getArguments().size() > pos) { + if (command.getArguments().get(pos).fromTerm().isPresent() + && command.getArguments().get(pos).fromTerm().get().getType() == TermType.ABSTRACT_CONSTANT) { + rdfTriplePredicate = command.getArguments().get(pos).fromTerm().get().getName(); + if (PREDICATE_ABOX.equals(rdfTriplePredicate)) { // ABox-style import + rdfTriplePredicate = null; + } + pos++; + } else { + throw new CommandExecutionException("Optional triple predicate name must be an IRI."); + } + } + + Interpreter.validateArgumentCount(command, pos); + + final int countRulesBefore = interpreter.getKnowledgeBase().getRules().size(); + final int countFactsBefore = interpreter.getKnowledgeBase().getFacts().size(); + final int countDataSourceDeclarationsBefore = interpreter.getKnowledgeBase().getDataSourceDeclarations().size(); + + if (TASK_RLS.equals(task)) { + this.loadKb(interpreter, fileName); + } else if (TASK_OWL.equals(task)) { + this.loadOwl(interpreter, fileName); + } else if (TASK_RDF.equals(task)) { + this.loadRdf(interpreter, fileName, rdfTriplePredicate); + } else { + throw new CommandExecutionException( + "Unknown task " + task + ". 
Should be one of " + TASK_RLS + ", " + TASK_OWL + ", " + TASK_RDF); + } + + interpreter.printNormal("Loaded " + (interpreter.getKnowledgeBase().getFacts().size() - countFactsBefore) + + " new fact(s), " + (interpreter.getKnowledgeBase().getRules().size() - countRulesBefore) + + " new rule(s), and " + (interpreter.getKnowledgeBase().getDataSourceDeclarations().size() + - countDataSourceDeclarationsBefore) + + " new datasource declaration(s).\n"); + + } + + private void loadKb(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + try { + final InputStream inputStream = interpreter.getFileInputStream(fileName); + final File file = new File(fileName); + final ParserConfiguration parserConfiguration = new DefaultParserConfiguration() + .setImportBasePath(file.getParent()); + RuleParser.parseInto(interpreter.getKnowledgeBase(), inputStream, parserConfiguration); + } catch (final FileNotFoundException e) { + throw new CommandExecutionException(e.getMessage(), e); + } catch (final ParsingException e) { + throw new CommandExecutionException("Failed to parse Rulewerk file: " + e.getMessage(), e); + } + } + + private void loadOwl(final Interpreter interpreter, final String fileName) throws CommandExecutionException { + final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); + OWLOntology ontology; + try { + ontology = ontologyManager.loadOntologyFromOntologyDocument(new File(fileName)); + } catch (final OWLOntologyCreationException e) { + throw new CommandExecutionException("Problem loading OWL ontology: " + e.getMessage(), e); + } + interpreter.printNormal( + "Found OWL ontology with " + ontology.getLogicalAxiomCount() + " logical OWL axioms ...\n"); + + final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter(false); + owlToRulesConverter.addOntology(ontology); + if (owlToRulesConverter.getUnsupportedAxiomsCount() > 0) { + interpreter.printImportant("Warning: Some OWL axioms could not be converted to rules.\n"); + owlToRulesConverter.getUnsupportedAxiomsSample() + .forEach((owlAxiom) -> interpreter.printNormal(owlAxiom.toString() + "\n")); + if (owlToRulesConverter.getUnsupportedAxiomsSample().size() < owlToRulesConverter + .getUnsupportedAxiomsCount()) { + interpreter.printNormal("...\n"); + } + interpreter.printNormal("Encountered " + owlToRulesConverter.getUnsupportedAxiomsCount() + + " unsupported logical axioms in total.\n"); + } + + interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getRules()); + interpreter.getKnowledgeBase().addStatements(owlToRulesConverter.getFacts()); + } + + private void loadRdf(final Interpreter interpreter, final String fileName, final String triplePredicateName) + throws CommandExecutionException { + try { + final String baseIri = new File(fileName).toURI().toString(); + + final Iterator<RDFFormat> formatsToTry = Arrays + .asList(RDFFormat.NTRIPLES, RDFFormat.TURTLE, RDFFormat.RDFXML).iterator(); + Model model = null; + final List<String> parseErrors = new ArrayList<>(); + while (model == null && formatsToTry.hasNext()) { + final RDFFormat rdfFormat = formatsToTry.next(); + try { + final InputStream inputStream = interpreter.getFileInputStream(fileName); + model = this.parseRdfFromStream(inputStream, rdfFormat, baseIri); + interpreter.printNormal("Found RDF document in format " + rdfFormat.getName() + " ...\n"); + } catch (RDFParseException | RDFHandlerException e) { + parseErrors.add("Failed to parse as " + rdfFormat.getName() + ": " + e.getMessage()); + } + } + if (model == null) { + String
message = "Failed to parse RDF input:"; + for (final String error : parseErrors) { + message += "\n " + error; + } + throw new CommandExecutionException(message); + } + + final RdfModelConverter rdfModelConverter = new RdfModelConverter(true, triplePredicateName); + rdfModelConverter.addAll(interpreter.getKnowledgeBase(), model); + } catch (final IOException e) { + throw new CommandExecutionException("Could not read input: " + e.getMessage(), e); + } + } + + private Model parseRdfFromStream(final InputStream inputStream, final RDFFormat rdfFormat, final String baseIri) + throws RDFParseException, RDFHandlerException, IOException { + final Model model = new LinkedHashModel(); + final RDFParser rdfParser = Rio.createParser(rdfFormat); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseIri); + return model; + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " [TASK] \"file\" [RDF predicate] .\n" // + + " TASK: optional; one of RULES (default), OWL, RDF:\n" // + + " RULES to load a knowledge base in Rulewerk rls format\n" // + + " OWL to load an OWL ontology and convert it to facts and rules\n" // + + " RDF to load an RDF document and convert it to facts\n" // + + " \"file\": path to the file to load, enclosed in quotes\n" // + + " RDF predicate: optional name of the predicate used for loading RDF\n" // + + " triples (default: TRIPLE); use ABOX to load triples\n" // + + " like OWL assertions, using unary and binary predicates\n"); + } + + @Override + public String getSynopsis() { + return "load a knowledge base from file (in Rulewerk format, OWL, or RDF)"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java new file mode 100644 index 000000000..35e53a67a --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreter.java @@ -0,0 +1,181 @@ +package org.semanticweb.rulewerk.commands; + +import java.io.IOException; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class QueryCommandInterpreter implements CommandInterpreter { + + public static final Term KEYWORD_LIMIT = Expressions.makeAbstractConstant("LIMIT"); + public static final Term KEYWORD_COUNT = Expressions.makeAbstractConstant("COUNT"); + public static final Term KEYWORD_TOFILE = Expressions.makeAbstractConstant("EXPORTCSV"); + + private PositiveLiteral queryLiteral; + private int limit; + private boolean doCount; + private String csvFile; + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + this.processArguments(command.getArguments()); + + if (this.doCount) { + this.printCountQueryResults(interpreter); + } else if (this.csvFile == null) { + this.printQueryResults(interpreter); + } else { + this.exportQueryResults(interpreter); + } + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal( + "Usage: @" + commandName + " [COUNT] <query literal> [LIMIT <limit>] [EXPORTCSV <\"file\">] .\n" + " query literal: positive literal, possibly with ?queryVariables\n" + " limit: maximal number of results to be shown\n" + " \"file\": path to CSV file for exporting query results, enclosed in quotes\n"); + } + + @Override + public String getSynopsis() { + return "print or export query results"; + } + + private void processArguments(final List<Argument> arguments) throws CommandExecutionException { + int pos = 0; + this.limit = -1; + this.doCount = false; + this.csvFile = null; + + if (arguments.size() > 0 && KEYWORD_COUNT.equals(arguments.get(0).fromTerm().orElse(null))) { + this.doCount = true; + pos++; + } + + if (arguments.size() > pos && arguments.get(pos).fromPositiveLiteral().isPresent()) { + this.queryLiteral = arguments.get(pos).fromPositiveLiteral().get(); + pos++; + } else { + throw new CommandExecutionException("A query literal must be given."); + } + + while (arguments.size() > pos) { + if (arguments.size() > pos + 1 && KEYWORD_LIMIT.equals(arguments.get(pos).fromTerm().orElse(null)) + && arguments.get(pos + 1).fromTerm().isPresent()) { + try { + this.limit = Terms.extractInt(arguments.get(pos + 1).fromTerm().get()); + pos += 2; + } catch (final IllegalArgumentException e) { + throw new CommandExecutionException( + "Invalid limit given: " + arguments.get(pos + 1).fromTerm().get()); + } + } else if (arguments.size() > pos + 1 && KEYWORD_TOFILE.equals(arguments.get(pos).fromTerm().orElse(null)) + && arguments.get(pos + 1).fromTerm().isPresent()) { + try { + this.csvFile = Terms.extractString(arguments.get(pos + 1).fromTerm().get()); + pos += 2; + } catch (final IllegalArgumentException e) { + throw new CommandExecutionException( + "Invalid filename given: " + arguments.get(pos + 1).fromTerm().get()); + } + } else { + throw new CommandExecutionException("Unrecognized arguments"); + } +
} + } + + private void printCountQueryResults(final Interpreter interpreter) throws CommandExecutionException { + if (this.limit != -1) { + throw new CommandExecutionException("LIMIT not supported with COUNT"); + } + if (this.csvFile != null) { + throw new CommandExecutionException("COUNT results cannot be exported to CSV"); + } + + final Timer timer = new Timer("query"); + timer.start(); + final QueryAnswerCount count = interpreter.getReasoner().countQueryAnswers(this.queryLiteral); + timer.stop(); + + interpreter.printNormal(String.valueOf(count.getCount()) + "\n"); + interpreter.printNormal("Answered in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.printNormal(" This result is " + count.getCorrectness() + ".\n"); + } + + private void printQueryResults(final Interpreter interpreter) throws CommandExecutionException { + final LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(this.queryLiteral, interpreter.getWriter(), + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry()); + + final Timer timer = new Timer("query"); + timer.start(); + try (final QueryResultIterator answers = interpreter.getReasoner().answerQuery(this.queryLiteral, true)) { + while (printer.getResultCount() != this.limit && answers.hasNext()) { + printer.write(answers.next()); + } + timer.stop(); + + if (printer.isBooleanQuery()) { + interpreter.printEmph(printer.hadResults() ? "true\n" : "false\n"); + interpreter.printNormal("Answered in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } else { + interpreter.printNormal( + printer.getResultCount() + " result(s) in " + timer.getTotalCpuTime() / 1000000 + "ms."); + } + interpreter.printNormal(" Results are " + answers.getCorrectness() + ".\n"); + } catch (final IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } + + private void exportQueryResults(final Interpreter interpreter) throws CommandExecutionException { + if (this.limit != -1) { + throw new CommandExecutionException("LIMIT not supported for CSV export"); + } + + final Timer timer = new Timer("query"); + timer.start(); + Correctness correctness; + try { + correctness = interpreter.getReasoner().exportQueryAnswersToCsv(this.queryLiteral, this.csvFile, true); + } catch (final IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + timer.stop(); + + interpreter.printNormal("Written query result file in " + timer.getTotalCpuTime() / 1000000 + "ms."); + interpreter.printNormal(" This result is " + correctness + ".\n"); + } +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java new file mode 100644 index 000000000..6d56aeee5 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreter.java @@ -0,0 +1,61 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Timer; + +public class ReasonCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + + if (command.getArguments().size() > 0) { + throw new CommandExecutionException("This command supports no arguments."); + } + + interpreter.printNormal("Loading and materializing inferences ...\n"); + + Timer timer = new Timer("reasoning"); + timer.start(); + try { + interpreter.getReasoner().reason(); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + timer.stop(); + interpreter.printNormal("... finished in " + timer.getTotalWallTime() / 1000000 + "ms (" + + timer.getTotalCpuTime() / 1000000 + "ms CPU time).\n"); + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " .\n"); + } + + @Override + public String getSynopsis() { + return "load data and compute conclusions from knowledge base"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java new file mode 100644 index 000000000..c835bd635 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreter.java @@ -0,0 +1,81 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; + +public class RemoveSourceCommandInterpreter implements CommandInterpreter { + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + if (command.getArguments().size() == 0 || command.getArguments().size() > 2) { + throw new CommandExecutionException("This command requires one or two arguments."); + } + + final String predicateDeclaration = Interpreter.extractStringArgument(command, 0, "predicateName[arity]"); + final Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + DataSource dataSource = null; + if (command.getArguments().size() == 2) { + final PositiveLiteral sourceDeclaration = Interpreter.extractPositiveLiteralArgument(command, 1, + "source declaration"); + dataSource = AddSourceCommandInterpreter.extractDataSource(sourceDeclaration, interpreter); + } + + if (dataSource != null) { + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource); + if (interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration) > 0) { + interpreter.printNormal("Removed specified data source declaration.\n"); + } else { + interpreter.printNormal("Specified data source declaration not found in knowledge base.\n"); + } + } else { + int count = 0; + for (final DataSourceDeclaration dataSourceDeclaration : interpreter.getKnowledgeBase() + .getDataSourceDeclarations()) { + if (dataSourceDeclaration.getPredicate().equals(predicate)) { + interpreter.getKnowledgeBase().removeStatement(dataSourceDeclaration); + count++; + } + } + interpreter.printNormal("Removed " + count + " matching data source declaration(s).\n"); + } + + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " <predicateName>[<arity>]: <source declaration> .\n" + " <predicateName>[<arity>] : the name of the predicate and its arity\n" + " <source declaration> (optional): a fact specifying a source declaration\n\n" + "Note that every predicate can have multiple sources.\n"); + } + + @Override + public String getSynopsis() { + return "remove one or all external data sources for a predicate"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java new file mode 100644 index 000000000..6add109ca --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreter.java @@ -0,0 +1,75 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class RetractCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + int factCount = 0; + int ruleCount = 0; + int argumentIndex = 0; + for (Argument argument : command.getArguments()) { + if (argument.fromPositiveLiteral().isPresent()) { + PositiveLiteral literal = argument.fromPositiveLiteral().get(); + Fact fact; + try { + fact = Expressions.makeFact(literal.getPredicate(), literal.getArguments()); + } catch (IllegalArgumentException e) { + throw new CommandExecutionException("Literal " + literal.toString() + " is not a fact.", e); + } + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } else if (argument.fromRule().isPresent()) { + ruleCount += interpreter.getKnowledgeBase().removeStatement(argument.fromRule().get()); + } else { // implies argument.fromTerm().isPresent() + String predicateDeclaration = Interpreter.extractStringArgument(command, argumentIndex, "predicateName[arity]"); + Predicate predicate = AddSourceCommandInterpreter.extractPredicate(predicateDeclaration); + for (Fact fact : interpreter.getKnowledgeBase().getFacts()) { + if (predicate.equals(fact.getPredicate())) { + factCount += interpreter.getKnowledgeBase().removeStatement(fact); + } + } + } + argumentIndex++; + } + + interpreter.printNormal("Retracted " + factCount + " fact(s) and " + ruleCount + " rule(s).\n"); + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " (<fact or rule>)+ .\n" + " fact or rule: statement(s) to be removed from the knowledge base, or a predicate declaration\n" + " of the form name[arity] to remove all facts for that predicate.\n" + "Reasoning needs to be invoked after finishing the removal of statements.\n"); + } + + @Override + public String getSynopsis() { + return "remove facts and rules from the knowledge base"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java new file mode 100644 index 000000000..583621c68 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreter.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; + +public class SetPrefixCommandInterpreter implements CommandInterpreter { + + @Override + public void run(final Command command, final Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 2); + final String prefixName = Interpreter.extractStringArgument(command, 0, "prefix name"); + final String prefixIri = Interpreter.extractNameArgument(command, 1, "prefix IRI"); + + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().unsetPrefix(prefixName); + try { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri(prefixName, prefixIri); + } catch (final PrefixDeclarationException e) { // practically impossible + throw new CommandExecutionException("Setting prefix failed: " + e.getMessage()); + } + } + + @Override + public void printHelp(final String commandName, final Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " <prefixName>: <prefixIRI> .\n"); + } + + @Override + public String getSynopsis() { + return "set a prefix to abbreviate long IRIs (only affects future inputs)"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java new file mode 100644 index 000000000..138f3ca48 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreter.java @@ -0,0 +1,49 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.Command; + +public class ShowKbCommandInterpreter implements CommandInterpreter { + + @Override + public void run(Command command, Interpreter interpreter) throws CommandExecutionException { + Interpreter.validateArgumentCount(command, 0); + try { + interpreter.getKnowledgeBase().writeKnowledgeBase(interpreter.getWriter()); + } catch (IOException e) { + throw new CommandExecutionException(e.getMessage(), e); + } + } + + @Override + public void printHelp(String commandName, Interpreter interpreter) { + interpreter.printNormal("Usage: @" + commandName + " .\n"); + } + + @Override + public String getSynopsis() { + return "display the content of the knowledge base"; + } + +} diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java new file mode 100644 index 000000000..1ba22dfe0 --- /dev/null +++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/SimpleStyledPrinter.java @@ -0,0 +1,80 @@ +package org.semanticweb.rulewerk.commands; + +import java.io.IOException; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+
+}
diff --git a/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java
new file mode 100644
index 000000000..ebaf2867d
--- /dev/null
+++ b/rulewerk-commands/src/main/java/org/semanticweb/rulewerk/commands/StyledPrinter.java
@@ -0,0 +1,49 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.Writer;
+
+/**
+ * Interface for printing given Strings to a writer using different styles.
+ *
+ * @author Irina Dragoste
+ *
+ */
+public interface StyledPrinter {
+
+	void printNormal(String string);
+
+	void printSection(String string);
+
+	void printEmph(String string);
+
+	void printCode(String string);
+
+	void printImportant(String string);
+
+	/**
+	 *
+	 * @return the writer to print to
+	 */
+	Writer getWriter();
+
+}
diff --git a/rulewerk-commands/src/test/data/loadtest-fails.owl b/rulewerk-commands/src/test/data/loadtest-fails.owl
new file mode 100644
index 000000000..3c8a426bb
--- /dev/null
+++ b/rulewerk-commands/src/test/data/loadtest-fails.owl
@@ -0,0 +1 @@
+this is not a valid OWL file
diff --git a/rulewerk-commands/src/test/data/loadtest-unsupported.owl b/rulewerk-commands/src/test/data/loadtest-unsupported.owl
new file mode 100644
index 000000000..405a2c4a5
--- /dev/null
+++ b/rulewerk-commands/src/test/data/loadtest-unsupported.owl
@@ -0,0 +1,5 @@
+@prefix : <http://example.org/> .
+@prefix owl: <http://www.w3.org/2002/07/owl#> .
+
+:a a owl:NamedIndividual, :C .
+:p a owl:ObjectProperty, owl:InverseFunctionalProperty .
\ No newline at end of file
diff --git a/rulewerk-commands/src/test/data/loadtest.nt b/rulewerk-commands/src/test/data/loadtest.nt
new file mode 100644
index 000000000..89536774b
--- /dev/null
+++ b/rulewerk-commands/src/test/data/loadtest.nt
@@ -0,0 +1 @@
+<http://example.org/a> <http://example.org/b> <http://example.org/c> .
diff --git a/rulewerk-commands/src/test/data/loadtest.owl b/rulewerk-commands/src/test/data/loadtest.owl
new file mode 100644
index 000000000..9b9d9f270
--- /dev/null
+++ b/rulewerk-commands/src/test/data/loadtest.owl
@@ -0,0 +1,4 @@
+@prefix : <http://example.org/> .
+@prefix owl: <http://www.w3.org/2002/07/owl#> .
+
+:a a owl:NamedIndividual, :C .
diff --git a/rulewerk-commands/src/test/data/loadtest.rdf b/rulewerk-commands/src/test/data/loadtest.rdf
new file mode 100644
index 000000000..affae3f94
--- /dev/null
+++ b/rulewerk-commands/src/test/data/loadtest.rdf
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+         xmlns:eg="http://example.org/">
+
+	<rdf:Description rdf:about="http://example.org/a">
+		<eg:b rdf:resource="http://example.org/c"/>
+	</rdf:Description>
+
+</rdf:RDF>
diff --git a/rulewerk-commands/src/test/data/loadtest.ttl b/rulewerk-commands/src/test/data/loadtest.ttl
new file mode 100644
index 000000000..3fbe612de
--- /dev/null
+++ b/rulewerk-commands/src/test/data/loadtest.ttl
@@ -0,0 +1,3 @@
+@prefix : <http://example.org/> .
+
+:a :b :c .
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java
new file mode 100644
index 000000000..e56c4ea3d
--- /dev/null
+++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AddSourceCommandInterpreterTest.java
@@ -0,0 +1,120 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.StringWriter;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class AddSourceCommandInterpreterTest {
+
+	@Test
+	public void correctUse_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter
+				.parseCommand("@addsource p[1] : sparql(<https://example.org/sparql>, \"?x\", \"?x <https://example.org/p> <https://example.org/o>\") .");
+		interpreter.runCommand(command);
+		List<Fact> facts = interpreter.getKnowledgeBase().getFacts();
+		List<Rule> rules = interpreter.getKnowledgeBase().getRules();
+		List<DataSourceDeclaration> dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations();
+
+		assertEquals("addsource", command.getName());
+		assertEquals(2, command.getArguments().size());
+		assertTrue(command.getArguments().get(0).fromTerm().isPresent());
+		assertTrue(command.getArguments().get(1).fromPositiveLiteral().isPresent());
+
+		assertTrue(facts.isEmpty());
+		assertTrue(rules.isEmpty());
+		assertEquals(1, dataSourceDeclarations.size());
+		assertTrue(dataSourceDeclarations.get(0).getDataSource() instanceof SparqlQueryResultDataSource);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@addsource \"string\" p(a).");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@addsource p[1]: \"string\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongSecondArgumentUnknownSource_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@addsource p[1]: unknown(a) .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongSecondArgumentWrongAritySource_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@addsource p[1]: load-rdf(\"file.nt\") .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@addsource p[2]: p(a) p(b) .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void help_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter();
+		InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer);
+	}
+
+	@Test
+	public void synopsis_succeeds() throws ParsingException, CommandExecutionException {
+		CommandInterpreter commandInterpreter = new AddSourceCommandInterpreter();
+		InterpreterTest.checkSynopsisFormat(commandInterpreter);
+	}
+
+}
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java
new file mode 100644
index 000000000..0d6f01a78
--- /dev/null
+++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/AssertCommandInterpreterTest.java
@@ -0,0 +1,95 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class AssertCommandInterpreterTest {
+
+	@Test
+	public void correctUse_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@assert p(a) q(?X) :- r(?X) .");
+		interpreter.runCommand(command);
+		List<Fact> facts = interpreter.getKnowledgeBase().getFacts();
+		List<Rule> rules = interpreter.getKnowledgeBase().getRules();
+		List<DataSourceDeclaration> dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations();
+
+		assertEquals("assert", command.getName());
+		assertEquals(2, command.getArguments().size());
+		assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent());
+		assertTrue(command.getArguments().get(1).fromRule().isPresent());
+
+		Literal literal = command.getArguments().get(0).fromPositiveLiteral().get();
+		Rule rule = command.getArguments().get(1).fromRule().get();
+
+		assertEquals(Arrays.asList(literal), facts);
+		assertEquals(Arrays.asList(rule), rules);
+		assertTrue(dataSourceDeclarations.isEmpty());
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentTerm_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@assert \"string\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@assert p(?X) .");
+		interpreter.runCommand(command);
+	}
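+
+	// Note: @assert accepts facts and rules alike, in one command
+	// (correctUse_succeeds adds one of each); literals containing variables,
+	// such as p(?X), and plain strings are rejected, as the two tests above check.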
+
+	@Test
+	public void help_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		CommandInterpreter commandInterpreter = new AssertCommandInterpreter();
+		InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer);
+	}
+
+	@Test
+	public void synopsis_succeeds() throws ParsingException, CommandExecutionException {
+		CommandInterpreter commandInterpreter = new AssertCommandInterpreter();
+		InterpreterTest.checkSynopsisFormat(commandInterpreter);
+	}
+
+}
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java
new file mode 100644
index 000000000..207cc68e1
--- /dev/null
+++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ClearCommandInterpreterTest.java
@@ -0,0 +1,244 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
+
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.parser.DefaultParserConfiguration;
+import org.semanticweb.rulewerk.parser.ParserConfiguration;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class ClearCommandInterpreterTest {
+
+	static Term a = Expressions.makeAbstractConstant("a");
+	static Term x = Expressions.makeUniversalVariable("X");
+	static Predicate p = Expressions.makePredicate("p", 1);
+	static Predicate q = Expressions.makePredicate("q", 1);
+	static Predicate r = Expressions.makePredicate("r", 1);
+	static Fact fact = Expressions.makeFact(p, a);
+	static PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x);
+	static PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x);
+	static Rule rule = Expressions.makeRule(headLiteral, bodyLiteral);
+	static Map<String, String> standardPrefixes = new HashMap<>();
+	static {
+		standardPrefixes.put("eg:", "http://example.org/");
+	}
+	static DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(p,
+			Mockito.mock(DataSource.class));
+
+	private void prepareKnowledgeBase(KnowledgeBase knowledgeBase) throws PrefixDeclarationException {
+		knowledgeBase.addStatement(fact);
+		knowledgeBase.addStatement(rule);
+		knowledgeBase.addStatement(dataSourceDeclaration);
+		knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/");
+	}
+
+	private void assertPrefixesEqual(Map<String, String> expectedPrefixes,
+			PrefixDeclarationRegistry prefixDeclarationRegistry) {
+		Set<Entry<String, String>> prefixes = StreamSupport.stream(prefixDeclarationRegistry.spliterator(), false)
+				.collect(Collectors.toSet());
+		assertEquals(expectedPrefixes.entrySet(), prefixes);
+	}
+
+	@Test
+	public void correctUseAll_succeeds()
+			throws ParsingException, CommandExecutionException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = Mockito.spy(InterpreterTest.getMockInterpreter(writer));
+		prepareKnowledgeBase(interpreter.getKnowledgeBase());
+
+		assertEquals(1, interpreter.getKnowledgeBase().getFacts().size());
+
+		Command command = interpreter.parseCommand("@clear ALL .");
+		interpreter.runCommand(command);
+
+		assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty());
+		assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty());
+		assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty());
+		assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry());
+		Mockito.verify(interpreter).clearReasonerAndKnowledgeBase();
+	}
+
+	@Test
+	public void correctUseInf_succeeds()
+			throws ParsingException, CommandExecutionException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		SimpleStyledPrinter printer = new SimpleStyledPrinter(writer);
+		ParserConfiguration parserConfiguration = new DefaultParserConfiguration();
+		final KnowledgeBase knowledgeBase = new KnowledgeBase();
+		final Reasoner reasoner = Mockito.spy(Reasoner.class);
+		Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase);
+		try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer,
+				parserConfiguration)) {
+			prepareKnowledgeBase(interpreter.getKnowledgeBase());
+
+			Command command = interpreter.parseCommand("@clear INF .");
+			interpreter.runCommand(command);
+
+			assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+			assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules());
+			assertEquals(Arrays.asList(dataSourceDeclaration),
+					interpreter.getKnowledgeBase().getDataSourceDeclarations());
+			assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry());
+			Mockito.verify(reasoner).resetReasoner();
+		}
+	}
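+
+	// Each of the following tests clears exactly one statement kind (FACTS,
+	// RULES, DATASOURCES, or PREFIXES) and checks that all other parts of the
+	// knowledge base survive unchanged.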
+
+	@Test
+	public void correctUseFacts_succeeds()
+			throws ParsingException, CommandExecutionException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) {
+			prepareKnowledgeBase(interpreter.getKnowledgeBase());
+
+			Command command = interpreter.parseCommand("@clear FACTS .");
+			interpreter.runCommand(command);
+
+			assertTrue(interpreter.getKnowledgeBase().getFacts().isEmpty());
+			assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules());
+			assertEquals(Arrays.asList(dataSourceDeclaration),
+					interpreter.getKnowledgeBase().getDataSourceDeclarations());
+			assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry());
+		}
+	}
+
+	@Test
+	public void correctUseRules_succeeds()
+			throws ParsingException, CommandExecutionException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) {
+			prepareKnowledgeBase(interpreter.getKnowledgeBase());
+
+			Command command = interpreter.parseCommand("@clear RULES .");
+			interpreter.runCommand(command);
+
+			assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+			assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty());
+			assertEquals(Arrays.asList(dataSourceDeclaration),
+					interpreter.getKnowledgeBase().getDataSourceDeclarations());
+			assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry());
+		}
+	}
+
+	@Test
+	public void correctUseSources_succeeds()
+			throws ParsingException, CommandExecutionException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) {
+			prepareKnowledgeBase(interpreter.getKnowledgeBase());
+
+			Command command = interpreter.parseCommand("@clear DATASOURCES .");
+			interpreter.runCommand(command);
+
+			assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+			assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules());
+			assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty());
+			assertPrefixesEqual(standardPrefixes, interpreter.getKnowledgeBase().getPrefixDeclarationRegistry());
+		}
+	}
+
+	@Test
+	public void correctUsePrefixes_succeeds()
+			throws ParsingException, CommandExecutionException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) {
+
+			prepareKnowledgeBase(interpreter.getKnowledgeBase());
+
+			Command command = interpreter.parseCommand("@clear PREFIXES .");
+			interpreter.runCommand(command);
+
+			assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+			assertEquals(Arrays.asList(rule), interpreter.getKnowledgeBase().getRules());
+			assertEquals(Arrays.asList(dataSourceDeclaration),
+					interpreter.getKnowledgeBase().getDataSourceDeclarations());
+			assertPrefixesEqual(Collections.emptyMap(), interpreter.getKnowledgeBase().getPrefixDeclarationRegistry());
+		}
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@clear .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentType_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@clear \"string\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void unknownTask_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
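+		// "UNKNOWNTASK" below is deliberately not a supported @clear task name.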
+ + Command command = interpreter.parseCommand("@clear UNKNOWNTASK ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ClearCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ClearCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java new file mode 100644 index 000000000..52207d1bf --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ExportCommandInterpreterTest.java @@ -0,0 +1,156 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.io.Writer;
+
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.Correctness;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class ExportCommandInterpreterTest {
+
+	@Test
+	public void correctUseKb_succeeds() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		StringWriter fileWriter = new StringWriter();
+		Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer);
+		Interpreter interpreter = Mockito.spy(origInterpreter);
+		Mockito.doReturn(fileWriter).when(interpreter).getFileWriter(Mockito.eq("test.rls"));
+		Predicate predicate = Expressions.makePredicate("p", 1);
+		Term term = Expressions.makeAbstractConstant("a");
+		Fact fact = Expressions.makeFact(predicate, term);
+		interpreter.getKnowledgeBase().addStatement(fact);
+
+		Command command = interpreter.parseCommand("@export KB \"test.rls\" .");
+		interpreter.runCommand(command);
+
+		StringWriter anotherWriter = new StringWriter();
+		interpreter.getKnowledgeBase().writeKnowledgeBase(anotherWriter);
+
+		assertEquals("export", command.getName());
+		assertEquals(2, command.getArguments().size());
+		assertEquals(anotherWriter.toString(), fileWriter.toString());
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void correctUseKbIoException_fails() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer);
+		Interpreter interpreter = Mockito.spy(origInterpreter);
+		Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileWriter(Mockito.eq("test.rls"));
+
+		Command command = interpreter.parseCommand("@export KB \"test.rls\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void correctUseInferences_succeeds() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		StringWriter fileWriter = new StringWriter();
+		Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer);
+		Interpreter interpreter = Mockito.spy(origInterpreter);
+		Mockito.doReturn(fileWriter).when(interpreter).getFileWriter(Mockito.eq("test.rls"));
+		Mockito.when(interpreter.getReasoner().writeInferences(Mockito.any(Writer.class)))
+				.thenReturn(Correctness.SOUND_BUT_INCOMPLETE);
+
+		Command command = interpreter.parseCommand("@export INFERENCES \"test.rls\" .");
+		interpreter.runCommand(command);
+
+		assertEquals("export", command.getName());
+		assertEquals(2, command.getArguments().size());
+		assertTrue(writer.toString().contains(Correctness.SOUND_BUT_INCOMPLETE.toString()));
+	}
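+
+	// The two @export tasks differ in what they write: KB serializes the
+	// knowledge base itself (compared above against writeKnowledgeBase), while
+	// INFERENCES writes materialized results and reports their correctness
+	// (mocked above as SOUND_BUT_INCOMPLETE).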
+
+	@Test(expected = CommandExecutionException.class)
+	public void correctUseInferencesIoException_fails()
+			throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer);
+		Interpreter interpreter = Mockito.spy(origInterpreter);
+		Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileWriter(Mockito.eq("test.rls"));
+
+		Command command = interpreter.parseCommand("@export INFERENCES \"test.rls\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void unknownTask_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@export UNKNOWN \"file.csv\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@export \"string\" \"file.rls\".");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@export KB 123 .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@export KB \"file.rls\" more .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void help_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		CommandInterpreter commandInterpreter = new ExportCommandInterpreter();
+		InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer);
+	}
+
+	@Test
+	public void synopsis_succeeds() throws ParsingException, CommandExecutionException {
+		CommandInterpreter commandInterpreter = new ExportCommandInterpreter();
+		InterpreterTest.checkSynopsisFormat(commandInterpreter);
+	}
+
+}
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java
new file mode 100644
index 000000000..8a89c1cea
--- /dev/null
+++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/HelpCommandInterpreterTest.java
@@ -0,0 +1,117 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class HelpCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help ."); + interpreter.runCommand(command); + + String output = writer.toString(); + for (String commandName : interpreter.getRegisteredCommands()) { + assertTrue(output.contains("@" + commandName)); + } + } + + @Test + public void correctUseWithCommand_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help query."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentCount_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help query showkb ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void unknownCommandHelp_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help unknowncommand ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentTypeTerm_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help 123 ."); + interpreter.runCommand(command); + // Nothing much to test here. + assertTrue(writer.toString().length() > 0); + } + + @Test + public void wrongArgumentTypeFact_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@help p(a) ."); + interpreter.runCommand(command); + // Nothing much to test here. 
+ assertTrue(writer.toString().length() > 0); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new HelpCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new HelpCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java new file mode 100644 index 000000000..443ed8baa --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/InterpreterTest.java @@ -0,0 +1,141 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class InterpreterTest { + + static public Interpreter getMockInterpreter(Writer writer) { + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + return new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, (knowledgeBase) -> { + Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + return reasoner; + }, printer, parserConfiguration); + } + + /** + * Checks the basic format of command usage instructions and verifies that the + * given command name is used (not a fixed one). 
+ * + * @param commandInterpreter + * @param interpreter + * @param writer + */ + static public void checkHelpFormat(CommandInterpreter commandInterpreter, Interpreter interpreter, + StringWriter writer) { + commandInterpreter.printHelp("commandname", interpreter); + String result = writer.toString(); + + assertTrue(result.startsWith("Usage: @commandname ")); + assertTrue(result.endsWith("\n")); + } + + static public void checkSynopsisFormat(CommandInterpreter commandInterpreter) { + String synopsis = commandInterpreter.getSynopsis(); + assertTrue(synopsis.length() < 70); + } + + @Test + public void getters_succeed() { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = new SimpleStyledPrinter(writer); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + final Reasoner reasoner = Mockito.mock(Reasoner.class); + Mockito.when(reasoner.getKnowledgeBase()).thenReturn(knowledgeBase); + try (Interpreter interpreter = new Interpreter(() -> knowledgeBase, (kb) -> reasoner, printer, + parserConfiguration)) { + assertEquals(knowledgeBase, interpreter.getKnowledgeBase()); + assertEquals(reasoner, interpreter.getReasoner()); + assertEquals(writer, interpreter.getWriter()); + assertEquals(parserConfiguration, interpreter.getParserConfiguration()); + } + } + + @Test(expected = CommandExecutionException.class) + public void unknownCommand_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = getMockInterpreter(writer)) { + Command command = interpreter.parseCommand("@unknown ."); + interpreter.runCommand(command); + } + } + + @Test(expected = ParsingException.class) + public void malformedCommand_fails() throws ParsingException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = getMockInterpreter(writer)) { + interpreter.parseCommand("malformed ."); + } + } + + @Test + public void prefixesAreUsed_succeeds() throws ParsingException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + try (Interpreter interpreter = InterpreterTest.getMockInterpreter(writer)) { + interpreter.getKnowledgeBase().getPrefixDeclarationRegistry().setPrefixIri("eg:", "http://example.org/"); + + Command command = interpreter.parseCommand("@somecommand eg:test ."); + + assertEquals(1, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertEquals("http://example.org/test", command.getArguments().get(0).fromTerm().get().getName()); + } + } + + @Test + public void print_succeeds() { + StringWriter writer = new StringWriter(); + SimpleStyledPrinter printer = Mockito.spy(new SimpleStyledPrinter(writer)); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration(); + try (Interpreter interpreter = new Interpreter(Interpreter.EMPTY_KNOWLEDGE_BASE_PROVIDER, + (kb) -> Mockito.mock(Reasoner.class), printer, parserConfiguration)) { + interpreter.printCode("Code"); + interpreter.printNormal("Normal"); + interpreter.printEmph("Emph"); + interpreter.printSection("Section"); + interpreter.printImportant("Important"); + + Mockito.verify(printer).printCode("Code"); + Mockito.verify(printer).printNormal("Normal"); + Mockito.verify(printer).printEmph("Emph"); + Mockito.verify(printer).printSection("Section"); + Mockito.verify(printer).printImportant("Important"); + } + } + +} diff --git 
a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java new file mode 100644 index 000000000..ee3d1ac42 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/LoadCommandInterpreterTest.java @@ -0,0 +1,359 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.ByteArrayInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.StringWriter; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class LoadCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("p(a) .".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls")); + + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + + Command command = interpreter.parseCommand("@load 'loadtest.rls' ."); + interpreter.runCommand(command); + + assertEquals("load", command.getName()); + assertEquals(1, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRulesTask_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + InputStream inputStream = new ByteArrayInputStream("p(a) .".getBytes(StandardCharsets.UTF_8)); + Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer); + Interpreter interpreter = Mockito.spy(origInterpreter); + 
Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls"));
+
+		Predicate predicate = Expressions.makePredicate("p", 1);
+		Term term = Expressions.makeAbstractConstant("a");
+		Fact fact = Expressions.makeFact(predicate, term);
+
+		Command command = interpreter.parseCommand("@load RULES 'loadtest.rls' .");
+		interpreter.runCommand(command);
+
+		assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+		assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty());
+		assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty());
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void correctUseParseError_fails() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		InputStream inputStream = new ByteArrayInputStream("not parsable".getBytes(StandardCharsets.UTF_8));
+		Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer);
+		Interpreter interpreter = Mockito.spy(origInterpreter);
+		Mockito.doReturn(inputStream).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls"));
+
+		Command command = interpreter.parseCommand("@load 'loadtest.rls' .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void correctUseFileNotFoundError_fails() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter origInterpreter = InterpreterTest.getMockInterpreter(writer);
+		Interpreter interpreter = Mockito.spy(origInterpreter);
+		Mockito.doThrow(FileNotFoundException.class).when(interpreter).getFileInputStream(Mockito.eq("loadtest.rls"));
+
+		Command command = interpreter.parseCommand("@load 'loadtest.rls' .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void correctUseWithOwlTask_succeeds() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Predicate predicate = Expressions.makePredicate("http://example.org/C", 1);
+		Term term = Expressions.makeAbstractConstant("http://example.org/a");
+		Fact fact = Expressions.makeFact(predicate, term);
+
+		Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest.owl' .");
+		interpreter.runCommand(command);
+
+		assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+		assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty());
+		assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty());
+	}
+
+	@Test
+	public void correctUseWithOwlTask_UnsupportedAxioms_succeeds()
+			throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Predicate predicate = Expressions.makePredicate("http://example.org/C", 1);
+		Term term = Expressions.makeAbstractConstant("http://example.org/a");
+		Fact fact = Expressions.makeFact(predicate, term);
+
+		Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-unsupported.owl' .");
+		interpreter.runCommand(command);
+
+		assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+		assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty());
+		assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty());
+		// Output mentions the offending axiom in Functional-Style Syntax:
+		assertTrue(writer.toString().contains("InverseFunctionalObjectProperty(<http://example.org/p>)"));
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void correctUseWithOwlTask_malformedOwl_fails()
+			throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@load OWL 'src/test/data/loadtest-fails.owl' .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void correctUseWithOwlTask_missingFile_fails()
+			throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@load OWL 'src/test/data/file-does-not-exist.owl' .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void correctUseWithRdfTask_Nt_succeeds()
+			throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Predicate predicate = Expressions.makePredicate("TRIPLE", 3);
+		Term terma = Expressions.makeAbstractConstant("http://example.org/a");
+		Term termb = Expressions.makeAbstractConstant("http://example.org/b");
+		Term termc = Expressions.makeAbstractConstant("http://example.org/c");
+		Fact fact = Expressions.makeFact(predicate, terma, termb, termc);
+
+		Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' .");
+		interpreter.runCommand(command);
+
+		assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+		assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty());
+		assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty());
+	}
+
+	@Test
+	public void correctUseWithRdfTask_NtCustomPredicate_succeeds()
+			throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Predicate predicate = Expressions.makePredicate("http://example.org/mytriple", 3);
+		Term terma = Expressions.makeAbstractConstant("http://example.org/a");
+		Term termb = Expressions.makeAbstractConstant("http://example.org/b");
+		Term termc = Expressions.makeAbstractConstant("http://example.org/c");
+		Fact fact = Expressions.makeFact(predicate, terma, termb, termc);
+
+		Command command = interpreter
+				.parseCommand("@load RDF 'src/test/data/loadtest.nt' <http://example.org/mytriple> .");
+		interpreter.runCommand(command);
+
+		assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts());
+		assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty());
+		assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty());
+	}
+
+	@Test
+	public void correctUseWithRdfTask_NtABoxLoading_succeeds()
+			throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Predicate predicate = Expressions.makePredicate("http://example.org/b", 2);
+		Term terma = Expressions.makeAbstractConstant("http://example.org/a");
+		Term termc = Expressions.makeAbstractConstant("http://example.org/c");
+		Fact fact = Expressions.makeFact(predicate, terma, termc);
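+
+		// ABOX loading turns each triple "s p o" into a binary fact p(s, o),
+		// instead of the default ternary TRIPLE(s, p, o) used in the tests above:
+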
Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.nt' ABOX."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_Turtle_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.ttl' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri(":")); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test + public void correctUseWithRdfTask_RdfXml_succeeds() + throws ParsingException, CommandExecutionException, IOException, PrefixDeclarationException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest.rdf' ."); + interpreter.runCommand(command); + + assertEquals(Arrays.asList(fact), interpreter.getKnowledgeBase().getFacts()); + assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri("eg:")); + assertTrue(interpreter.getKnowledgeBase().getRules().isEmpty()); + assertTrue(interpreter.getKnowledgeBase().getDataSourceDeclarations().isEmpty()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseWithRdfTask_malformedRdf_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/loadtest-fails.owl' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseWithRdfTask_missingFile_fails() + throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF 'src/test/data/file-does-not-exist.rdf' ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + 
StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithOptional_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load OWL ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongRdfPredicateTermType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF \"file.nt\" \"string\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongRdfPredicateArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load RDF \"file.nt\" p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentType_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongTask_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@load UNKOWNTASK 'loadtest.rls' ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new LoadCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new LoadCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java new file mode 100644 index 000000000..a23f72b13 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/QueryCommandInterpreterTest.java @@ -0,0 +1,313 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.QueryResult;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.Correctness;
+import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl;
+import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class QueryCommandInterpreterTest {
+
+	class TestQueryResultIterator implements QueryResultIterator {
+
+		final Iterator<QueryResult> results;
+
+		public TestQueryResultIterator(List<QueryResult> results) {
+			this.results = results.iterator();
+		}
+
+		@Override
+		public boolean hasNext() {
+			return results.hasNext();
+		}
+
+		@Override
+		public QueryResult next() {
+			return results.next();
+		}
+
+		@Override
+		public Correctness getCorrectness() {
+			return Correctness.SOUND_AND_COMPLETE;
+		}
+
+		@Override
+		public void close() {
+		}
+
+	}
+
+	@Test
+	public void correctUseQuery_succeeds() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		QueryResult r1 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-1#")));
+		QueryResult r2 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-2#")));
+		QueryResult r3 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-3#")));
+
+		QueryResultIterator results = new TestQueryResultIterator(Arrays.asList(r1, r2, r3));
+
+		Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results);
+
+		Command command = interpreter.parseCommand("@query p(?X) LIMIT 2 .");
+		interpreter.runCommand(command);
+		// correct operation largely verified by not throwing an exception on the
+		// previous line, since only very few calls to the reasoner are not mocked
+		String output = writer.toString();
+
+		assertEquals("query", command.getName());
+		assertEquals(3, command.getArguments().size());
+		assertTrue(output.contains("#TEST-1#"));
+		assertTrue(output.contains("#TEST-2#"));
+		assertFalse(output.contains("#TEST-3#"));
+		assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString()));
+	}
+
+	@Test
+	public void correctUseBooleanQueryTrue_succeeds() throws ParsingException, CommandExecutionException, IOException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		QueryResult r1 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("TEST-1")));
+		QueryResult r2 = new
QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-2#"))); + QueryResult r3 = new QueryResultImpl(Arrays.asList(Expressions.makeAbstractConstant("#TEST-3#"))); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList(r1, r2, r3)); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(TEST-1) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertFalse(output.contains("TEST-1")); + assertFalse(output.contains("#TEST-2#")); + assertFalse(output.contains("#TEST-3#")); + assertTrue(output.startsWith("true")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseBooleanQueryFalse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + QueryResultIterator results = new TestQueryResultIterator(Arrays.asList()); + + Mockito.when(interpreter.getReasoner().answerQuery(Mockito.any(), Mockito.eq(true))).thenReturn(results); + + Command command = interpreter.parseCommand("@query p(TEST-1) LIMIT 2 ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + String output = writer.toString(); + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(output.startsWith("false")); + assertTrue(output.contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseCount_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + QueryAnswerCount queryAnswerCount = new QueryAnswerCountImpl(Correctness.SOUND_AND_COMPLETE, 42); + Mockito.when(interpreter.getReasoner().countQueryAnswers(Mockito.any(), Mockito.eq(true))) + .thenReturn(queryAnswerCount); + Mockito.when(interpreter.getReasoner().countQueryAnswers(Mockito.any())).thenReturn(queryAnswerCount); + + Command command = interpreter.parseCommand("@query COUNT p(?X) ."); + interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + + assertEquals("query", command.getName()); + assertEquals(2, command.getArguments().size()); + assertTrue(writer.toString().startsWith("42\n")); + assertTrue(writer.toString().contains(Correctness.SOUND_AND_COMPLETE.toString())); + } + + @Test + public void correctUseExport_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Mockito.when(interpreter.getReasoner().exportQueryAnswersToCsv(Mockito.any(), Mockito.eq("file.csv"), + Mockito.anyBoolean())).thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV \"file.csv\" ."); + 
interpreter.runCommand(command); + // correct operation largely verified by not throwing an exception on the + // previous line, since only very few calls to the reasoner are not mocked + + assertEquals("query", command.getName()); + assertEquals(3, command.getArguments().size()); + assertTrue(writer.toString().contains(Correctness.SOUND_BUT_INCOMPLETE.toString())); + } + + @Test(expected = CommandExecutionException.class) + public void exportIoError_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Mockito.when(interpreter.getReasoner().exportQueryAnswersToCsv(Mockito.any(), Mockito.eq("file.csv"), + Mockito.anyBoolean())).thenThrow(IOException.class); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountZero_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentNoLiteral_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT LIMIT 10 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT p(?X) LIMIT 10 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCountWithExportFile_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query COUNT p(?X) EXPORTCSV \"file.csv\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongLimitTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT \"10\" ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongLimitNoTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentMissingLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command 
command = interpreter.parseCommand("@query p(?X) LIMIT ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongExportFileTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV 123 ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentWrongExportFileNoTerm_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV p(a) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentMissingExportFile_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) EXPORTCSV ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentExportWithLimit_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@query p(?X) LIMIT 10 EXPORTCSV \"test.csv\" ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new QueryCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new QueryCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java new file mode 100644 index 000000000..025540d02 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ReasonCommandInterpreterTest.java @@ -0,0 +1,85 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ReasonCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Mockito.when(interpreter.getReasoner().getCorrectness()).thenReturn(Correctness.SOUND_BUT_INCOMPLETE); + Mockito.when(interpreter.getReasoner().reason()).thenAnswer(I -> { + Mockito.when(interpreter.getReasoner().getCorrectness()).thenReturn(Correctness.SOUND_AND_COMPLETE); + return true; + }); + + Command command = interpreter.parseCommand("@reason ."); + interpreter.runCommand(command); + + assertEquals(Correctness.SOUND_AND_COMPLETE, interpreter.getReasoner().getCorrectness()); + } + + @Test(expected = CommandExecutionException.class) + public void correctUseReasonerException_fails() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Mockito.when(interpreter.getReasoner().reason()).thenThrow(IOException.class); + + Command command = interpreter.parseCommand("@reason ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@reason p(?X) ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new ReasonCommandInterpreter(); + InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer); + } + + @Test + public void synopsis_succeeds() throws ParsingException, CommandExecutionException { + CommandInterpreter commandInterpreter = new ReasonCommandInterpreter(); + InterpreterTest.checkSynopsisFormat(commandInterpreter); + } + +} diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java new file mode 100644 index 000000000..c5532acf9 --- /dev/null +++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RemoveSourceCommandInterpreterTest.java @@ -0,0 +1,176 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.StringWriter;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class RemoveSourceCommandInterpreterTest {
+
+	@Test
+	public void correctUse_succeeds() throws ParsingException, CommandExecutionException, MalformedURLException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		Predicate predicate = Expressions.makePredicate("p", 1);
+		DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x",
+				"?x <http://example.org/p> <http://example.org/o>");
+		DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource);
+		interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration);
+
+		Command command = interpreter
+				.parseCommand("@delsource p[1] : sparql(<http://example.org>, \"?x\", \"?x <http://example.org/p> <http://example.org/o>\") .");
+		interpreter.runCommand(command);
+		List<Fact> facts = interpreter.getKnowledgeBase().getFacts();
+		List<Rule> rules = interpreter.getKnowledgeBase().getRules();
+		List<DataSourceDeclaration> dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations();
+
+		assertEquals("delsource", command.getName());
+		assertEquals(2, command.getArguments().size());
+		assertTrue(command.getArguments().get(0).fromTerm().isPresent());
+		assertTrue(command.getArguments().get(1).fromPositiveLiteral().isPresent());
+
+		assertTrue(facts.isEmpty());
+		assertTrue(rules.isEmpty());
+		assertTrue(dataSourceDeclarations.isEmpty());
+	}
+
+	@Test
+	public void correctUseNothingRemoved_succeeds()
+			throws ParsingException, CommandExecutionException, MalformedURLException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		Predicate predicate = Expressions.makePredicate("p", 1);
+		DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x",
+				"?x <http://example.org/p> <http://example.org/o>");
+		DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource);
+		interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration);
+
+		Command command = interpreter
+				.parseCommand("@delsource another[1] : sparql(<http://example.org>, \"?x\", \"?x <http://example.org/p> <http://example.org/o>\") .");
+		interpreter.runCommand(command);
+		List<Fact> facts = interpreter.getKnowledgeBase().getFacts();
+		List<Rule> rules = interpreter.getKnowledgeBase().getRules();
+		List<DataSourceDeclaration> dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations();
+
+		assertTrue(facts.isEmpty());
+		assertTrue(rules.isEmpty());
+		assertEquals(Arrays.asList(dataSourceDeclaration), dataSourceDeclarations);
+	}
+
+	@Test
+	public void correctUseRemoveAll_succeeds()
+			throws ParsingException, CommandExecutionException, MalformedURLException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		Predicate predicate = Expressions.makePredicate("p", 1);
+		Predicate predicate2 = Expressions.makePredicate("q", 1);
+		DataSource dataSource = new SparqlQueryResultDataSource(new URL("http://example.org"), "?x",
+				"?x <http://example.org/p> <http://example.org/o>");
+		DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource);
+		DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource);
+		interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration);
+		interpreter.getKnowledgeBase().addStatement(dataSourceDeclaration2);
+
+		Command command = interpreter.parseCommand("@delsource p[1] .");
+		interpreter.runCommand(command);
+		List<Fact> facts = interpreter.getKnowledgeBase().getFacts();
+		List<Rule> rules = interpreter.getKnowledgeBase().getRules();
+		List<DataSourceDeclaration> dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations();
+
+		assertTrue(facts.isEmpty());
+		assertTrue(rules.isEmpty());
+		assertEquals(Arrays.asList(dataSourceDeclaration2), dataSourceDeclarations);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongFirstArgumentType_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@delsource \"string\" p(a).");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongSecondArgumentType_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@delsource p[1]: \"string\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongSecondArgumentUnknownSource_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@delsource p[1]: unknown(a) .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@delsource p[2]: p(a) p(b) .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentCountZero_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@delsource .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void help_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		CommandInterpreter commandInterpreter = new RemoveSourceCommandInterpreter();
+		InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer);
+	}
+
+	@Test
+	public void synopsis_succeeds() throws ParsingException, CommandExecutionException {
+		CommandInterpreter commandInterpreter = new RemoveSourceCommandInterpreter();
+		InterpreterTest.checkSynopsisFormat(commandInterpreter);
+	}
+
+}
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java
new file mode 100644
index 000000000..cc693eb7b
--- /dev/null
+++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/RetractCommandInterpreterTest.java
@@ -0,0 +1,144 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.StringWriter;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class RetractCommandInterpreterTest {
+
+	@Test
+	public void correctUse_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		Term a = Expressions.makeAbstractConstant("a");
+		Term x = Expressions.makeUniversalVariable("X");
+		Predicate p = Expressions.makePredicate("p", 1);
+		Predicate q = Expressions.makePredicate("q", 1);
+		Predicate r = Expressions.makePredicate("r", 1);
+		Fact fact = Expressions.makeFact(p, a);
+		Fact fact2 = Expressions.makeFact(q, a);
+		PositiveLiteral headLiteral = Expressions.makePositiveLiteral(q, x);
+		PositiveLiteral bodyLiteral = Expressions.makePositiveLiteral(r, x);
+		Rule rule = Expressions.makeRule(headLiteral, bodyLiteral);
+		interpreter.getKnowledgeBase().addStatement(fact);
+		interpreter.getKnowledgeBase().addStatement(fact2);
+		interpreter.getKnowledgeBase().addStatement(rule);
+
+		Command command = interpreter.parseCommand("@retract p(a) q(?X) :- r(?X) .");
+		interpreter.runCommand(command);
+		List<Fact> facts = interpreter.getKnowledgeBase().getFacts();
+		List<Rule> rules = interpreter.getKnowledgeBase().getRules();
+		List<DataSourceDeclaration> dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations();
+
+		assertEquals("retract", command.getName());
+		assertEquals(2, command.getArguments().size());
+		assertTrue(command.getArguments().get(0).fromPositiveLiteral().isPresent());
+		assertTrue(command.getArguments().get(1).fromRule().isPresent());
+
+		assertEquals(Arrays.asList(fact2), facts);
+		assertTrue(rules.isEmpty());
+		assertTrue(dataSourceDeclarations.isEmpty());
+	}
+
+	@Test
+	public void correctUse_retractPredicate_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		Term a = Expressions.makeAbstractConstant("a");
+		Term b = Expressions.makeAbstractConstant("b");
+		Predicate p = Expressions.makePredicate("p", 1);
+		Predicate q = Expressions.makePredicate("q", 1);
+		Fact pa = Expressions.makeFact(p, a);
+		Fact pb = Expressions.makeFact(p, b);
+		Fact qa = Expressions.makeFact(q, a);
+
+		interpreter.getKnowledgeBase().addStatement(pa);
+		interpreter.getKnowledgeBase().addStatement(pb);
+		interpreter.getKnowledgeBase().addStatement(qa);
+
+		Command command = interpreter.parseCommand("@retract p[1] .");
+		interpreter.runCommand(command);
+		List<Fact> facts = interpreter.getKnowledgeBase().getFacts();
+		List<Rule> rules = interpreter.getKnowledgeBase().getRules();
+		List<DataSourceDeclaration> dataSourceDeclarations = interpreter.getKnowledgeBase().getDataSourceDeclarations();
+
+		assertEquals(Arrays.asList(qa), facts);
+		assertTrue(rules.isEmpty());
+		assertTrue(dataSourceDeclarations.isEmpty());
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentTermNumber_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@retract 42 .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentTermStringNoPredicate_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@retract \"string\" .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentNonFact_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@retract p(?X) .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void help_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		CommandInterpreter commandInterpreter = new RetractCommandInterpreter();
+		InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer);
+	}
+
+	@Test
+	public void synopsis_succeeds() throws ParsingException, CommandExecutionException {
+		CommandInterpreter commandInterpreter = new RetractCommandInterpreter();
+		InterpreterTest.checkSynopsisFormat(commandInterpreter);
+	}
+
+}
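For orientation, the three retraction forms exercised by the tests above all go through the same two interpreter calls; a sketch, assuming an Interpreter set up as in these tests:

// Assumes: Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
// The command strings are exactly those used in the tests above.
interpreter.runCommand(interpreter.parseCommand("@retract p(a) ."));           // one fact
interpreter.runCommand(interpreter.parseCommand("@retract q(?X) :- r(?X) .")); // one rule
interpreter.runCommand(interpreter.parseCommand("@retract p[1] ."));           // every fact of p (arity 1)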
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java
new file mode 100644
index 000000000..9190c8407
--- /dev/null
+++ b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/SetPrefixCommandInterpreterTest.java
@@ -0,0 +1,91 @@
+package org.semanticweb.rulewerk.commands;
+
+/*-
+ * #%L
+ * Rulewerk command execution support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.StringWriter;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class SetPrefixCommandInterpreterTest {
+
+	@Test
+	public void correctUse_succeeds() throws ParsingException, CommandExecutionException, PrefixDeclarationException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@setprefix eg: <http://example.org/> .");
+		interpreter.runCommand(command);
+
+		assertEquals("setprefix", command.getName());
+		assertEquals(2, command.getArguments().size());
+		assertTrue(command.getArguments().get(0).fromTerm().isPresent());
+		assertTrue(command.getArguments().get(1).fromTerm().isPresent());
+
+		assertEquals("http://example.org/", interpreter.getKnowledgeBase().getPrefixIri("eg:"));
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongFirstArgument_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@setprefix 123 <http://example.org/> .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongSecondArgument_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@setprefix pre: 123 .");
+		interpreter.runCommand(command);
+	}
+
+	@Test(expected = CommandExecutionException.class)
+	public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+
+		Command command = interpreter.parseCommand("@setprefix .");
+		interpreter.runCommand(command);
+	}
+
+	@Test
+	public void help_succeeds() throws ParsingException, CommandExecutionException {
+		StringWriter writer = new StringWriter();
+		Interpreter interpreter = InterpreterTest.getMockInterpreter(writer);
+		CommandInterpreter commandInterpreter = new SetPrefixCommandInterpreter();
+		InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer);
+	}
+
+	@Test
+	public void synopsis_succeeds() throws ParsingException, CommandExecutionException {
+		CommandInterpreter commandInterpreter = new SetPrefixCommandInterpreter();
+		InterpreterTest.checkSynopsisFormat(commandInterpreter);
+	}
+
+}
diff --git a/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java
new file mode 100644
index 000000000..5ddd3b231
--- /dev/null
+++
b/rulewerk-commands/src/test/java/org/semanticweb/rulewerk/commands/ShowKbCommandInterpreterTest.java @@ -0,0 +1,97 @@ +package org.semanticweb.rulewerk.commands; + +/*- + * #%L + * Rulewerk command execution support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class ShowKbCommandInterpreterTest { + + @Test + public void correctUse_succeeds() throws ParsingException, CommandExecutionException, IOException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@showkb ."); + interpreter.runCommand(command); + + StringWriter anotherWriter = new StringWriter(); + interpreter.getKnowledgeBase().writeKnowledgeBase(anotherWriter); + + assertEquals("showkb", command.getName()); + assertEquals(0, command.getArguments().size()); + assertEquals(writer.toString(), anotherWriter.toString()); + } + + @Test(expected = CommandExecutionException.class) + public void wrongArgumentCount_fails() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + + Command command = interpreter.parseCommand("@showkb p(?X) ."); + interpreter.runCommand(command); + } + + @Test(expected = CommandExecutionException.class) + public void ioError_fails() throws ParsingException, CommandExecutionException, IOException { + Writer writer = Mockito.mock(Writer.class); + Mockito.doThrow(IOException.class).when(writer).write(Mockito.anyString()); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + Predicate predicate = Expressions.makePredicate("p", 1); + Term term = Expressions.makeAbstractConstant("a"); + Fact fact = Expressions.makeFact(predicate, term); + interpreter.getKnowledgeBase().addStatement(fact); + + Command command = interpreter.parseCommand("@showkb ."); + interpreter.runCommand(command); + } + + @Test + public void help_succeeds() throws ParsingException, CommandExecutionException { + StringWriter writer = new StringWriter(); + Interpreter interpreter = InterpreterTest.getMockInterpreter(writer); + CommandInterpreter commandInterpreter = new 
ShowKbCommandInterpreter();
+		InterpreterTest.checkHelpFormat(commandInterpreter, interpreter, writer);
+	}
+
+	@Test
+	public void synopsis_succeeds() throws ParsingException, CommandExecutionException {
+		CommandInterpreter commandInterpreter = new ShowKbCommandInterpreter();
+		InterpreterTest.checkSynopsisFormat(commandInterpreter);
+	}
+
+}
diff --git a/vlog4j-rdf/LICENSE.txt b/rulewerk-core/LICENSE.txt
similarity index 100%
rename from vlog4j-rdf/LICENSE.txt
rename to rulewerk-core/LICENSE.txt
diff --git a/rulewerk-core/pom.xml b/rulewerk-core/pom.xml
new file mode 100644
index 000000000..e647d0d75
--- /dev/null
+++ b/rulewerk-core/pom.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>org.semanticweb.rulewerk</groupId>
+		<artifactId>rulewerk-parent</artifactId>
+		<version>0.10.0-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>rulewerk-core</artifactId>
+	<packaging>jar</packaging>
+
+	<name>Rulewerk Core Components</name>
+	<description>Core components of Rulewerk: reasoner interface and model</description>
+
+</project>
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/IncompatiblePredicateArityException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java
similarity index 61%
rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/IncompatiblePredicateArityException.java
rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java
index cc61ae358..57f505120 100644
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/IncompatiblePredicateArityException.java
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/IncompatiblePredicateArityException.java
@@ -1,10 +1,10 @@
-package org.semanticweb.vlog4j.core.reasoner.exceptions;
+package org.semanticweb.rulewerk.core.exceptions;
 
 /*-
  * #%L
- * VLog4j Core Components
+ * Rulewerk Core Components
  * %%
- * Copyright (C) 2018 VLog4j Developers
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
  * %%
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -22,21 +22,17 @@
 
 import java.text.MessageFormat;
 
-import org.semanticweb.vlog4j.core.model.api.Predicate;
-import org.semanticweb.vlog4j.core.reasoner.DataSource;
-import org.semanticweb.vlog4j.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
 
 /**
- * Expression thrown when attempting to load the reasoner with a knowledge base
- * that contains facts from a {@link DataSource} (added with
- * {@link Reasoner#addFactsFromDataSource(Predicate, DataSource)}), whose arity
- * does not correspond to the arity of the {@link Predicate} the data source was
- * added for.
- *
+ * Exception thrown when attempting to load facts for a {@link Predicate} from
+ * a {@link DataSource} that does not contain data of the specified arity.
+ * * @author Irina Dragoste * */ -public class IncompatiblePredicateArityException extends VLog4jException { +public class IncompatiblePredicateArityException extends RulewerkRuntimeException { private static final long serialVersionUID = -5081219042292721026L; private static final String messagePattern = "Predicate arity [{0}] of predicate [{1}] incompatible with arity [{2}] of the data source [{3}]!"; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java new file mode 100644 index 000000000..0e6515403 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/PrefixDeclarationException.java @@ -0,0 +1,33 @@ +package org.semanticweb.rulewerk.core.exceptions; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public class PrefixDeclarationException extends RulewerkException { + private static final long serialVersionUID = 787997047134745982L; + + public PrefixDeclarationException(String errorMessage) { + super(errorMessage); + } + + public PrefixDeclarationException(String errorMessage, Throwable cause) { + super(errorMessage, cause); + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/ReasonerStateException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java similarity index 83% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/ReasonerStateException.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java index eb7fc8135..d87ef7fcd 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/ReasonerStateException.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/ReasonerStateException.java @@ -1,8 +1,8 @@ /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,20 +18,20 @@ * #L% */ -package org.semanticweb.vlog4j.core.reasoner.exceptions; +package org.semanticweb.rulewerk.core.exceptions; import java.text.MessageFormat; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; /** * Thrown when an operation that is invalid in current reasoner state is * attempted. 
- * + * * @author Irina Dragoste * */ -public class ReasonerStateException extends VLog4jException { +public class ReasonerStateException extends RulewerkRuntimeException { /** * generated serial version UID @@ -42,7 +42,7 @@ public class ReasonerStateException extends VLog4jException { /** * Creates an exception with a logging message for current reasoner state. - * + * * @param state * the current Reasoner state. * @param message diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java new file mode 100644 index 000000000..5223c04bc --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkException.java @@ -0,0 +1,50 @@ +package org.semanticweb.rulewerk.core.exceptions; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Top-level checked exception for Rulewerk system. + * @author Irina Dragoste + * + */ +public class RulewerkException extends Exception { + + /** + * generated serial version UID + */ + private static final long serialVersionUID = 8305375071519734590L; + + public RulewerkException(Throwable cause) { + super(cause); + } + + public RulewerkException(String message, Throwable cause) { + super(message, cause); + } + + public RulewerkException(String message) { + super(message); + } + + public RulewerkException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java new file mode 100644 index 000000000..93237f788 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/exceptions/RulewerkRuntimeException.java @@ -0,0 +1,51 @@ +package org.semanticweb.rulewerk.core.exceptions; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Superclass of unchecked exceptions generated by Rulewerk. 
+ * + * @author Markus Kroetzsch + * + */ +public class RulewerkRuntimeException extends RuntimeException { + + /** + * Generated serial version ID. + */ + private static final long serialVersionUID = -6574826887294416900L; + + public RulewerkRuntimeException(Throwable cause) { + super(cause); + } + + public RulewerkRuntimeException(String message, Throwable cause) { + super(message, cause); + } + + public RulewerkRuntimeException(String message) { + super(message); + } + + public RulewerkRuntimeException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java new file mode 100644 index 000000000..5d43e7cf3 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/AbstractConstant.java @@ -0,0 +1,37 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for abstract constants, i.e. for constants that represent an + * abstract domain element (in contrast to a specific value of a concrete + * datatype). Such terms are of type {@link TermType#ABSTRACT_CONSTANT}. + * + * @author Markus Kroetzsch + */ +public interface AbstractConstant extends Constant { + + @Override + default TermType getType() { + return TermType.ABSTRACT_CONSTANT; + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java new file mode 100644 index 000000000..e25136bd3 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Argument.java @@ -0,0 +1,225 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Optional; +import java.util.function.Function; + +/** + * A tagged union representing the possible types allowed to appear as arguments + * in commands and parser directives. 
+ *
+ * @author Maximilian Marx
+ */
+public abstract class Argument {
+	private Argument() {
+	}
+
+	/**
+	 * Apply a function to the contained value.
+	 *
+	 * @param termHandler the function to apply to a Term
+	 * @param ruleHandler the function to apply to a Rule
+	 * @param positiveLiteralHandler the function to apply to a Literal
+	 *
+	 * @return the value returned by the appropriate handler function
+	 */
+	public abstract <V> V apply(Function<Term, V> termHandler, Function<Rule, V> ruleHandler,
+			Function<PositiveLiteral, V> positiveLiteralHandler);
+
+	/**
+	 * Partially compare two arguments, without comparing the actual values.
+	 *
+	 * @param other the Object to compare to.
+	 *
+	 * @return An {@link Optional} containing true if the arguments are surely
+	 *         equal, containing false if the arguments are not equal, or an empty
+	 *         Optional if the values of the arguments need to be compared.
+	 *
+	 */
+	protected Optional<Boolean> isEqual(Object other) {
+		if (other == null) {
+			return Optional.of(false);
+		}
+
+		if (other == this) {
+			return Optional.of(true);
+		}
+
+		if (!(other instanceof Argument)) {
+			return Optional.of(false);
+		}
+
+		return Optional.empty();
+	}
+
+	/**
+	 * Create an argument containing a Term.
+	 *
+	 * @param value the Term value
+	 *
+	 * @return An argument containing the given Term value
+	 */
+	public static Argument term(Term value) {
+		return new Argument() {
+			@Override
+			public <V> V apply(Function<Term, V> termHandler, Function<Rule, V> ruleHandler,
+					Function<PositiveLiteral, V> positiveLiteralHandler) {
+				return termHandler.apply(value);
+			}
+
+			@Override
+			public boolean equals(Object other) {
+				Optional<Boolean> maybeEquals = isEqual(other);
+
+				if (maybeEquals.isPresent()) {
+					return maybeEquals.get();
+				}
+
+				Argument otherArgument = (Argument) other;
+				return otherArgument.apply(term -> term.equals(value), rule -> false, positiveLiteral -> false);
+			}
+
+			@Override
+			public int hashCode() {
+				return 47 * value.hashCode();
+			}
+
+			@Override
+			public String toString() {
+				return value.toString();
+			}
+		};
+	}
+
+	/**
+	 * Create an argument containing a Rule.
+	 *
+	 * @param value the Rule value
+	 *
+	 * @return An argument containing the given Rule value
+	 */
+	public static Argument rule(Rule value) {
+		return new Argument() {
+			@Override
+			public <V> V apply(Function<Term, V> termHandler, Function<Rule, V> ruleHandler,
+					Function<PositiveLiteral, V> positiveLiteralHandler) {
+				return ruleHandler.apply(value);
+			}
+
+			@Override
+			public boolean equals(Object other) {
+				Optional<Boolean> maybeEquals = isEqual(other);
+
+				if (maybeEquals.isPresent()) {
+					return maybeEquals.get();
+				}
+
+				Argument otherArgument = (Argument) other;
+				return otherArgument.apply(term -> false, rule -> rule.equals(value), positiveLiteral -> false);
+			}
+
+			@Override
+			public int hashCode() {
+				return 53 * value.hashCode();
+			}
+
+			@Override
+			public String toString() {
+				return value.toString();
+			}
+		};
+	}
+
+	/**
+	 * Create an argument containing a PositiveLiteral.
+	 *
+	 * @param value the PositiveLiteral value
+	 *
+	 * @return An argument containing the given PositiveLiteral value
+	 */
+	public static Argument positiveLiteral(PositiveLiteral value) {
+		return new Argument() {
+			@Override
+			public <V> V apply(Function<Term, V> termHandler, Function<Rule, V> ruleHandler,
+					Function<PositiveLiteral, V> positiveLiteralHandler) {
+				return positiveLiteralHandler.apply(value);
+			}
+
+			@Override
+			public boolean equals(Object other) {
+				Optional<Boolean> maybeEquals = isEqual(other);
+
+				if (maybeEquals.isPresent()) {
+					return maybeEquals.get();
+				}
+
+				Argument otherArgument = (Argument) other;
+				return otherArgument.apply(term -> false, rule -> false,
+						positiveLiteral -> positiveLiteral.equals(value));
+			}
+
+			@Override
+			public int hashCode() {
+				return 59 * value.hashCode();
+			}
+
+			@Override
+			public String toString() {
+				return value.toString();
+			}
+		};
+	}
+
+	/**
+	 * Create an optional from a (possible) Term value.
+	 *
+	 * @return An optional containing the contained Term, or an empty Optional if
+	 *         the argument doesn't contain a Term.
+	 */
+	public Optional<Term> fromTerm() {
+		return this.apply(Optional::of, value -> Optional.empty(), value -> Optional.empty());
+	}
+
+	/**
+	 * Create an optional from a (possible) Rule value.
+	 *
+	 * @return An optional containing the contained Rule, or an empty Optional if
+	 *         the argument doesn't contain a Rule.
+	 */
+	public Optional<Rule> fromRule() {
+		return this.apply(value -> Optional.empty(), Optional::of, value -> Optional.empty());
+	}
+
+	/**
+	 * Create an optional from a (possible) PositiveLiteral value.
+	 *
+	 * @return An optional containing the contained PositiveLiteral, or an empty
+	 *         Optional if the argument doesn't contain a PositiveLiteral.
+	 */
+	public Optional<PositiveLiteral> fromPositiveLiteral() {
+		return this.apply(value -> Optional.empty(), value -> Optional.empty(), Optional::of);
+	}
+}
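Because Argument exposes its tagged union only through the apply visitor and the from* projections, a short usage sketch may be helpful (the standalone class is hypothetical; all Argument and Expressions calls are as defined in this patch):

import org.semanticweb.rulewerk.core.model.api.Argument;
import org.semanticweb.rulewerk.core.model.api.Term;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class ArgumentSketch {
	public static void main(String[] args) {
		Term constant = Expressions.makeAbstractConstant("http://example.org/a");
		Argument argument = Argument.term(constant);

		// Project the union back onto one variant; empty for the other two.
		System.out.println(argument.fromTerm().isPresent()); // true
		System.out.println(argument.fromRule().isPresent()); // false

		// Or fold over all three variants with the visitor-style apply.
		String kind = argument.apply(term -> "term", rule -> "rule", literal -> "literal");
		System.out.println(kind); // prints "term"
	}
}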
+ * + * @return the name of the command + */ + public String getName() { + return name; + } + + /** + * Returns the command arguments. + * + * @return the list of arguments of the command + */ + public List<Argument> getArguments() { + return arguments; + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeCommand(this)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java new file mode 100644 index 000000000..f7b8b760f --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Conjunction.java @@ -0,0 +1,42 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +/** + * Interface for representing conjunctions of {@link Literal}s, i.e., lists of + * (negated or positive) atomic formulas that are connected with logical AND. + * Conjunctions may have free variables, since they contain no quantifiers. + * + * @author Markus Krötzsch + * + */ +public interface Conjunction<T extends Literal> extends Iterable<T>, SyntaxObject { + + /** + * Returns the list of literals that are part of this conjunction. + * + * @return list of literals + */ + List<T> getLiterals(); + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java similarity index 71% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java index 09d8c38eb..bf04acd88 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Constant.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Constant.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,13 +21,11 @@ */ /** - * Interface for constants, i.e., terms of type {@link TermType#CONSTANT}. - * Domain elements that are denoted by constants are commonly known as named - * elements. + * Interface for constants, i.e. for syntactic names that refer to domain + * elements.
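Since Conjunction extends Iterable, the literals can be traversed directly; a brief sketch, assuming an existing Conjunction<Literal> named "conjunction":

	// Sketch: equivalent to looping over conjunction.getLiterals().
	for (Literal literal : conjunction) {
		System.out.println(literal.getPredicate().getName());
	}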
* * @author david.carral@tu-dresden.de * @author Markus Krötzsch */ public interface Constant extends Term { } - diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java new file mode 100644 index 000000000..f8afe3ee8 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSource.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Optional; + +/** + * Interface for the various types of data sources used for storing facts. + * + * @author Irina Dragoste + * + */ +public interface DataSource extends Entity { + + /** + * Retrieve the required arity of target predicates for the data source. + * + * @return the required arity for the data source, or Optional.empty() if there + * is none. + */ + public default Optional<Integer> getRequiredArity() { + return Optional.empty(); + } + + /** + * Returns a fact that represents the declaration of this {@link DataSource}. + * Rulewerk syntax uses facts to specify the relevant parameters for data source + * declarations. + * + * @return {@link Fact} that contains the parameters of this data source + */ + public Fact getDeclarationFact(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java new file mode 100644 index 000000000..22efc0aae --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DataSourceDeclaration.java @@ -0,0 +1,46 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A declaration for an external data source, which assigns a predicate to a + * source. + * + * @author Markus Kroetzsch + * + */ +public interface DataSourceDeclaration extends Statement { + + /** + * Returns the {@link Predicate} that this source applies to. + * + * @return predicate into which data from the given source is loaded + */ + Predicate getPredicate(); + + /** + * Returns the {@link DataSource} that the data is loaded from.
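A sketch of what implementing this contract might look like; the class name TripleFileSource and the fixed arity of 3 are invented for illustration:

	// Hypothetical source that only populates ternary predicates.
	public class TripleFileSource implements DataSource {
		@Override
		public Optional<Integer> getRequiredArity() {
			return Optional.of(3);
		}

		@Override
		public Fact getDeclarationFact() {
			// A real implementation would construct its declaration fact here.
			throw new UnsupportedOperationException("sketch only");
		}
	}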
+ * + * @return data source specification + */ + DataSource getDataSource(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java new file mode 100644 index 000000000..c64955d35 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/DatatypeConstant.java @@ -0,0 +1,75 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for datatype constants, i.e. for constants that represent a + * specific value of a concrete datatype. Such terms are of type + * {@link TermType#DATATYPE_CONSTANT}. + * + * Note that datatype literal is a common name for the representation of + * specific values of a datatype. We mostly avoid this meaning of + * literal since a literal in logic is typically a negated or non-negated + * atom. + * + * @author Markus Kroetzsch + */ +public interface DatatypeConstant extends Constant { + + @Override + default TermType getType() { + return TermType.DATATYPE_CONSTANT; + } + + /** + * Returns the datatype of this term, which is typically an IRI that defines how + * to interpret the lexical value. + * + * @return a non-blank String (not null, nor empty or whitespace). + */ + String getDatatype(); + + /** + * Returns the lexical value of the data value, i.e. a string that encodes a + * specific value based on the value's datatype. Note that there can be several + * strings that represent the same value, depending on the rules of the + * datatype, and that the value used here does not have to be a canonical + * representation. + * + * @return a non-null string + */ + String getLexicalValue(); + + /** + * Returns a string representation of this value that conforms to RDF + * serialisation formats such as Turtle. Turtle supports some abbreviations for + * common types, e.g., by leaving the type away for xsd:string literals; these + * abbreviations can be enabled or disabled through the method's parameter.
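For orientation, expected behaviour for a constant representing the integer 42 (a sketch; the exact abbreviation rules depend on the implementation):

	datatypeConstant.getLexicalValue();          // "42"
	datatypeConstant.getDatatype();              // "http://www.w3.org/2001/XMLSchema#integer"
	datatypeConstant.getRdfLiteralString(false); // "42"^^<http://www.w3.org/2001/XMLSchema#integer>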
+ * + * @param useAbbreviations if true, the result may use Turtle-style + * abbreviations to shorten the output where supported; + * otherwise the literal will always be serialised in + * full + * @return RDF-style string serialisation of the value + */ + String getRdfLiteralString(boolean useAbbreviations); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java new file mode 100644 index 000000000..75c40aa3d --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Entity.java @@ -0,0 +1,29 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Most general type of syntactic entity in Rulewerk. + * + */ +public interface Entity { + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java new file mode 100644 index 000000000..1b47a54c7 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/ExistentialVariable.java @@ -0,0 +1,36 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for existentially quantified variables, i.e., variables that appear + * in the scope of an (implicit) existential quantifier in a rule. 
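In Rulewerk's surface syntax, universal variables are written with a leading "?" and existential variables with a leading "!"; for example (predicate names invented for illustration), the rule

	hasPart(?X, !Y) :- composite(?X) .

reads "every composite thing has some part", with !Y existentially quantified in the head.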
+ * + * @author Markus Krötzsch + */ +public interface ExistentialVariable extends Variable { + + @Override + default TermType getType() { + return TermType.EXISTENTIAL_VARIABLE; + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java new file mode 100644 index 000000000..d3b938897 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Fact.java @@ -0,0 +1,32 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A fact is a positive (non-negated) literal that contains only constants as + * its terms, but no variables. + * + * @author Markus Kroetzsch + * + */ +public interface Fact extends PositiveLiteral, Statement { + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java new file mode 100644 index 000000000..e19bf3517 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/LanguageStringConstant.java @@ -0,0 +1,64 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for string constants with a language tag, used to represent values + * of type http://www.w3.org/1999/02/22-rdf-syntax-ns#langString in RDF, OWL, + * and related languages used with knowledge graphs. Such terms are of type + * {@link TermType#LANGSTRING_CONSTANT}. + * + * @author Markus Kroetzsch + */ +public interface LanguageStringConstant extends Constant { + + @Override + default TermType getType() { + return TermType.LANGSTRING_CONSTANT; + } + + /** + * Returns the datatype of this term, which is always + * http://www.w3.org/1999/02/22-rdf-syntax-ns#langString. + * + * @return a IRI of RDF langString datatype + */ + default String getDatatype() { + return PrefixDeclarationRegistry.RDF_LANGSTRING; + } + + /** + * Returns the string value of the literal without the language tag. 
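For a constant written "Hello"@en, the LanguageStringConstant accessors would be expected to behave as follows (sketch):

	languageStringConstant.getString();      // "Hello"
	languageStringConstant.getLanguageTag(); // "en"
	languageStringConstant.getDatatype();    // "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"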
+ * + * @return a non-null string + */ + String getString(); + + /** + * Returns the language tag of the literal, which should be a lowercase string + * that conforms to the BCP 47 + * specification. + * + * @return a non-empty string + */ + String getLanguageTag(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java new file mode 100644 index 000000000..b345b070d --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Literal.java @@ -0,0 +1,53 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +/** + * Interface for literals. A positive literal is simply an atomic formula, i.e., + * a formula of the form P(t1,...,tn) where P is a {@link Predicate} of arity n + * and t1,...,tn are {@link Term}s. A negative literal is a negated atomic + * formula. + * + * @author david.carral@tu-dresden.de + * @author Irina Dragoste + */ +public interface Literal extends SyntaxObject { + + boolean isNegated(); + + /** + * The literal predicate. + * + * @return the literal predicate. + */ + Predicate getPredicate(); + + /** + * The list of terms representing the tuple arguments. + * + * @return an unmodifiable list of terms with the same size as the + * {@link Predicate} arity. + */ + List getArguments(); + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Blank.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java similarity index 70% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Blank.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java index 5cd7d1803..c4f6354cb 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Blank.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NamedNull.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,12 +21,17 @@ */ /** - * Interface for {@link TermType#BLANK} terms. A blank is an entity used to + * Interface for {@link TermType#NAMED_NULL} terms. A blank is an entity used to * represent anonymous domain elements introduced during the reasoning process * to satisfy existential restrictions. 
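A sketch of the Literal accessors for an atom p(a, ?X); the predicate and term names are illustrative:

	literal.getPredicate().getName(); // "p"
	literal.getArguments().size();    // 2, always equal to the predicate arity
	literal.isNegated();              // false for a PositiveLiteral, true for a NegativeLiteral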
* * @author david.carral@tu-dresden.de */ -public interface Blank extends Term { -} +public interface NamedNull extends Term { + + @Override + default TermType getType() { + return TermType.NAMED_NULL; + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java new file mode 100644 index 000000000..df5c6e6b0 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/NegativeLiteral.java @@ -0,0 +1,30 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public interface NegativeLiteral extends Literal { + + @Override + default boolean isNegated() { + return true; + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java new file mode 100644 index 000000000..6c4598a77 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PositiveLiteral.java @@ -0,0 +1,29 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public interface PositiveLiteral extends Literal { + + @Override + default boolean isNegated() { + return false; + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java similarity index 86% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java index aaa126fa6..7ed7d48f8 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Predicate.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Predicate.java @@ -1,50 +1,49 @@ -package org.semanticweb.vlog4j.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * A Predicate represents a relation between terms. Is uniquely identified by - * its name and arity. The arity determines the number of terms allowed in the - * relation. For example, a Predicate with name {@code P} and arity {@code n} - * allows atomic formulae of the form {@code P(t1,...,tn)}. - * - * @author Irina Dragoste - * - */ -public interface Predicate { - - /** - * The name of the Predicate. - * - * @return the name of the Predicate. - */ - String getName(); - - /** - * The arity represents the number of terms allowed as relation arguments for - * this Predicate. For example, a Predicate with name {@code P} and arity - * {@code n} allows atomic formulae of the form {@code P(t1,...,tn)}. - * - * @return the arity of the Predicate. - */ - int getArity(); - -} +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A Predicate represents a relation between terms. Is uniquely identified by + * its name and arity. The arity determines the number of terms allowed in the + * relation. For example, a Predicate with name {@code P} and arity {@code n} + * allows atomic formulae of the form {@code P(t1,...,tn)}. + * + * @author Irina Dragoste + * + */ +public interface Predicate extends Entity { + + /** + * The name of the Predicate. + * + * @return the name of the Predicate. + */ + String getName(); + + /** + * The arity represents the number of terms allowed as relation arguments for + * this Predicate. For example, a Predicate with name {@code P} and arity + * {@code n} allows atomic formulae of the form {@code P(t1,...,tn)}. + * + * @return the arity of the Predicate. + */ + int getArity(); +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java new file mode 100644 index 000000000..1532c706c --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/PrefixDeclarationRegistry.java @@ -0,0 +1,138 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Map.Entry; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; + +/** + * Registry that manages prefixes and base namespace declarations as used for + * parsing and serialising inputs. + * + * @author Markus Kroetzsch + */ +public interface PrefixDeclarationRegistry extends Iterable> { + + static final String XSD = "http://www.w3.org/2001/XMLSchema#"; + static final String XSD_STRING = "http://www.w3.org/2001/XMLSchema#string"; + static final String XSD_DECIMAL = "http://www.w3.org/2001/XMLSchema#decimal"; + static final String XSD_DOUBLE = "http://www.w3.org/2001/XMLSchema#double"; + static final String XSD_FLOAT = "http://www.w3.org/2001/XMLSchema#float"; + static final String XSD_INTEGER = "http://www.w3.org/2001/XMLSchema#integer"; + static final String XSD_INT = "http://www.w3.org/2001/XMLSchema#int"; + static final String XSD_LONG = "http://www.w3.org/2001/XMLSchema#long"; + static final String XSD_SHORT = "http://www.w3.org/2001/XMLSchema#short"; + static final String XSD_BYTE = "http://www.w3.org/2001/XMLSchema#byte"; + static final String XSD_BOOLEAN = "http://www.w3.org/2001/XMLSchema#boolean"; + static final String RDF_LANGSTRING = "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString"; + + static final String RDF = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"; + static final String RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; + + static final String EMPTY_BASE = ""; + static final String PREFIX_NAME_SEPARATOR = ":"; + + /** + * Resets the registry to an empty state, without a base or any prefixes. + */ + void clear(); + + /** + * Returns the relevant base namespace. This should always return a result, + * possibly using a local default value if no base was declared. + * + * @return string of an absolute base IRI + */ + String getBaseIri(); + + /** + * Sets the base namespace to the given value. This should only be done once, + * and not after the base namespace was assumed to be an implicit default value. + * + * @param baseIri the new base namespace + * @throws PrefixDeclarationException if base was already defined + */ + void setBaseIri(String baseIri) throws PrefixDeclarationException; + + /** + * Returns the IRI associated with a given prefix name. + * + * @param prefixName the name of the prefix. + * @throws PrefixDeclarationException if prefixName was not defined. + */ + String getPrefixIri(String prefixName) throws PrefixDeclarationException; + + /** + * Registers a prefix declaration. Behaviour is implementation-defined if + * prefixName has already been registered. + * + * @param prefixName the name of the prefix. + * @param prefixIri the IRI of the prefix. + * + * @throws PrefixDeclarationException when prefixName is already registered, at + * the discretion of the implementation. + */ + void setPrefixIri(String prefixName, String prefixIri) throws PrefixDeclarationException; + + /** + * Un-registers a prefix declaration if present. + * + * @param prefixName the name of the prefix. 
+ */ + void unsetPrefix(String prefixName); + + /** + * Turn a prefixed + * name into an absolute IRI. + * + * @param prefixedName a prefixed name of the form prefixName:localName. + * + * @throws PrefixDeclarationException when the prefixName has not been declared. + * @return an absolute IRI corresponding to prefixedName. + */ + String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException; + + /** + * Turn a potentially relative IRI into an absolute IRI. + * + * @param relativeOrAbsoluteIri an IRI that may be relative or absolute. + * @throws PrefixDeclarationException when relativeOrAbsoluteIri is not a valid + * IRI. + * + * @return when relativeOrAbsoluteIri is an absolute IRI, it is returned as-is. + * Otherwise, the current base IRI is prepended. + */ + String absolutizeIri(String relativeOrAbsoluteIri) throws PrefixDeclarationException; + + /** + * Turn an absolute IRI into a (possibly) prefixed name. Dual to + * {@link PrefixDeclarationRegistry#resolvePrefixedName}. + * + * @param iri an absolute IRI to abbreviate + * @param addIriBrackets if true, unabbreviated IRIs will be enclosed in < + * > + * + * @return an abbreviated form of {@code iri} if an appropriate prefix is known, + * or {@code iri}. + */ + String unresolveAbsoluteIri(String iri, boolean addIriBrackets); +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java similarity index 87% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java index 0ea7e1f8e..49c6319b7 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/QueryResult.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/QueryResult.java @@ -1,43 +1,43 @@ -package org.semanticweb.vlog4j.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; - -/** - * A Query Result represents a list of terms that match the terms of the asked - * query. The terms can be named individuals (constants) and anonymous - * individuals (blanks). - * - * @author Irina Dragoste - * - */ -public interface QueryResult { - - /** - * Getter for the terms that represent a query answer. - * - * @return the terms that represent a query answer. They can be named - * individuals (constants) and anonymous individuals (blanks). - */ - List getTerms(); - -} +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
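A sketch of the registry contract end to end; the prefix "ex:" and the IRIs are invented, and it is assumed here (following PREFIX_NAME_SEPARATOR above) that prefix names include the trailing colon:

	registry.setPrefixIri("ex:", "http://example.org/");
	registry.resolvePrefixedName("ex:married");                        // "http://example.org/married"
	registry.absolutizeIri("http://example.org/a");                    // absolute IRIs pass through unchanged
	registry.unresolveAbsoluteIri("http://example.org/married", true); // "ex:married"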
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +/** + * A Query Result represents a list of terms that match the terms of the asked + * query. The terms can be named individuals (constants) and anonymous + * individuals (blanks). + * + * @author Irina Dragoste + * + */ +public interface QueryResult { + + /** + * Getter for the terms that represent a query answer. + * + * @return the terms that represent a query answer. They can be named + * individuals (constants) and anonymous individuals (blanks). + */ + List getTerms(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java new file mode 100644 index 000000000..9187282eb --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Rule.java @@ -0,0 +1,49 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for classes representing a rule. This implementation assumes that + * rules are defined by their head and body literals, without explicitly + * specifying quantifiers. All variables in the body are considered universally + * quantified; all variables in the head that do not occur in the body are + * considered existentially quantified. + * + * @author Markus Krötzsch + * + */ +public interface Rule extends SyntaxObject, Statement { + + /** + * Returns the conjunction of head literals (the consequence of the rule). + * + * @return conjunction of literals + */ + Conjunction getHead(); + + /** + * Returns the conjunction of body literals (the premise of the rule). + * + * @return conjunction of literals + */ + Conjunction getBody(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java new file mode 100644 index 000000000..f43c03248 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Statement.java @@ -0,0 +1,39 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
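Reading a rule back through these accessors, as a sketch; the Rule instance "rule" is assumed, and the parameterised types follow the Conjunction<T> interface earlier in this patch:

	Conjunction<PositiveLiteral> head = rule.getHead();
	Conjunction<Literal> body = rule.getBody();
	// Body variables are universal; head variables absent from the body are existential.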
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A statement is any element that a knowledge base can consist of, such as a + * {@link Rule}, {@link Fact}, or {@link DataSourceDeclaration}. + * + * @author Markus Kroetzsch + * + */ +public interface Statement extends Entity { + + /** + * Accept a {@link StatementVisitor} and return its output. + * + * @param statementVisitor the StatementVisitor + * @return output of the visitor + */ + T accept(StatementVisitor statementVisitor); +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java similarity index 50% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java index a311de8b4..5e8f308f6 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermVisitor.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/StatementVisitor.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,37 +21,36 @@ */ /** - * A visitor for the various types of {@link Term}s in the data model. Should be - * used to avoid any type casting or {@code instanceof} checks when processing terms. + * A visitor for the various types of {@link Statement}s in the data model. + * Should be used to avoid any type casting or {@code instanceof} checks when + * processing statements. * * @author Markus Krötzsch */ -public interface TermVisitor { +public interface StatementVisitor { /** - * Visits a {@link Constant} and returns a result + * Visits a {@link Fact} and returns a result. * - * @param term - * the term to visit + * @param statement the statement to visit * @return some result */ - T visit(Constant term); + T visit(Fact statement); /** - * Visits a {@link Variable} and returns a result + * Visits a {@link Rule} and returns a result. * - * @param term - * the term to visit + * @param statement the statement to visit * @return some result */ - T visit(Variable term); + T visit(Rule statement); /** - * Visits a {@link Blank} and returns a result + * Visits a {@link DataSourceDeclaration} and returns a result. 
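A sketch of a StatementVisitor implementation that classifies statements by kind; the visitor and the Statement instance "statement" are illustrative:

	StatementVisitor<String> classifier = new StatementVisitor<String>() {
		@Override
		public String visit(Fact statement) {
			return "fact";
		}

		@Override
		public String visit(Rule statement) {
			return "rule";
		}

		@Override
		public String visit(DataSourceDeclaration statement) {
			return "data source declaration";
		}
	};
	String kind = statement.accept(classifier);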
* - * @param term - * the term to visit + * @param statement the statement to visit * @return some result */ - T visit(Blank term); + T visit(DataSourceDeclaration statement); + } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java new file mode 100644 index 000000000..7dcf50142 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/SyntaxObject.java @@ -0,0 +1,105 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.stream.Stream; + +/** + * General interface for all classes that represent syntactic objects that might + * contain atomic terms, in particular all kinds of logical formulas. Compound + * terms (with nested functions) would also be of this type if we had them. + * + * @author Markus Kroetzsch + * + */ +public interface SyntaxObject extends Entity { + + /** + * Returns the stream of distinct terms that occur in this object. + * + * @return stream of distinct terms used in this object + */ + Stream getTerms(); + + /** + * Return the stream of distinct universal variables in this object. + * + * @return stream of universal variables + */ + default Stream getUniversalVariables() { + return Terms.getUniversalVariables(getTerms()); + } + + /** + * Return the stream of distinct existential variables in this object. + * + * @return stream of existential variables + */ + default Stream getExistentialVariables() { + return Terms.getExistentialVariables(getTerms()); + } + + /** + * Return the stream of distinct variables in this object. + * + * @return stream of variables + */ + default Stream getVariables() { + return Terms.getVariables(getTerms()); + } + + /** + * Return the stream of distinct constants in this object. + * + * @return stream of constants + */ + default Stream getConstants() { + return Terms.getConstants(getTerms()); + } + + /** + * Return the stream of distinct abstract constants in this object. + * + * @return stream of abstract constants + */ + default Stream getAbstractConstants() { + return Terms.getAbstractConstants(getTerms()); + } + + /** + * Return the stream of distinct datatype constants in this object. + * + * @return stream of datatype constants + */ + default Stream getDatatypeConstants() { + return Terms.getDatatypeConstants(getTerms()); + } + + /** + * Return the stream of distinct named nulls in this object. 
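All of the default helpers reduce to filters over getTerms(); a brief usage sketch on some SyntaxObject named "syntaxObject":

	boolean isGround = syntaxObject.getVariables().count() == 0;
	syntaxObject.getAbstractConstants().forEach(c -> System.out.println(c.getName()));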
+ * + * @return stream of named nulls + */ + default Stream getNamedNulls() { + return Terms.getNamedNulls(getTerms()); + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java similarity index 59% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java index 898c41c7c..bb0be440d 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Term.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Term.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,28 +27,47 @@ * @author david.carral@tu-dresden.de * @author Markus Krötzsch */ -public interface Term { +public interface Term extends Entity { /** * Returns the name this term. The name uniquely identifies terms of the same * {@link TermType}. - * + * * @return a non-blank String (not null, nor empty or whitespace). */ String getName(); /** * Return the type of this term. - * + * * @return the type of this term */ TermType getType(); /** - * Accept a {@link TermVisitor} and return its output. + * Returns true if the term represents some kind of constant. + * + * @return true if term is constant + */ + default boolean isConstant() { + return this.getType() == TermType.ABSTRACT_CONSTANT || this.getType() == TermType.DATATYPE_CONSTANT + || this.getType() == TermType.LANGSTRING_CONSTANT; + } + + /** + * Returns true if the term represents some kind of variable. * - * @param termVisitor - * the TermVisitor + * @return true if term is variable + */ + default boolean isVariable() { + return this.getType() == TermType.UNIVERSAL_VARIABLE || this.getType() == TermType.EXISTENTIAL_VARIABLE; + } + + /** + * Accept a {@link TermVisitor} and return its output. + * + * @param termVisitor the TermVisitor + * @param type associated to the given TermVisitor * @return output of the visitor */ T accept(TermVisitor termVisitor); diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java new file mode 100644 index 000000000..490604400 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermType.java @@ -0,0 +1,62 @@ +package org.semanticweb.rulewerk.core.model.api; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
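The two defaults let call sites branch on term kinds without enumerating TermType values; a sketch:

	if (term.isVariable()) {
		// UNIVERSAL_VARIABLE or EXISTENTIAL_VARIABLE
	} else if (term.isConstant()) {
		// ABSTRACT_CONSTANT, DATATYPE_CONSTANT, or LANGSTRING_CONSTANT
	} else {
		// NAMED_NULL: neither a constant nor a variable under these defaults
	}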
+ * #L% + */ + +/** + * Enumeration listing the different types of terms. + * + * @author david.carral@tu-dresden.de + * @author Markus Kroetzsch + * + */ +public enum TermType { + /** + * An abstract constant is a term used to represent named domain elements that + * are not a value of any specific datatype. + */ + ABSTRACT_CONSTANT, + /** + * A datatype constant is a term used to represent named domain elements that + * are the value of a specific datatype. + */ + DATATYPE_CONSTANT, + /** + * A string constant with a language tag, used to represent values of type + * http://www.w3.org/1999/02/22-rdf-syntax-ns#langString in RDF, OWL, and + * related languages used with knowledge graphs. + */ + LANGSTRING_CONSTANT, + /** + * A named null is an entity used to represent anonymous domain elements + * introduced during the reasoning process to satisfy existential restrictions. + */ + NAMED_NULL, + /** + * A universal variable is a variable that can only be used in positions where + * it is universally quantified, or implicitly assumed to be. + */ + UNIVERSAL_VARIABLE, + /** + * An existential variable is a variable that can only be used in positions + * where it is existentially quantified, or implicitly assumed to be. + */ + EXISTENTIAL_VARIABLE +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java new file mode 100644 index 000000000..36d662321 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/TermVisitor.java @@ -0,0 +1,79 @@ +package org.semanticweb.rulewerk.core.model.api; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * A visitor for the various types of {@link Term}s in the data model. Should be + * used to avoid any type casting or {@code instanceof} checks when processing + * terms. + * + * @author Markus Krötzsch + */ +public interface TermVisitor { + + /** + * Visits an {@link AbstractConstant} and returns a result. + * + * @param term the term to visit + * @return some result + */ + T visit(AbstractConstant term); + + /** + * Visits a {@link DatatypeConstant} and returns a result. + * + * @param term the term to visit + * @return some result + */ + T visit(DatatypeConstant term); + + /** + * Visits a {@link LanguageStringConstant} and returns a result. 
+ * + * @param term the term to visit + * @return some result + */ + T visit(LanguageStringConstant term); + + /** + * Visits a {@link UniversalVariable} and returns a result + * + * @param term the term to visit + * @return some result + */ + T visit(UniversalVariable term); + + /** + * Visits a {@link ExistentialVariable} and returns a result + * + * @param term the term to visit + * @return some result + */ + T visit(ExistentialVariable term); + + /** + * Visits a {@link NamedNull} and returns a result + * + * @param term the term to visit + * @return some result + */ + T visit(NamedNull term); +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java new file mode 100644 index 000000000..8e8e05ac2 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Terms.java @@ -0,0 +1,211 @@ +package org.semanticweb.rulewerk.core.model.api; + +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.stream.Stream; + +/** + * Collection of utility methods for handling {@link Term}s. + * + * @author Markus Kroetzsch + * + */ +public class Terms { + + /** + * Returns a stream of variables found in the given stream of terms. Ordering + * and duplicates are not affected. + * + * @param terms stream of all terms + * @return stream of results + */ + public static Stream getVariables(Stream terms) { + return terms.filter(term -> term.isVariable()).map(Variable.class::cast); + } + + /** + * Returns a stream of constants found in the given stream of terms. Ordering + * and duplicates are not affected. + * + * @param terms stream of all terms + * @return stream of results + */ + public static Stream getConstants(Stream terms) { + return terms.filter(term -> term.isConstant()).map(Constant.class::cast); + } + + /** + * Returns a stream of named nulls found in the given stream of terms. Ordering + * and duplicates are not affected. + * + * @param terms stream of all terms + * @return stream of results + */ + public static Stream getNamedNulls(Stream terms) { + return terms.filter(term -> term.getType() == TermType.NAMED_NULL).map(NamedNull.class::cast); + } + + /** + * Returns a stream of universal variables found in the given stream of terms. + * Ordering and duplicates are not affected. + * + * @param terms stream of all terms + * @return stream of results + */ + public static Stream getUniversalVariables(Stream terms) { + return terms.filter(term -> term.getType() == TermType.UNIVERSAL_VARIABLE).map(UniversalVariable.class::cast); + } + + /** + * Returns a stream of existential variables found in the given stream of terms. + * Ordering and duplicates are not affected. 
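These static filters compose with any stream of terms, e.g. the one produced by SyntaxObject.getTerms(); a sketch (each call below draws a fresh stream, since Java streams are single-use):

	Stream<UniversalVariable> universals = Terms.getUniversalVariables(rule.getTerms());
	Stream<Constant> constants = Terms.getConstants(rule.getTerms());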
+ * + * @param terms stream of all terms + * @return stream of results + */ + public static Stream getExistentialVariables(Stream terms) { + return terms.filter(term -> term.getType() == TermType.EXISTENTIAL_VARIABLE) + .map(ExistentialVariable.class::cast); + } + + /** + * Returns a stream of abstract constants found in the given stream of terms. + * Ordering and duplicates are not affected. + * + * @param terms stream of all terms + * @return stream of results + */ + public static Stream getAbstractConstants(Stream terms) { + return terms.filter(term -> term.getType() == TermType.ABSTRACT_CONSTANT).map(AbstractConstant.class::cast); + } + + /** + * Returns a stream of datatype constants found in the given stream of terms. + * Ordering and duplicates are not affected. + * + * @param terms stream of all terms + * @return stream of results + */ + public static Stream getDatatypeConstants(Stream terms) { + return terms.filter(term -> term.getType() == TermType.DATATYPE_CONSTANT).map(DatatypeConstant.class::cast); + } + + /** + * Returns the lexical value of a term that is an xsd:string constant, and + * throws an exception for all other cases. + * + * @param term the term from which the string is to be extracted + * @return extracted string + * @throws IllegalArgumentException if the given term is not a constant of type + * xsd:string + */ + public static String extractString(Term term) { + if (term.getType() == TermType.DATATYPE_CONSTANT) { + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + if (PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) + return datatypeConstant.getLexicalValue(); + } + throw new IllegalArgumentException( + "Term " + term.toString() + " is not a datatype constant of type xsd:string."); + } + + /** + * Returns the name of an abstract term, and throws an exception for all other + * cases. + * + * @param term the term from which the name is to be extracted + * @return extracted name + * @throws IllegalArgumentException if the given term is not an abstract + * constant + */ + public static String extractName(Term term) { + if (term.getType() == TermType.ABSTRACT_CONSTANT) { + return term.getName(); + } else { + throw new IllegalArgumentException("Term " + term.toString() + " is not an abstract constant."); + } + } + + /** + * Returns the IRI representation of an abstract term, and throws an exception + * for all other cases. + * + * @param term the term from which the IRI is to be extracted + * @return extracted IRI + * @throws IllegalArgumentException if the given term is not an abstract + * constant or cannot be parsed as an IRI + */ + public static URI extractIri(Term term) { + try { + return new URI(extractName(term)); + } catch (URISyntaxException e) { + throw new IllegalArgumentException(e); + } + } + + /** + * Returns the URL representation of an abstract term, and throws an exception + * for all other cases. + * + * @param term the term from which the URL is to be extracted + * @return extracted URL + * @throws IllegalArgumentException if the given term is not an abstract + * constant or cannot be parsed as a URL + */ + public static URL extractUrl(Term term) { + try { + return new URL(extractName(term)); + } catch (MalformedURLException e) { + throw new IllegalArgumentException(e); + } + } + + /** + * Returns the numeric value of a term that is an xsd:integer (or supported + * subtype) constant, and throws an exception for all other cases. 
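The extract* helpers convert suitable constants into plain Java values and throw IllegalArgumentException for everything else; a sketch with invented inputs:

	String s = Terms.extractString(stringConstant); // lexical value of an xsd:string constant
	URI iri = Terms.extractIri(iriConstant);        // java.net.URI built from an abstract constant's name
	int n = Terms.extractInt(intConstant);          // e.g. 42 for "42"^^xsd:integer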
+ * + * @param term the term from which the integer is to be extracted + * @return extracted integer + * @throws IllegalArgumentException if the given term is not a constant of an + * integer type, or if the lexical + * representation could not be parsed into a + * Java int + */ + public static int extractInt(Term term) { + if (term.getType() == TermType.DATATYPE_CONSTANT) { + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_LONG.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_INT.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_SHORT.equals(datatypeConstant.getDatatype()) + || PrefixDeclarationRegistry.XSD_BYTE.equals(datatypeConstant.getDatatype())) + return Integer.parseInt(datatypeConstant.getLexicalValue()); + } + throw new IllegalArgumentException( + "Term " + term.toString() + " is not a datatype constant of a supported integer type."); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java new file mode 100644 index 000000000..74e95c6ae --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/UniversalVariable.java @@ -0,0 +1,36 @@ +package org.semanticweb.rulewerk.core.model.api; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Interface for universally quantified variables, i.e., variables that appear + * in the scope of an (implicit) universal quantifier in a rule. + * + * @author Markus Krötzsch + */ +public interface UniversalVariable extends Variable { + + @Override + default TermType getType() { + return TermType.UNIVERSAL_VARIABLE; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java similarity index 65% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java index d6bbd59aa..c68d289c2 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Variable.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/api/Variable.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.model.api; +package org.semanticweb.rulewerk.core.model.api; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,13 +21,13 @@ */ /** - * Interface for variables, i.e., terms of type {@link TermType#VARIABLE}. - * Variables are terms that can be quantified to create formulas that refer to - * some or all values of the domain. + * Interface for variables, i.e., terms of type + * {@link TermType#UNIVERSAL_VARIABLE} and + * {@link TermType#EXISTENTIAL_VARIABLE}. Variables are terms that can be + * quantified to create formulas that refer to some or all values of the domain. * * @author david.carral@tu-dresden.de * @author Markus Krötzsch */ public interface Variable extends Term { } - diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java similarity index 54% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConstantImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java index 957f6e13e..508360739 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConstantImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractConstantImpl.java @@ -1,14 +1,13 @@ -package org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,34 +24,29 @@ */ /** - * Implements {@link #CONSTANT} terms. A constant is an entity used to represent named domain elements in the domain. + * Implementation of {@link AbstractConstant}. * * @author david.carral@tu-dresden.de */ -public class ConstantImpl extends AbstractTermImpl implements Constant { +public class AbstractConstantImpl extends AbstractTermImpl implements AbstractConstant { /** - * Instantiates a {@code ConstantImpl} object with the name {@code name}. + * Instantiates a {@code ConstantImpl} object with the name + * {@code name}. * - * @param name - * cannot be a blank String (null, empty or whitespace). + * @param name cannot be a blank String (null, empty or whitespace). 
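Since getType() is now a default method on the variable subinterfaces (as in UniversalVariable above), implementations no longer override it; client code can still dispatch on the term type. A minimal sketch, using the Expressions factory introduced later in this diff:

import org.semanticweb.rulewerk.core.model.api.Term;
import org.semanticweb.rulewerk.core.model.api.TermType;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class TermTypeSketch {
	public static void main(String[] args) {
		Term universal = Expressions.makeUniversalVariable("x");
		Term existential = Expressions.makeExistentialVariable("y");
		// The default methods report the new, more specific term types:
		System.out.println(universal.getType() == TermType.UNIVERSAL_VARIABLE); // true
		System.out.println(existential.getType() == TermType.EXISTENTIAL_VARIABLE); // true
	}
}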
*/ - public ConstantImpl(final String name) { + public AbstractConstantImpl(final String name) { super(name); } - @Override - public TermType getType() { - return TermType.CONSTANT; - } - @Override public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); } - + @Override public String toString() { - return this.getName(); + return Serializer.getSerialization(serializer -> serializer.writeAbstractConstant(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java new file mode 100644 index 000000000..4ee1b98e0 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractLiteralImpl.java @@ -0,0 +1,114 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Collections; +import java.util.List; + +import java.util.stream.Stream; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; + +/** + * Implements {@link Literal} objects. A literal is a formula of the form + * +P(t1,...,tn) or -P(t1,...,tn) for P a {@link Predicate} name, and t1,...,tn + * some {@link Term}s. The number of terms corresponds to the {@link Predicate} + * arity. + * + * @author david.carral@tu-dresden.de + * @author Markus Krötzsch + */ +public abstract class AbstractLiteralImpl implements Literal { + + private final Predicate predicate; + private final List terms; + + /** + * Creates a {@link Literal} of the form "{@code predicate}({@code terms})". + * + * @param predicate non-blank predicate name + * @param terms non-empty list of non-null terms. List size must be the same + * as the predicate arity. + */ + public AbstractLiteralImpl(final Predicate predicate, final List terms) { + Validate.notNull(predicate, "Literal predicates cannot be null."); + Validate.noNullElements(terms, + "Null terms cannot appear in literals. The list contains a null at position [%d]."); + Validate.notEmpty(terms, "Literals of arity zero are not supported: please specify at least one term."); + + Validate.isTrue(terms.size() == predicate.getArity(), "Terms size [%d] does not match predicate arity [%d].", + terms.size(), predicate.getArity()); + + this.predicate = predicate; + this.terms = terms; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (this.isNegated() ? 
1231 : 1237); + result = prime * result + this.getPredicate().hashCode(); + result = prime * result + this.getArguments().hashCode(); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof Literal)) { + return false; + } + final Literal other = (Literal) obj; + + return this.isNegated() == other.isNegated() && this.getPredicate().equals(other.getPredicate()) + && this.getArguments().equals(other.getArguments()); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeLiteral(this)); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public List getArguments() { + return Collections.unmodifiableList(this.terms); + } + + @Override + public Stream getTerms() { + return getArguments().stream().distinct(); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java new file mode 100644 index 000000000..0d8de1736 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractPrefixDeclarationRegistry.java @@ -0,0 +1,155 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; + +/** + * Implementation of the common logic for prefix declaration registries. + * + * @author Maximilian Marx + */ +public abstract class AbstractPrefixDeclarationRegistry implements PrefixDeclarationRegistry { + + /** + * Pattern for strings that are permissible as local names in abbreviated forms. + */ + static public final String REGEXP_LOCNAME = "^[a-zA-Z]([/a-zA-Z0-9_-])*$"; + + /** + * Map associating each prefixName with the full prefixIri. + */ + protected Map prefixes = new HashMap<>(); + + /** + * Iri holding the base namespace. 
+ */
+ protected String baseIri = null;
+
+ @Override
+ public void clear() {
+ baseIri = null;
+ prefixes = new HashMap<>();
+ }
+
+ @Override
+ public String getBaseIri() {
+ if (baseIri == null) {
+ baseIri = PrefixDeclarationRegistry.EMPTY_BASE;
+ }
+
+ return baseIri;
+ }
+
+ @Override
+ public String getPrefixIri(String prefixName) throws PrefixDeclarationException {
+ if (!prefixes.containsKey(prefixName)) {
+ throw new PrefixDeclarationException(
+ "Prefix \"" + prefixName + "\" cannot be resolved (not declared yet).");
+ }
+
+ return prefixes.get(prefixName);
+ }
+
+ @Override
+ public void unsetPrefix(String prefixName) {
+ prefixes.remove(prefixName);
+ }
+
+ @Override
+ public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException {
+ int colon = prefixedName.indexOf(":");
+ String prefix = prefixedName.substring(0, colon + 1);
+ String suffix = prefixedName.substring(colon + 1);
+
+ return getPrefixIri(prefix) + suffix;
+ }
+
+ @Override
+ public String absolutizeIri(String potentiallyRelativeIri) throws PrefixDeclarationException {
+ URI relative;
+
+ try {
+ relative = new URI(potentiallyRelativeIri);
+ } catch (URISyntaxException e) {
+ throw new PrefixDeclarationException("Failed to parse IRI", e);
+ }
+
+ if (relative.isAbsolute()) {
+ return potentiallyRelativeIri;
+ } else {
+ return getBaseIri() + potentiallyRelativeIri;
+ }
+ }
+
+ @Override
+ public String unresolveAbsoluteIri(String iri, boolean addIriBrackets) {
+ String shortestIri;
+ if (addIriBrackets) {
+ if (!iri.contains(":") && iri.matches(REGEXP_LOCNAME)) {
+ shortestIri = iri;
+ if (baseIri != null && !PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri)) {
+ throw new RulewerkRuntimeException("Relative IRIs cannot be serialized when a base is declared.");
+ }
+ } else {
+ shortestIri = "<" + iri + ">";
+ }
+ } else {
+ shortestIri = iri;
+ }
+
+ String baseIri = getBaseIri();
+
+ if (!PrefixDeclarationRegistry.EMPTY_BASE.equals(baseIri) && iri.length() > baseIri.length()
+ && iri.startsWith(baseIri)) {
+ String shorterIri = iri.substring(baseIri.length());
+ // Only allow very simple names of this form, to avoid confusion, e.g., with
+ // numbers or boolean literals:
+ if (shorterIri.matches(REGEXP_LOCNAME) && !"true".equals(shorterIri) && !"false".equals(shorterIri)) {
+ shortestIri = shorterIri;
+ }
+ }
+
+ for (Map.Entry<String, String> entry : prefixes.entrySet()) {
+ int localNameLength = iri.length() - entry.getValue().length();
+ if (localNameLength > 0 && shortestIri.length() > localNameLength + entry.getKey().length()
+ && iri.startsWith(entry.getValue())) {
+ shortestIri = entry.getKey() + iri.substring(entry.getValue().length());
+ }
+ }
+
+ return shortestIri;
+ }
+
+ @Override
+ public Iterator<Entry<String, String>> iterator() {
+ return this.prefixes.entrySet().iterator();
+ }
+}
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java
similarity index 75%
rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java
rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java
index ff5a4060f..e6c037d43 100644
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AbstractTermImpl.java
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/AbstractTermImpl.java
@@ -1,15 +1,13 @@
-package
org.semanticweb.vlog4j.core.model.implementation; +package org.semanticweb.rulewerk.core.model.implementation; import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.Term; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,9 +24,7 @@ */ /** - * Abstract class implementing all methods used by all types of terms - * ({@link TermType##CONSTANT}, {@link TermType##BLANK}, and - * {@link TermType##VARIABLE}). + * Abstract class implementing all methods used by all types of terms. * * @author david.carral@tu-dresden.de */ @@ -36,7 +32,7 @@ public abstract class AbstractTermImpl implements Term { private final String name; - public AbstractTermImpl(@NonNull final String name) { + public AbstractTermImpl(final String name) { Validate.notBlank(name, "Terms cannot be named by blank strings"); this.name = new String(name); } @@ -46,9 +42,6 @@ public String getName() { return this.name; } - @Override - public abstract @NonNull TermType getType(); - @Override public int hashCode() { final int prime = 31; diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java new file mode 100644 index 000000000..6da458d75 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImpl.java @@ -0,0 +1,93 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.stream.Stream; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Term; + +/** + * Simple implementation of {@link Conjunction}. + * + * @author Markus Krötzsch + */ +public class ConjunctionImpl implements Conjunction { + + final List literals; + + /** + * Constructor. + * + * @param literals a non-null list of literals, that cannot contain null + * elements. 
+ */ + public ConjunctionImpl(List literals) { + Validate.noNullElements(literals); + this.literals = literals; + } + + @Override + public List getLiterals() { + return Collections.unmodifiableList(this.literals); + } + + @Override + public Stream getTerms() { + return this.literals.stream().flatMap(l -> l.getTerms()).distinct(); + } + + @Override + public int hashCode() { + return this.literals.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof Conjunction)) { + return false; + } + final Conjunction other = (Conjunction) obj; + return this.literals.equals(other.getLiterals()); + } + + @Override + public Iterator iterator() { + return getLiterals().iterator(); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeLiteralConjunction(this)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java new file mode 100644 index 000000000..c031d5af4 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationImpl.java @@ -0,0 +1,91 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; + +/** + * Basic implementation for {@link DataSourceDeclaration}. 
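ConjunctionImpl.getTerms() above flattens the term streams of all its literals and deduplicates them via distinct(), so a variable shared between conjuncts is reported once. A small sketch, using the Expressions factory defined later in this diff:

import org.semanticweb.rulewerk.core.model.api.Conjunction;
import org.semanticweb.rulewerk.core.model.api.Literal;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class ConjunctionSketch {
	public static void main(String[] args) {
		Conjunction<Literal> body = Expressions.makeConjunction(
				Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("x")),
				Expressions.makePositiveLiteral("q", Expressions.makeUniversalVariable("x")));
		// ?x occurs in both literals but is streamed only once:
		System.out.println(body.getTerms().count()); // 1
	}
}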
+ * + * @author Markus Kroetzsch + * + */ +public class DataSourceDeclarationImpl implements DataSourceDeclaration { + + final Predicate predicate; + final DataSource dataSource; + + public DataSourceDeclarationImpl(Predicate predicate, DataSource dataSource) { + Validate.notNull(predicate, "Predicate cannot be null."); + Validate.notNull(dataSource, "Data source cannot be null."); + this.predicate = predicate; + this.dataSource = dataSource; + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return this.dataSource; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = this.predicate.hashCode(); + result = prime * result + this.dataSource.hashCode(); + return result; + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof DataSourceDeclaration)) { + return false; + } + final DataSourceDeclaration other = (DataSourceDeclaration) obj; + + return (this.predicate.equals(other.getPredicate())) && this.dataSource.equals(other.getDataSource()); + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeDataSourceDeclaration(this)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java new file mode 100644 index 000000000..f4810bee4 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/DatatypeConstantImpl.java @@ -0,0 +1,101 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +import org.apache.commons.lang3.Validate; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; + +/** + * Simple implementation of {@link DatatypeConstant}. 
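A data source declaration simply ties a predicate to an external data source. A hypothetical sketch, assuming a concrete DataSource implementation such as rulewerk-core's CsvFileDataSource; its package and constructor signature are not part of this diff, so they are assumptions here:

import java.io.File;

import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
import org.semanticweb.rulewerk.core.model.api.Predicate;
import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;
import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;

public class DataSourceSketch {
	public static void main(String[] args) throws Exception {
		Predicate edge = Expressions.makePredicate("edge", 2);
		// Constructor signature assumed for illustration:
		DataSourceDeclaration declaration = new DataSourceDeclarationImpl(edge,
				new CsvFileDataSource(new File("edges.csv")));
		// Serialized via the Serializer, as in toString() above:
		System.out.println(declaration);
	}
}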
+ * + * @author Markus Kroetzsch + * + */ +public class DatatypeConstantImpl implements DatatypeConstant { + + final String datatype; + final String lexicalValue; + + public DatatypeConstantImpl(String lexicalValue, String datatype) { + Validate.notNull(lexicalValue); + Validate.notBlank(datatype, "Datatype IRIs cannot be blank strings."); + this.lexicalValue = lexicalValue; + this.datatype = datatype; + } + + @Override + public T accept(TermVisitor termVisitor) { + return termVisitor.visit(this); + } + + @Override + public String getDatatype() { + return this.datatype; + } + + @Override + public String getLexicalValue() { + return this.lexicalValue; + } + + @Override + public String getRdfLiteralString(boolean useAbbreviations) { + if (useAbbreviations) { + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); + } else { + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstantNoAbbreviations(this)); + } + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeDatatypeConstant(this)); + } + + @Override + public String getName() { + return toString(); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = datatype.hashCode(); + result = prime * result + lexicalValue.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + DatatypeConstantImpl other = (DatatypeConstantImpl) obj; + + return this.lexicalValue.equals(other.getLexicalValue()) && this.datatype.equals(other.getDatatype()); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java new file mode 100644 index 000000000..3a130ee21 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/ExistentialVariableImpl.java @@ -0,0 +1,51 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; + +/** + * Simple implementation of {@link ExistentialVariable}. + * + * @author Markus Kroetzsch + */ +public class ExistentialVariableImpl extends AbstractTermImpl implements ExistentialVariable { + + /** + * Constructor. + * + * @param name cannot be a blank String (null, empty or whitespace). 
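getRdfLiteralString above switches between an abbreviated and a full RDF literal serialization; the exact abbreviated output depends on the Serializer, which lies outside this excerpt. A minimal sketch:

import org.semanticweb.rulewerk.core.model.api.DatatypeConstant;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class DatatypeConstantSketch {
	public static void main(String[] args) {
		DatatypeConstant c = Expressions.makeDatatypeConstant("42",
				"http://www.w3.org/2001/XMLSchema#integer");
		// The full form is always "lexicalValue"^^<datatypeIri>:
		System.out.println(c.getRdfLiteralString(false));
		// The abbreviated form may shorten well-known datatypes (Serializer-dependent):
		System.out.println(c.getRdfLiteralString(true));
	}
}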
+ */ + public ExistentialVariableImpl(final String name) { + super(name); + } + + @Override + public T accept(TermVisitor termVisitor) { + return termVisitor.visit(this); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeExistentialVariable(this)); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java new file mode 100644 index 000000000..1d465c4b1 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Expressions.java @@ -0,0 +1,363 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; + +/** + * This utilities class provides static methods for creating terms and formulas + * in Rulewerk. + * + * @author Markus Krötzsch + * + */ + +public final class Expressions { + + /** + * Private constructor prevents this utilities class to be instantiated. + */ + private Expressions() { + } + + /** + * Creates a {@link UniversalVariable}. + * + * @param name name of the variable + * @return a {@link UniversalVariable} corresponding to the input. + */ + public static UniversalVariable makeUniversalVariable(String name) { + return new UniversalVariableImpl(name); + } + + /** + * Creates an {@link ExistentialVariable}. + * + * @param name name of the variable + * @return a {@link ExistentialVariable} corresponding to the input. + */ + public static ExistentialVariable makeExistentialVariable(String name) { + return new ExistentialVariableImpl(name); + } + + /** + * Creates an {@link AbstractConstant}. + * + * @param name name of the constant + * @return an {@link AbstractConstant} corresponding to the input. 
+ */
+ public static AbstractConstant makeAbstractConstant(String name) {
+ return new AbstractConstantImpl(name);
+ }
+
+ /**
+ * Creates a {@link DatatypeConstant} from the given input.
+ *
+ * @param lexicalValue the lexical representation of the data value
+ * @param datatypeIri the full absolute IRI of the datatype of this literal
+ * @return a {@link DatatypeConstant} corresponding to the input.
+ */
+ public static DatatypeConstant makeDatatypeConstant(String lexicalValue, String datatypeIri) {
+ return new DatatypeConstantImpl(lexicalValue, datatypeIri);
+ }
+
+ /**
+ * Creates a {@link LanguageStringConstant} from the given input.
+ *
+ * @param string the string value of the constant
+ * @param languageTag the BCP 47 language tag of the constant; should be in
+ * lower case
+ * @return a {@link LanguageStringConstant} corresponding to the input.
+ */
+ public static LanguageStringConstant makeLanguageStringConstant(String string, String languageTag) {
+ return new LanguageStringConstantImpl(string, languageTag);
+ }
+
+ /**
+ * Creates a {@link Predicate}.
+ *
+ * @param name non-blank predicate name
+ * @param arity predicate arity, strictly greater than 0
+ * @return a {@link Predicate} corresponding to the input.
+ */
+ public static Predicate makePredicate(String name, int arity) {
+ return new PredicateImpl(name, arity);
+ }
+
+ /**
+ * Creates a {@code Fact}.
+ *
+ * @param predicateName non-blank {@link Predicate} name
+ * @param terms non-empty, non-null list of non-null terms that are
+ * constants
+ * @return a {@link Fact} with the given {@code terms} and a {@link Predicate}
+ * constructed from the given {@code predicateName}, with arity equal
+ * to the given {@code terms} size.
+ */
+ public static Fact makeFact(final String predicateName, final List<Term> terms) {
+ final Predicate predicate = makePredicate(predicateName, terms.size());
+
+ return new FactImpl(predicate, terms);
+ }
+
+ /**
+ * Creates a {@code Fact}.
+ *
+ * @param predicateName non-blank {@link Predicate} name
+ * @param terms non-empty, non-null array of non-null terms
+ * @return a {@link Fact} with the given {@code terms} and a {@link Predicate}
+ * constructed from the given {@code predicateName}, with arity equal
+ * to the given {@code terms} length.
+ */
+ public static Fact makeFact(final String predicateName, Term... terms) {
+ final Predicate predicate = makePredicate(predicateName, terms.length);
+
+ return new FactImpl(predicate, Arrays.asList(terms));
+ }
+
+ /**
+ * Creates a {@code Fact}.
+ *
+ * @param predicate a non-null {@link Predicate}
+ * @param terms non-empty, non-null list of non-null terms. List size must
+ * be the same as the given {@code predicate} arity.
+ * @return a {@link Fact} corresponding to the input.
+ */
+ public static Fact makeFact(final Predicate predicate, final List<Term> terms) {
+ return new FactImpl(predicate, terms);
+ }
+
+ /**
+ * Creates a {@code Fact}.
+ *
+ * @param predicate a non-null {@link Predicate}
+ * @param terms non-empty, non-null array of non-null terms. Array size must
+ * be the same as the given {@code predicate} arity.
+ * @return a {@link Fact} corresponding to the input.
+ */
+ public static Fact makeFact(final Predicate predicate, final Term... terms) {
+ return new FactImpl(predicate, Arrays.asList(terms));
+ }
+
+ /**
+ * Creates a {@code PositiveLiteral}.
+ * + * @param predicateName non-blank {@link Predicate} name + * @param terms non-empty, non-null list of non-null terms + * @return a {@link PositiveLiteral} with given {@code terms} and + * {@link Predicate} constructed from name given {@code predicateName} + * and {@code arity} given {@code terms} size. + */ + public static PositiveLiteral makePositiveLiteral(final String predicateName, final List terms) { + final Predicate predicate = makePredicate(predicateName, terms.size()); + + return new PositiveLiteralImpl(predicate, terms); + } + + /** + * Creates a {@code PositiveLiteral}. + * + * @param predicateName non-blank {@link Predicate} name + * @param terms non-empty, non-null array of non-null terms + * @return a {@link PositiveLiteral} with given {@code terms} and + * {@link Predicate} constructed from name given {@code predicateName} + * and {@code arity} given {@code terms} length. + */ + public static PositiveLiteral makePositiveLiteral(final String predicateName, final Term... terms) { + final Predicate predicate = makePredicate(predicateName, terms.length); + + return new PositiveLiteralImpl(predicate, Arrays.asList(terms)); + } + + /** + * Creates a {@code PositiveLiteral}. + * + * @param predicate a non-null {@link Predicate} + * @param terms non-empty, non-null list of non-null terms. List size must + * be the same as the given {@code predicate} arity. + * @return a {@link PositiveLiteral} corresponding to the input. + */ + public static PositiveLiteral makePositiveLiteral(final Predicate predicate, final List terms) { + return new PositiveLiteralImpl(predicate, terms); + } + + /** + * Creates a {@code PositiveLiteral}. + * + * @param predicate a non-null {@link Predicate} + * @param terms non-empty, non-null array of non-null terms. Array size must + * be the same as the given {@code predicate} arity. + * @return a {@link PositiveLiteral} corresponding to the input + */ + public static PositiveLiteral makePositiveLiteral(final Predicate predicate, final Term... terms) { + return new PositiveLiteralImpl(predicate, Arrays.asList(terms)); + } + + /** + * Creates a {@code NegativeLiteral}. + * + * @param predicateName non-blank {@link Predicate} name + * @param terms non-empty, non-null list of non-null terms + * @return a {@link NegativeLiteral} with given {@code terms} and + * {@link Predicate} constructed from name given {@code predicateName} + * and {@code arity} given {@code terms} size. + */ + public static NegativeLiteral makeNegativeLiteral(final String predicateName, final List terms) { + final Predicate predicate = makePredicate(predicateName, terms.size()); + + return new NegativeLiteralImpl(predicate, terms); + } + + /** + * Creates a {@code NegativeLiteral}. + * + * @param predicateName non-blank {@link Predicate} name + * @param terms non-empty, non-null array of non-null terms + * @return a {@link NegativeLiteral} with given {@code terms} and + * {@link Predicate} constructed from name given {@code predicateName} + * and {@code arity} given {@code terms} length. + */ + public static NegativeLiteral makeNegativeLiteral(final String predicateName, final Term... terms) { + final Predicate predicate = makePredicate(predicateName, terms.length); + + return new NegativeLiteralImpl(predicate, Arrays.asList(terms)); + } + + /** + * Creates a {@code NegativeLiteral}. + * + * @param predicate a non-null {@link Predicate} + * @param terms non-empty, non-null list of non-null terms. List size must + * be the same as the given {@code predicate} arity. 
+ * @return a {@link NegativeLiteral} corresponding to the input. + */ + public static NegativeLiteral makeNegativeLiteral(final Predicate predicate, final List terms) { + return new NegativeLiteralImpl(predicate, terms); + } + + /** + * Creates a {@code NegativeLiteral}. + * + * @param predicate a non-null {@link Predicate} + * @param terms non-empty, non-null array of non-null terms. Array size must + * be the same as the given {@code predicate} arity. + * @return a {@link NegativeLiteral} corresponding to the input + */ + public static NegativeLiteral makeNegativeLiteral(final Predicate predicate, final Term... terms) { + return new NegativeLiteralImpl(predicate, Arrays.asList(terms)); + } + + /** + * Creates a {@link Conjunction} of {@code T} ({@link Literal} type) objects. + * + * @param literals list of non-null literals + * @return a {@link Conjunction} corresponding to the input + */ + public static Conjunction makeConjunction(final List literals) { + return new ConjunctionImpl<>(literals); + } + + /** + * Creates a {@code Conjunction} of {@link Literal} objects. + * + * @param literals array of non-null literals + * @return a {@link Conjunction} corresponding to the input + */ + public static Conjunction makeConjunction(final Literal... literals) { + return new ConjunctionImpl<>(Arrays.asList(literals)); + } + + /** + * Creates a {@code Conjunction} of {@code T} ({@link PositiveLiteral} type) + * objects. + * + * @param literals list of non-null positive literals + * @return a {@link Conjunction} corresponding to the input + */ + public static Conjunction makePositiveConjunction(final List literals) { + return new ConjunctionImpl<>(literals); + } + + /** + * Creates a {@code Conjunction} of {@link PositiveLiteral} objects. + * + * @param literals array of non-null positive literals + * @return a {@link Conjunction} corresponding to the input + */ + public static Conjunction makePositiveConjunction(final PositiveLiteral... literals) { + return new ConjunctionImpl<>(Arrays.asList(literals)); + } + + /** + * Creates a {@code Rule} with a single atom in its head. + * + * @param headLiteral the single positive literal in the rule head + * @param bodyLiterals array of non-null literals + * @return a {@link Rule} corresponding to the input + */ + public static Rule makeRule(final PositiveLiteral headLiteral, final Literal... bodyLiterals) { + return new RuleImpl(new ConjunctionImpl<>(Arrays.asList(headLiteral)), + new ConjunctionImpl<>(Arrays.asList(bodyLiterals))); + } + + /** + * Creates a {@code Rule}. + * + * @param head conjunction of positive (non-negated) literals + * @param body conjunction of literals (negated or not) + * @return a {@link Rule} corresponding to the input + */ + public static Rule makeRule(final Conjunction head, final Conjunction body) { + return new RuleImpl(head, body); + } + + /** + * Creates a {@link Rule}. 
+ * + * @param head conjunction of positive (non-negated) literals + * @param body conjunction of positive (non-negated) literals + * @return a {@link Rule} corresponding to the input + */ + public static Rule makePositiveLiteralsRule(final Conjunction head, + final Conjunction body) { + final List bodyLiteralList = new ArrayList<>(body.getLiterals()); + final Conjunction literalsBody = makeConjunction(bodyLiteralList); + return new RuleImpl(head, literalsBody); + + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java new file mode 100644 index 000000000..aa984d178 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/FactImpl.java @@ -0,0 +1,57 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; + +/** + * Standard implementation of the {@link Fact} interface. + * + * @author Markus Kroetzsch + * + */ +public class FactImpl extends PositiveLiteralImpl implements Fact { + + public FactImpl(final Predicate predicate, final List terms) { + super(predicate, terms); + for (final Term t : terms) { + if (t.isVariable()) { + throw new IllegalArgumentException("Facts cannot contain variables."); + } + } + } + + @Override + public T accept(final StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeFact(this)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java new file mode 100644 index 000000000..e7ab0f8dd --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/LanguageStringConstantImpl.java @@ -0,0 +1,90 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
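With the Expressions factory complete and FactImpl in place, assembling a small program is one factory call per statement. A sketch combining a fact, a negated body literal, and an existentially quantified head variable (the rule syntax in the comment is informal):

import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.model.api.Rule;
import org.semanticweb.rulewerk.core.model.api.UniversalVariable;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class RuleSketch {
	public static void main(String[] args) {
		// Fact: predicate "car" gets arity 1 from the number of terms; facts
		// containing variables are rejected by FactImpl with IllegalArgumentException.
		Fact fact = Expressions.makeFact("car", Expressions.makeAbstractConstant("c1"));

		// Rule: hasPart(?x, !y) :- car(?x), ~broken(?x)
		UniversalVariable x = Expressions.makeUniversalVariable("x");
		PositiveLiteral head = Expressions.makePositiveLiteral("hasPart", x,
				Expressions.makeExistentialVariable("y"));
		Rule rule = Expressions.makeRule(head,
				Expressions.makePositiveLiteral("car", x),
				Expressions.makeNegativeLiteral("broken", x));

		System.out.println(fact);
		System.out.println(rule);
	}
}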
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; + +/** + * Simple implementation of {@link LanguageStringConstant}. + * + * @author Markus Kroetzsch + * + */ +public class LanguageStringConstantImpl implements LanguageStringConstant { + + final String string; + final String lang; + + public LanguageStringConstantImpl(String string, String languageTag) { + Validate.notNull(string); + Validate.notBlank(languageTag, "Language tags cannot be blank strings."); + this.string = string; + this.lang = languageTag; + } + + @Override + public String getName() { + return toString(); + } + + @Override + public T accept(TermVisitor termVisitor) { + return termVisitor.visit(this); + } + + @Override + public String getString() { + return this.string; + } + + @Override + public String getLanguageTag() { + return this.lang; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = lang.hashCode(); + result = prime * result + string.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + LanguageStringConstantImpl other = (LanguageStringConstantImpl) obj; + return this.string.equals(other.getString()) && this.lang.equals(other.getLanguageTag()); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeLanguageStringConstant(this)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java new file mode 100644 index 000000000..3413f19c4 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistry.java @@ -0,0 +1,129 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Map.Entry; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; + +/** + * Implementation of {@link PrefixDeclarationRegistry} that is suitable for + * incrementally parsing from multiple sources. 
When trying to merge in
+ * conflicting prefix declarations, a fresh non-conflicting prefix is generated
+ * instead.
+ *
+ * @author Maximilian Marx
+ */
+ public final class MergingPrefixDeclarationRegistry extends AbstractPrefixDeclarationRegistry {
+ /**
+ * Next index to use for generated prefix names.
+ */
+ private Integer nextIndex = 0;
+
+ /**
+ * Template string to use for generated prefix names.
+ */
+ private static final String GENERATED_PREFIX_TEMPLATE = "rw_gen%d"
+ + PrefixDeclarationRegistry.PREFIX_NAME_SEPARATOR;
+
+ public MergingPrefixDeclarationRegistry() {
+ super();
+ }
+
+ public MergingPrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDeclarations) {
+ super();
+ mergePrefixDeclarations(prefixDeclarations);
+ }
+
+ /**
+ * Sets the base namespace to the given value. If a base IRI has already been
+ * set, one of the two is kept as a prefix declaration under a freshly
+ * generated prefix name.
+ *
+ * @param baseIri the new base namespace.
+ */
+ @Override
+ public void setBaseIri(String baseIri) {
+ Validate.notNull(baseIri, "baseIri must not be null");
+ if (baseIri.equals(this.baseIri)) {
+ return;
+ }
+
+ if (this.baseIri == null) {
+ this.baseIri = baseIri;
+ } else if (PrefixDeclarationRegistry.EMPTY_BASE.equals(this.baseIri)) {
+ // we need to keep the empty base, so that we don't
+ // accidentally relativise absolute IRIs to
+ // baseIri. Hence, introduce baseIri as a fresh prefix.
+ prefixes.put(getFreshPrefix(), baseIri);
+ } else {
+ prefixes.put(getFreshPrefix(), this.baseIri);
+ this.baseIri = baseIri;
+ }
+ }
+
+ /**
+ * Registers a prefix declaration. If prefixName is already registered for
+ * another IRI, a freshly generated name is used instead.
+ *
+ * @param prefixName the name of the prefix.
+ * @param prefixIri the IRI of the prefix.
+ */
+ @Override
+ public void setPrefixIri(String prefixName, String prefixIri) {
+ String name;
+ if (prefixes.containsKey(prefixName) && !prefixIri.equals(prefixes.get(prefixName))) {
+ name = getFreshPrefix();
+ } else {
+ name = prefixName;
+ }
+ prefixes.put(name, prefixIri);
+ }
+
+ /**
+ * Merges another set of prefix declarations into this one.
+ *
+ * @param other the set of prefix declarations to merge. Conflicting prefixes
+ * from {@code other} will be renamed.
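The effect of setPrefixIri above is that re-declaring a prefix name for a different IRI never overwrites the old binding; the new IRI is parked under a generated name instead. A sketch (the generated name follows the rw_gen%d template above, with the separator taken from PrefixDeclarationRegistry):

import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry;

public class PrefixMergeSketch {
	public static void main(String[] args) throws Exception {
		MergingPrefixDeclarationRegistry registry = new MergingPrefixDeclarationRegistry();
		registry.setPrefixIri("ex:", "http://example.org/");
		// Conflicting re-declaration: "ex:" keeps its IRI, and the new IRI is
		// registered under a fresh generated name (e.g., "rw_gen0:"):
		registry.setPrefixIri("ex:", "http://other.example.org/");
		System.out.println(registry.resolvePrefixedName("ex:a")); // http://example.org/a
	}
}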
+ */ + public void mergePrefixDeclarations(final PrefixDeclarationRegistry other) { + this.setBaseIri(other.getBaseIri()); + + for (Entry prefix : other) { + this.setPrefixIri(prefix.getKey(), prefix.getValue()); + } + } + + private String getNextFreshPrefixCandidate() { + return String.format(GENERATED_PREFIX_TEMPLATE, this.nextIndex++); + } + + private String getFreshPrefix() { + while (true) { + String candidate = getNextFreshPrefixCandidate(); + + if (!prefixes.containsKey(candidate)) { + return candidate; + } + } + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/VariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java similarity index 50% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/VariableImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java index d94d3c1a4..0dae674db 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/VariableImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NamedNullImpl.java @@ -1,14 +1,10 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.Variable; +package org.semanticweb.rulewerk.core.model.implementation; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,30 +20,27 @@ * #L% */ +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; + /** - * Implements {@link #VARIABLE} terms. A variable is a parameter that stands for - * an arbitrary domain element. + * Implements {@link NamedNull} terms. A null is an entity used to represent + * anonymous domain elements introduced during the reasoning process to satisfy + * existential restrictions. * * @author david.carral@tu-dresden.de */ -public class VariableImpl extends AbstractTermImpl implements Variable { +public class NamedNullImpl extends AbstractTermImpl implements NamedNull { /** - * Instantiates a {@code VariableImpl} object with the name - * {@code name}. + * Constructor. * - * @param name - * cannot be a blank String (null, empty or whitespace). + * @param name String that is not blank (null, empty, or whitespace). */ - public VariableImpl(final String name) { + public NamedNullImpl(final String name) { super(name); } - @Override - public TermType getType() { - return TermType.VARIABLE; - } - @Override public T accept(TermVisitor termVisitor) { return termVisitor.visit(this); @@ -55,6 +48,6 @@ public T accept(TermVisitor termVisitor) { @Override public String toString() { - return "?" 
+ this.getName(); + return Serializer.getSerialization(serializer -> serializer.writeNamedNull(this)); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java new file mode 100644 index 000000000..d4efc8496 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImpl.java @@ -0,0 +1,35 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; + +public class NegativeLiteralImpl extends AbstractLiteralImpl implements NegativeLiteral { + + public NegativeLiteralImpl(final Predicate predicate, final List terms) { + super(predicate, terms); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java new file mode 100644 index 000000000..dc0892e78 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImpl.java @@ -0,0 +1,34 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
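Both literal implementations inherit equals and hashCode from AbstractLiteralImpl, so equality is structural over polarity, predicate, and arguments. A minimal sketch:

import org.semanticweb.rulewerk.core.model.api.Literal;
import org.semanticweb.rulewerk.core.model.implementation.Expressions;

public class LiteralEqualitySketch {
	public static void main(String[] args) {
		Literal p1 = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("x"));
		Literal p2 = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("x"));
		Literal np = Expressions.makeNegativeLiteral("p", Expressions.makeUniversalVariable("x"));
		System.out.println(p1.equals(p2)); // true: same polarity, predicate, and arguments
		System.out.println(p1.equals(np)); // false: isNegated() differs
	}
}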
+ * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; + +public class PositiveLiteralImpl extends AbstractLiteralImpl implements PositiveLiteral { + + public PositiveLiteralImpl(final Predicate predicate, final List terms) { + super(predicate, terms); + } +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java similarity index 74% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java index 3ad38a877..accb87bc4 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/PredicateImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImpl.java @@ -1,95 +1,92 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Predicate; - -/** - * Implementation for {@link Predicate}. Supports predicates of arity 1 or - * higher. - * - * @author Irina Dragoste - * - */ -public class PredicateImpl implements Predicate { - - final private String name; - - final private int arity; - - /** - * Constructor for {@link Predicate}s of arity 1 or higher. - * - * @param name - * a non-blank String (not null, nor empty or whitespace). - * @param arity - * an int value strictly greater than 0. 
- */ - public PredicateImpl(@NonNull String name, int arity) { - Validate.notBlank(name, "Predicates cannot be named by blank Strings."); - Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); - - this.name = name; - this.arity = arity; - } - - @Override - public String getName() { - return this.name; - } - - @Override - public int getArity() { - return this.arity; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = this.arity; - result = prime * result + this.name.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof Predicate)) { - return false; - } - final Predicate other = (Predicate) obj; - - return this.arity == other.getArity() && this.name.equals(other.getName()); - } - - @Override - public String toString() { - return "PredicateImpl [name=" + this.name + ", arity=" + this.arity + "]"; - } - -} +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Predicate; + +/** + * Implementation for {@link Predicate}. Supports predicates of arity 1 or + * higher. + * + * @author Irina Dragoste + * + */ +public class PredicateImpl implements Predicate { + + final private String name; + + final private int arity; + + /** + * Constructor for {@link Predicate}s of arity 1 or higher. + * + * @param name a non-blank String (not null, nor empty or whitespace). + * @param arity an int value strictly greater than 0. 
+ */ + public PredicateImpl(final String name, int arity) { + Validate.notBlank(name, "Predicates cannot be named by blank Strings."); + Validate.isTrue(arity > 0, "Predicate arity must be greater than zero: %d", arity); + + this.name = name; + this.arity = arity; + } + + @Override + public String getName() { + return this.name; + } + + @Override + public int getArity() { + return this.arity; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = this.arity; + result = prime * result + this.name.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof Predicate)) { + return false; + } + final Predicate other = (Predicate) obj; + + return this.arity == other.getArity() && this.name.equals(other.getName()); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writePredicate(this)); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java new file mode 100644 index 000000000..140ac7532 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RenamedNamedNull.java @@ -0,0 +1,42 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +import java.util.UUID; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; + +/** + * A {@link NamedNull} term that has been renamed during parsing. + * + * @author Maximilian Marx + */ +public class RenamedNamedNull extends NamedNullImpl { + /** + * Construct a new renamed named null, with the given UUID as a name. + * + * @param name the name of the named null. + */ + public RenamedNamedNull(UUID name) { + super(name.toString()); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java new file mode 100644 index 000000000..15f35d27e --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/RuleImpl.java @@ -0,0 +1,131 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +import java.util.Set; +import java.util.stream.Collectors; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
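Since the equals() method above compares against the Predicate interface rather than a concrete class, predicate objects from different implementations can be mixed freely, e.g., as map keys. A minimal sketch of the constructor validation and the equality contract, using only classes shown in this patch:

	Predicate p1 = new PredicateImpl("hasPart", 2);
	Predicate p2 = new PredicateImpl("hasPart", 2);
	assert p1.equals(p2) && p1.hashCode() == p2.hashCode(); // identity is name plus arity

	try {
		new PredicateImpl(" ", 2); // blank name: Validate.notBlank rejects it
	} catch (IllegalArgumentException e) {
		// expected: "Predicates cannot be named by blank Strings."
	}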
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.stream.Stream;
+
+import org.apache.commons.lang3.Validate;
+import org.semanticweb.rulewerk.core.model.api.Conjunction;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.StatementVisitor;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.UniversalVariable;
+
+/**
+ * Implementation for {@link Rule}. Represents rules with non-empty heads and
+ * bodies.
+ *
+ * @author Irina Dragoste
+ *
+ */
+public class RuleImpl implements Rule {
+
+	final Conjunction<Literal> body;
+	final Conjunction<PositiveLiteral> head;
+
+	/**
+	 * Creates a Rule with a non-empty body and a non-empty head. All variables in
+	 * the body must be universally quantified; all variables in the head that do
+	 * not occur in the body must be existentially quantified.
+	 *
+	 * @param head list of positive (non-negated) Literals representing the rule
+	 *             head conjuncts.
+	 * @param body list of Literals (negated or non-negated) representing the rule
+	 *             body conjuncts.
+	 */
+	public RuleImpl(final Conjunction<PositiveLiteral> head, final Conjunction<Literal> body) {
+		Validate.notNull(head);
+		Validate.notNull(body);
+		Validate.notEmpty(body.getLiterals(),
+				"Empty rule body not supported. Use Fact objects to assert unconditionally true atoms.");
+		Validate.notEmpty(head.getLiterals(),
+				"Empty rule head not supported. To capture integrity constraints, use a dedicated predicate that represents a contradiction.");
+		if (body.getExistentialVariables().count() > 0) {
+			throw new IllegalArgumentException(
+					"Rule body cannot contain existential variables. Rule was: " + head + " :- " + body);
+		}
+		Set<UniversalVariable> bodyVariables = body.getUniversalVariables().collect(Collectors.toSet());
+		if (head.getUniversalVariables().filter(x -> !bodyVariables.contains(x)).count() > 0) {
+			throw new IllegalArgumentException(
+					"Universally quantified variables in rule head must also occur in rule body. Rule was: " + head
+							+ " :- " + body);
+		}
+
+		this.head = head;
+		this.body = body;
+
+	}
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = this.body.hashCode();
+		result = prime * result + this.head.hashCode();
+		return result;
+	}
+
+	@Override
+	public boolean equals(final Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (!(obj instanceof Rule)) {
+			return false;
+		}
+		final Rule other = (Rule) obj;
+
+		return this.head.equals(other.getHead()) && this.body.equals(other.getBody());
+	}
+
+	@Override
+	public String toString() {
+		return Serializer.getSerialization(serializer -> serializer.writeRule(this));
+	}
+
+	@Override
+	public Conjunction<PositiveLiteral> getHead() {
+		return this.head;
+	}
+
+	@Override
+	public Conjunction<Literal> getBody() {
+		return this.body;
+	}
+
+	@Override
+	public <T> T accept(StatementVisitor<T> statementVisitor) {
+		return statementVisitor.visit(this);
+	}
+
+	@Override
+	public Stream<Term> getTerms() {
+		return Stream.concat(this.body.getTerms(), this.head.getTerms()).distinct();
+	}
+
+}
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java
new file mode 100644
index 000000000..258d0f7f9
--- /dev/null
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/Serializer.java
@@ -0,0 +1,594 @@
+package org.semanticweb.rulewerk.core.model.implementation;
+
+import java.io.IOException;
+import java.io.StringWriter;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
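The two IllegalArgumentException checks above make rule safety explicit: bodies may not contain existential variables, and every universal head variable must be bound in the body. A hedged sketch of the second check (ConjunctionImpl is assumed here as the patch's Conjunction implementation; it is not shown in this excerpt):

	TermFactory factory = new TermFactory();
	PositiveLiteral head = new PositiveLiteralImpl(factory.makePredicate("p", 1),
			Collections.<Term>singletonList(factory.makeUniversalVariable("X")));
	PositiveLiteral body = new PositiveLiteralImpl(factory.makePredicate("q", 1),
			Collections.<Term>singletonList(factory.makeUniversalVariable("Y")));
	// ?X occurs in the head only, so construction must fail:
	new RuleImpl(new ConjunctionImpl<>(Collections.singletonList(head)),
			new ConjunctionImpl<>(Collections.singletonList(body))); // throws IllegalArgumentException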
+ * #L%
+ */
+
+import java.io.Writer;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.function.Function;
+
+import org.semanticweb.rulewerk.core.model.api.AbstractConstant;
+import org.semanticweb.rulewerk.core.model.api.Argument;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.model.api.Conjunction;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.DatatypeConstant;
+import org.semanticweb.rulewerk.core.model.api.ExistentialVariable;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.NamedNull;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Statement;
+import org.semanticweb.rulewerk.core.model.api.StatementVisitor;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.TermVisitor;
+import org.semanticweb.rulewerk.core.model.api.UniversalVariable;
+
+/**
+ * Objects of this class are used to create string representations of syntactic
+ * objects.
+ *
+ * @see RuleWerk rule syntax
+ *
+ * @author Markus Kroetzsch
+ *
+ */
+public class Serializer {
+
+	public static final String STATEMENT_END = " .";
+
+	/**
+	 * Default IRI serializer that can be used if no abbreviations (prefixes, base,
+	 * etc.) are used.
+	 */
+	public static final Function<String, String> identityIriSerializer = new Function<String, String>() {
+		@Override
+		public String apply(final String iri) {
+			if (iri.contains(":") || !iri.matches(AbstractPrefixDeclarationRegistry.REGEXP_LOCNAME)) {
+				return "<" + iri + ">";
+			} else {
+				return iri;
+			}
+		}
+	};
+
+	/**
+	 * Interface for a method that writes something to a writer.
+	 */
+	@FunctionalInterface
+	public interface SerializationWriter {
+		void write(final Serializer serializer) throws IOException;
+	}
+
+	final Writer writer;
+	final Function<String, String> iriTransformer;
+	final SerializerTermVisitor serializerTermVisitor = new SerializerTermVisitor();
+	final SerializerStatementVisitor serializerStatementVisitor = new SerializerStatementVisitor();
+
+	/**
+	 * Runtime exception used to report errors that occurred in visitors that do
+	 * not declare checked exceptions.
+	 *
+	 * @author Markus Kroetzsch
+	 *
+	 */
+	private class RuntimeIoException extends RuntimeException {
+		private static final long serialVersionUID = 1L;
+		final IOException cause;
+
+		public RuntimeIoException(final IOException cause) {
+			super(cause);
+			this.cause = cause;
+		}
+
+		public IOException getIoException() {
+			return this.cause;
+		}
+	}
+
+	/**
+	 * Auxiliary class to visit {@link Term} objects for writing.
+ * + * @author Markus Kroetzsch + * + */ + private class SerializerTermVisitor implements TermVisitor { + + @Override + public Void visit(final AbstractConstant term) { + try { + Serializer.this.writeAbstractConstant(term); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(final DatatypeConstant term) { + try { + Serializer.this.writeDatatypeConstant(term); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(final LanguageStringConstant term) { + try { + Serializer.this.writeLanguageStringConstant(term); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(final UniversalVariable term) { + try { + Serializer.this.writeUniversalVariable(term); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(final ExistentialVariable term) { + try { + Serializer.this.writeExistentialVariable(term); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(final NamedNull term) { + try { + Serializer.this.writeNamedNull(term); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + } + + /** + * Auxiliary class to visit {@link Statement} objects for writing. + * + * @author Markus Kroetzsch + * + */ + private class SerializerStatementVisitor implements StatementVisitor { + + @Override + public Void visit(final Fact statement) { + try { + Serializer.this.writeFact(statement); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(final Rule statement) { + try { + Serializer.this.writeRule(statement); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + @Override + public Void visit(final DataSourceDeclaration statement) { + try { + Serializer.this.writeDataSourceDeclaration(statement); + } catch (final IOException e) { + throw new RuntimeIoException(e); + } + return null; + } + + } + + /** + * Construct a serializer that uses a specific function to serialize IRIs. + * + * @param writer the object used to write serializations + * @param iriTransformer a function used to abbreviate IRIs, e.g., if namespace + * prefixes were declared + */ + public Serializer(final Writer writer, final Function iriTransformer) { + this.writer = writer; + this.iriTransformer = iriTransformer; + } + + /** + * Construct a serializer that serializes IRIs without any form of + * transformation or abbreviation. + * + * @param writer the object used to write serializations + */ + public Serializer(final Writer writer) { + this(writer, identityIriSerializer); + } + + /** + * Construct a serializer that uses the given {@link PrefixDeclarationRegistry} + * to abbreviate IRIs. + * + * @param writer the object used to write serializations + * @param prefixDeclarationRegistry the object used to abbreviate IRIs + */ + public Serializer(final Writer writer, final PrefixDeclarationRegistry prefixDeclarationRegistry) { + this(writer, (string) -> { + return prefixDeclarationRegistry.unresolveAbsoluteIri(string, true); + }); + } + + /** + * Writes a serialization of the given {@link Statement}. 
+ * + * @param statement a {@link Statement} to serialize + * @throws IOException + */ + public void writeStatement(final Statement statement) throws IOException { + try { + statement.accept(this.serializerStatementVisitor); + } catch (final Serializer.RuntimeIoException e) { + throw e.getIoException(); + } + } + + /** + * Writes a serialization of the given {@link Fact}. + * + * @param fact a {@link Fact} + * @throws IOException + */ + public void writeFact(final Fact fact) throws IOException { + this.writeLiteral(fact); + this.writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Rule}. + * + * @param rule a {@link Rule} + * @throws IOException + */ + public void writeRule(final Rule rule) throws IOException { + this.writeRuleNoStatment(rule); + this.writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Rule} without the final dot. + * + * @param rule a {@link Rule} + * @throws IOException + */ + private void writeRuleNoStatment(final Rule rule) throws IOException { + this.writeLiteralConjunction(rule.getHead()); + this.writer.write(" :- "); + this.writeLiteralConjunction(rule.getBody()); + } + + /** + * Writes a serialization of the given {@link DataSourceDeclaration}. + * + * @param dataSourceDeclaration a {@link DataSourceDeclaration} + * @throws IOException + */ + public void writeDataSourceDeclaration(final DataSourceDeclaration dataSourceDeclaration) throws IOException { + this.writer.write("@source "); + this.writePredicate(dataSourceDeclaration.getPredicate()); + this.writer.write(": "); + this.writeLiteral(dataSourceDeclaration.getDataSource().getDeclarationFact()); + this.writer.write(STATEMENT_END); + } + + /** + * Writes a serialization of the given {@link Literal}. + * + * @param literal a {@link Literal} + * @throws IOException + */ + public void writeLiteral(final Literal literal) throws IOException { + if (literal.isNegated()) { + this.writer.write("~"); + } + this.writePositiveLiteral(literal.getPredicate(), literal.getArguments()); + } + + /** + * Serialize the given predicate and list of terms like a + * {@link PositiveLiteral}. + * + * @param predicate a {@link Predicate} + * @param arguments a list of {@link Term} arguments + * @throws IOException + */ + public void writePositiveLiteral(final Predicate predicate, final List arguments) throws IOException { + this.writer.write(this.getIri(predicate.getName())); + this.writer.write("("); + + boolean first = true; + for (final Term term : arguments) { + if (first) { + first = false; + } else { + this.writer.write(", "); + } + this.writeTerm(term); + } + + this.writer.write(")"); + } + + /** + * Writes a serialization of the given {@link Conjunction} of {@link Literal} + * objects. + * + * @param literals a {@link Conjunction} + * @throws IOException + */ + public void writeLiteralConjunction(final Conjunction literals) throws IOException { + boolean first = true; + for (final Literal literal : literals.getLiterals()) { + if (first) { + first = false; + } else { + this.writer.write(", "); + } + this.writeLiteral(literal); + } + } + + /** + * Writes a serialization of the given {@link Predicate}. This serialization + * specifies the name and arity of the predicate. 
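For example, writePositiveLiteral above is all that is needed to print an atom. A sketch writing to a StringWriter, using the TermFactory that appears later in this patch:

	StringWriter stringWriter = new StringWriter();
	Serializer serializer = new Serializer(stringWriter);
	TermFactory factory = new TermFactory();
	serializer.writePositiveLiteral(factory.makePredicate("hasPart", 2),
			Arrays.asList(factory.makeAbstractConstant("car"), factory.makeUniversalVariable("X")));
	// stringWriter.toString() should now read: hasPart(car, ?X)
	// (bare names, since the default identityIriSerializer only brackets IRIs
	// that contain ":" or are not plain local names)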
+ * + * @param predicate a {@link Predicate} + * @throws IOException + */ + public void writePredicate(final Predicate predicate) throws IOException { + this.writer.write(this.getIri(predicate.getName())); + this.writer.write("["); + this.writer.write(String.valueOf(predicate.getArity())); + this.writer.write("]"); + } + + /** + * Writes a serialization of the given {@link Term}. + * + * @param term a {@link Term} + * @throws IOException + */ + public void writeTerm(final Term term) throws IOException { + try { + term.accept(this.serializerTermVisitor); + } catch (final Serializer.RuntimeIoException e) { + throw e.getIoException(); + } + } + + /** + * Writes a serialization of the given {@link AbstractConstant}. + * + * @param abstractConstant a {@link AbstractConstant} + * @throws IOException + */ + public void writeAbstractConstant(final AbstractConstant abstractConstant) throws IOException { + this.writer.write(this.getIri(abstractConstant.getName())); + } + + /** + * Writes a serialization of the given {@link DatatypeConstant}. + * + * @param datatypeConstant a {@link DatatypeConstant} + * @throws IOException + */ + public void writeDatatypeConstant(final DatatypeConstant datatypeConstant) throws IOException { + if (PrefixDeclarationRegistry.XSD_STRING.equals(datatypeConstant.getDatatype())) { + this.writer.write(this.getQuotedString(datatypeConstant.getLexicalValue())); + } else if (PrefixDeclarationRegistry.XSD_INTEGER.equals(datatypeConstant.getDatatype())) { + this.writer.write(datatypeConstant.getLexicalValue()); + } else { + this.writeDatatypeConstantNoAbbreviations(datatypeConstant); + } + } + + /** + * Writes a serialization of the given {@link DatatypeConstant} without using + * any Turtle-style abbreviations for common datatypes like string and int. + * + * @param datatypeConstant a {@link DatatypeConstant} + * @throws IOException + */ + public void writeDatatypeConstantNoAbbreviations(final DatatypeConstant datatypeConstant) throws IOException { + this.writer.write(this.getQuotedString(datatypeConstant.getLexicalValue())); + this.writer.write("^^"); + this.writer.write(this.getIri(datatypeConstant.getDatatype())); + } + + /** + * Writes a serialization of the given {@link UniversalVariable}. + * + * @param universalVariable a {@link UniversalVariable} + * @throws IOException + */ + public void writeUniversalVariable(final UniversalVariable universalVariable) throws IOException { + this.writer.write("?"); + this.writer.write(universalVariable.getName()); + } + + /** + * Writes a serialization of the given {@link ExistentialVariable}. + * + * @param existentialVariable a {@link ExistentialVariable} + * @throws IOException + */ + public void writeExistentialVariable(final ExistentialVariable existentialVariable) throws IOException { + this.writer.write("!"); + this.writer.write(existentialVariable.getName()); + } + + /** + * Writes a serialization of the given {@link NamedNull}. + * + * @param namedNull a {@link NamedNull} + * @throws IOException + */ + public void writeNamedNull(final NamedNull namedNull) throws IOException { + this.writer.write("_:"); + this.writer.write(namedNull.getName()); + } + + /** + * Writes a serialization of the given {@link PrefixDeclarationRegistry}, and + * returns true if anything has been written. 
+ * + * @param prefixDeclarationRegistry a {@link PrefixDeclarationRegistry} + * @throws IOException + * @return true if anything has been written + */ + public boolean writePrefixDeclarationRegistry(final PrefixDeclarationRegistry prefixDeclarationRegistry) + throws IOException { + boolean result = false; + final String baseIri = prefixDeclarationRegistry.getBaseIri(); + if (!PrefixDeclarationRegistry.EMPTY_BASE.contentEquals(baseIri)) { + this.writer.write("@base <"); + this.writer.write(baseIri); + this.writer.write(">"); + this.writer.write(STATEMENT_END); + this.writer.write("\n"); + result = true; + } + + final Iterator> prefixIterator = prefixDeclarationRegistry.iterator(); + while (prefixIterator.hasNext()) { + final Entry entry = prefixIterator.next(); + this.writer.write("@prefix "); + this.writer.write(entry.getKey()); + this.writer.write(" <"); + this.writer.write(entry.getValue()); + this.writer.write(">"); + this.writer.write(STATEMENT_END); + this.writer.write("\n"); + result = true; + } + return result; + } + + /** + * Writes a serialization of the given {@link LanguageStringConstant}. + * + * @param languageStringConstant a {@link LanguageStringConstant} + * @throws IOException + */ + public void writeLanguageStringConstant(final LanguageStringConstant languageStringConstant) throws IOException { + this.writer.write(this.getQuotedString(languageStringConstant.getString())); + this.writer.write("@"); + this.writer.write(languageStringConstant.getLanguageTag()); + } + + /** + * Writes a serialization of the given {@link Command}. + * + * @param command a {@link Command} + * @throws IOException + */ + public void writeCommand(final Command command) throws IOException { + this.writer.write("@"); + this.writer.write(command.getName()); + + for (final Argument argument : command.getArguments()) { + this.writer.write(" "); + if (argument.fromRule().isPresent()) { + this.writeRuleNoStatment(argument.fromRule().get()); + } else if (argument.fromPositiveLiteral().isPresent()) { + this.writeLiteral(argument.fromPositiveLiteral().get()); + } else { + this.writeTerm(argument.fromTerm().get()); + } + } + this.writer.write(STATEMENT_END); + } + + /** + * Convenience method for obtaining serializations as Java strings. + * + * @param writeAction a function that accepts a {@link Serializer} and produces + * a string + * @return serialization string + */ + public static String getSerialization(final SerializationWriter writeAction) { + final StringWriter stringWriter = new StringWriter(); + final Serializer serializer = new Serializer(stringWriter); + try { + writeAction.write(serializer); + } catch (final IOException e) { + throw new RuntimeException("StringWriter should never throw an IOException."); + } + return stringWriter.toString(); + } + + /** + * Escapes (with {@code \}) special character occurrences in given + * {@code string}. The special characters are: + *
+	 * <ul>
+	 * <li>{@code \}</li>
+	 * <li>{@code "}</li>
+	 * <li>{@code \t}</li>
+	 * <li>{@code \b}</li>
+	 * <li>{@code \n}</li>
+	 * <li>{@code \r}</li>
+	 * <li>{@code \f}</li>
+	 * </ul>
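To illustrate these rules, a string value containing a quote and a line break serialises with the escapes written out literally. A sketch using the public writeDatatypeConstant method above together with the DatatypeConstantImpl constructor used by TermFactory later in this patch:

	StringWriter w = new StringWriter();
	new Serializer(w).writeDatatypeConstant(
			new DatatypeConstantImpl("line1\n\"two\"", PrefixDeclarationRegistry.XSD_STRING));
	// w.toString() holds "line1\n\"two\"" with the two-character escape
	// sequences \n and \" appearing literally in the output.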
+	 *
+	 * @param string the string to escape
+	 * @return an escaped string
+	 */
+	private String getQuotedString(final String string) {
+		return "\"" + string.replace("\\", "\\\\").replace("\"", "\\\"").replace("\t", "\\t").replace("\b", "\\b")
+				.replace("\n", "\\n").replace("\r", "\\r").replace("\f", "\\f") + "\"";
+	}
+
+	private String getIri(final String string) {
+		return this.iriTransformer.apply(string);
+	}
+}
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java
new file mode 100644
index 000000000..c32c0bb82
--- /dev/null
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/TermFactory.java
@@ -0,0 +1,172 @@
+package org.semanticweb.rulewerk.core.model.implementation;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.semanticweb.rulewerk.core.model.api.AbstractConstant;
+import org.semanticweb.rulewerk.core.model.api.DatatypeConstant;
+import org.semanticweb.rulewerk.core.model.api.ExistentialVariable;
+import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.UniversalVariable;
+
+/**
+ * Class for creating various kinds of terms. Instances of this class maintain
+ * an internal cache that allows them to re-use the generated objects, which is
+ * useful to save memory since the same term is often needed in multiple places.
+ *
+ * @author Markus Kroetzsch
+ *
+ */
+public class TermFactory {
+
+	/**
+	 * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used
+	 * here for caching terms and predicates by name.
+	 *
+	 * @author Markus Kroetzsch
+	 *
+	 * @param <K> the key type
+	 * @param <V> the value type
+	 */
+	static class SimpleLruMap<K, V> extends LinkedHashMap<K, V> {
+		private static final long serialVersionUID = 7151535464938775359L;
+		private int maxCapacity;
+
+		public SimpleLruMap(int initialCapacity, int maxCapacity) {
+			super(initialCapacity, 0.75f, true);
+			this.maxCapacity = maxCapacity;
+		}
+
+		@Override
+		protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+			return size() >= this.maxCapacity;
+		}
+	}
+
+	final private SimpleLruMap<String, AbstractConstant> abstractConstants;
+	final private SimpleLruMap<String, ExistentialVariable> existentialVariables;
+	final private SimpleLruMap<String, UniversalVariable> universalVariables;
+	final private SimpleLruMap<String, Predicate> predicates;
+
+	public TermFactory() {
+		this(65536);
+	}
+
+	public TermFactory(int cacheSize) {
+		abstractConstants = new SimpleLruMap<>(256, cacheSize);
+		existentialVariables = new SimpleLruMap<>(64, 1024);
+		universalVariables = new SimpleLruMap<>(64, 1024);
+		predicates = new SimpleLruMap<>(256, 4096);
+	}
+
+	/**
+	 * Creates a {@link UniversalVariable}.
+ * + * @param name name of the variable + * @return a {@link UniversalVariable} corresponding to the input. + */ + public UniversalVariable makeUniversalVariable(String name) { + if (universalVariables.containsKey(name)) { + return universalVariables.get(name); + } else { + UniversalVariable result = new UniversalVariableImpl(name); + universalVariables.put(name, result); + return result; + } + } + + /** + * Creates an {@link ExistentialVariable}. + * + * @param name name of the variable + * @return a {@link ExistentialVariable} corresponding to the input. + */ + public ExistentialVariable makeExistentialVariable(String name) { + if (existentialVariables.containsKey(name)) { + return existentialVariables.get(name); + } else { + ExistentialVariable result = new ExistentialVariableImpl(name); + existentialVariables.put(name, result); + return result; + } + } + + /** + * Creates an {@link AbstractConstant}. + * + * @param name name of the constant + * @return an {@link AbstractConstant} corresponding to the input. + */ + public AbstractConstant makeAbstractConstant(String name) { + if (abstractConstants.containsKey(name)) { + return abstractConstants.get(name); + } else { + AbstractConstant result = new AbstractConstantImpl(name); + abstractConstants.put(name, result); + return result; + } + } + + /** + * Creates a {@link DatatypeConstant} from the given input. + * + * @param lexicalValue the lexical representation of the data value + * @param datatypeIri the full absolute IRI of the datatype of this literal + * @return a {@link DatatypeConstant} corresponding to the input. + */ + public DatatypeConstant makeDatatypeConstant(String lexicalValue, String datatypeIri) { + return new DatatypeConstantImpl(lexicalValue, datatypeIri); + } + + /** + * Creates a {@link LanguageStringConstant} from the given input. + * + * @param string the string value of the constant + * @param languageTag the BCP 47 language tag of the constant; should be in + * lower case + * @return a {@link LanguageStringConstant} corresponding to the input. + */ + public LanguageStringConstant makeLanguageStringConstant(String string, String languageTag) { + return new LanguageStringConstantImpl(string, languageTag); + } + + /** + * Creates a {@link Predicate}. + * + * @param name non-blank predicate name + * @param arity predicate arity, strictly greater than 0 + * @return a {@link Predicate} corresponding to the input. 
+ */ + public Predicate makePredicate(String name, int arity) { + String key = name + "#" + String.valueOf(arity); + if (predicates.containsKey(key)) { + return predicates.get(key); + } else { + Predicate result = new PredicateImpl(name, arity); + predicates.put(key, result); + return result; + } + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java new file mode 100644 index 000000000..4dde3f11d --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/model/implementation/UniversalVariableImpl.java @@ -0,0 +1,51 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Simple implementation of {@link UniversalVariable}. + * + * @author david.carral@tu-dresden.de + */ +public class UniversalVariableImpl extends AbstractTermImpl implements UniversalVariable { + + /** + * Constructor. + * + * @param name cannot be a blank String (null, empty or whitespace). + */ + public UniversalVariableImpl(final String name) { + super(name); + } + + @Override + public T accept(TermVisitor termVisitor) { + return termVisitor.visit(this); + } + + @Override + public String toString() { + return Serializer.getSerialization(serializer -> serializer.writeUniversalVariable(this)); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java new file mode 100644 index 000000000..64c828b51 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/AcyclicityNotion.java @@ -0,0 +1,40 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
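Because of the LRU caches above, asking the factory twice for the same name normally yields the identical object, not merely an equal one; a small sketch:

	TermFactory factory = new TermFactory();
	UniversalVariable v1 = factory.makeUniversalVariable("X");
	UniversalVariable v2 = factory.makeUniversalVariable("X");
	assert v1 == v2; // cache hit: same instance while "X" stays in the LRU window
	assert factory.makePredicate("p", 1) == factory.makePredicate("p", 1); // cache key is "p#1"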
+ * #L%
+ */
+//TODO javadoc
+public enum AcyclicityNotion {
+	/**
+	 * Joint Acyclicity
+	 */
+	JA,
+	/**
+	 * Restricted Joint Acyclicity
+	 */
+	RJA,
+	/**
+	 * Model-Faithful Acyclicity
+	 */
+	MFA,
+	/**
+	 * Restricted Model-Faithful Acyclicity
+	 */
+	RMFA
+}
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java
similarity index 83%
rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java
rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java
index 8952a039c..d45b9359d 100644
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Algorithm.java
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Algorithm.java
@@ -1,10 +1,10 @@
-package org.semanticweb.vlog4j.core.reasoner;
-
-/*
+package org.semanticweb.rulewerk.core.reasoner;
+
+/*
  * #%L
- * VLog4j Core Components
+ * Rulewerk Core Components
  * %%
- * Copyright (C) 2018 VLog4j Developers
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
  * %%
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,9 +17,9 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- * #L%
- */
-
-public enum Algorithm {
-	SKOLEM_CHASE, RESTRICTED_CHASE
-}
+ * #L%
+ */
+
+public enum Algorithm {
+	SKOLEM_CHASE, RESTRICTED_CHASE
+}
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java
new file mode 100644
index 000000000..e2a00152c
--- /dev/null
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Correctness.java
@@ -0,0 +1,61 @@
+package org.semanticweb.rulewerk.core.reasoner;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+/**
+ * Enumeration of different correctness results (for example, the correctness of
+ * query answering for a reasoner).
+ *
+ * @author Irina Dragoste
+ *
+ */
+public enum Correctness {
+
+	/**
+	 * Completeness is not guaranteed, but soundness is. For example, query
+	 * answering yields sound, but possibly incomplete answers.
+	 */
+	SOUND_BUT_INCOMPLETE("sound but possibly incomplete"),
+
+	/**
+	 * Soundness is not guaranteed. For example, query answering may give incorrect
+	 * (unsound and incomplete) answers.
+	 */
+	INCORRECT("possibly incorrect"),
+
+	/**
+	 * Correctness is guaranteed. For example, query answering yields correct
+	 * (sound and complete) answers.
+	 */
+	SOUND_AND_COMPLETE("sound and complete");
+
+	private final String name;
+
+	private Correctness(String name) {
+		this.name = name;
+	}
+
+	@Override
+	public String toString() {
+		return name;
+	}
+
+}
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java
new file mode 100644
index 000000000..c5898ef05
--- /dev/null
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/CyclicityResult.java
@@ -0,0 +1,49 @@
+package org.semanticweb.rulewerk.core.reasoner;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+/**
+ * Enumeration for the cyclicity property of a set of rules and predicates. The
+ * cyclicity property determines whether the Restricted Chase
+ * ({@link Algorithm#RESTRICTED_CHASE}) is guaranteed to terminate for given
+ * rules and any set of facts over given EDB predicates.
+ *
+ * @author Irina Dragoste
+ *
+ */
+public enum CyclicityResult {
+	/**
+	 * There exists a set of facts over the given EDB predicates for which
+	 * reasoning with the restricted chase algorithm
+	 * ({@link Algorithm#RESTRICTED_CHASE}) is not guaranteed to terminate for the
+	 * given set of rules.
+	 */
+	CYCLIC,
+	/**
+	 * Reasoning with the restricted chase algorithm
+	 * ({@link Algorithm#RESTRICTED_CHASE}) is guaranteed to terminate for the
+	 * given set of rules and any set of facts over the given EDB predicates.
+	 */
+	ACYCLIC,
+	/**
+	 * (A)cyclicity cannot be determined.
+	 */
+	UNDETERMINED
+}
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java
new file mode 100644
index 000000000..a7bd76fc2
--- /dev/null
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBase.java
@@ -0,0 +1,640 @@
+package org.semanticweb.rulewerk.core.reasoner;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
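Client code is expected to branch on these correctness values rather than on their string forms; a minimal sketch (the reasoner that reports the value is outside this excerpt):

	static String describe(Correctness correctness) {
		switch (correctness) {
		case SOUND_AND_COMPLETE:
			return "answers are exact";
		case SOUND_BUT_INCOMPLETE:
			return "every returned answer holds, but answers may be missing";
		default: // INCORRECT
			return "no guarantee; answers may be wrong";
		}
	}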
+ * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkException; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +/** + * A knowledge base with rules, facts, and declarations for loading data from + * further sources. This is a "syntactic" object in that it represents some + * information that is not relevant for the semantics of reasoning, but that is + * needed to ensure faithful re-serialisation of knowledge bases loaded from + * files (e.g., preserving order). + * + * @author Markus Kroetzsch + * + */ +public class KnowledgeBase implements Iterable { + + private final Set listeners = new HashSet<>(); + + /** + * All (canonical) file paths imported so far, used to prevent cyclic imports. + */ + private final Set importedFilePaths = new HashSet<>(); + + /** + * Auxiliary class to process {@link Statement}s when added to the knowledge + * base. Returns true if a statement was added successfully. + * + * @author Markus Kroetzsch + * + */ + private class AddStatementVisitor implements StatementVisitor { + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.addFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.add(statement); + return true; + } + } + + private final AddStatementVisitor addStatementVisitor = new AddStatementVisitor(); + + /** + * Auxiliary class to process {@link Statement}s when removed from the knowledge + * base. Returns true if a statement was removed successfully. 
+ * + * @author Irina Dragoste + * + */ + private class RemoveStatementVisitor implements StatementVisitor { + + @Override + public Boolean visit(final Fact statement) { + KnowledgeBase.this.removeFact(statement); + return true; + } + + @Override + public Boolean visit(final Rule statement) { + return true; + } + + @Override + public Boolean visit(final DataSourceDeclaration statement) { + KnowledgeBase.this.dataSourceDeclarations.remove(statement); + return true; + } + } + + private final RemoveStatementVisitor removeStatementVisitor = new RemoveStatementVisitor(); + + private class ExtractStatementsVisitor implements StatementVisitor { + + final ArrayList extracted = new ArrayList<>(); + final Class ownType; + + ExtractStatementsVisitor(final Class type) { + this.ownType = type; + } + + ArrayList getExtractedStatements() { + return this.extracted; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Fact statement) { + if (this.ownType.equals(Fact.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final Rule statement) { + if (this.ownType.equals(Rule.class)) { + this.extracted.add((T) statement); + } + return null; + } + + @SuppressWarnings("unchecked") + @Override + public Void visit(final DataSourceDeclaration statement) { + if (this.ownType.equals(DataSourceDeclaration.class)) { + this.extracted.add((T) statement); + } + return null; + } + } + + /** + * The primary storage for the contents of the knowledge base. + */ + private final LinkedHashSet statements = new LinkedHashSet<>(); + + /** + * Known prefixes that can be used to pretty-print the contents of the knowledge + * base. We try to preserve user-provided prefixes found in files when loading + * data. + */ + private MergingPrefixDeclarationRegistry prefixDeclarationRegistry = new MergingPrefixDeclarationRegistry(); + + /** + * Index structure that organises all facts by their predicate. + */ + private final Map> factsByPredicate = new HashMap<>(); + + /** + * Index structure that holds all data source declarations of this knowledge + * base. + */ + private final Set dataSourceDeclarations = new HashSet<>(); + + /** + * Registers a listener for changes on the knowledge base + * + * @param listener a KnowledgeBaseListener + */ + public void addListener(final KnowledgeBaseListener listener) { + this.listeners.add(listener); + } + + /** + * Unregisters given listener from changes on the knowledge base + * + * @param listener KnowledgeBaseListener + */ + public void deleteListener(final KnowledgeBaseListener listener) { + this.listeners.remove(listener); + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + */ + public void addStatement(final Statement statement) { + if (this.doAddStatement(statement)) { + this.notifyListenersOnStatementAdded(statement); + } + } + + /** + * Adds a single statement to the knowledge base. + * + * @param statement the statement to be added + * @return true, if the knowledge base has changed. + */ + boolean doAddStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + if (!this.statements.contains(statement) && statement.accept(this.addStatementVisitor)) { + this.statements.add(statement); + return true; + } + return false; + } + + /** + * Adds a collection of statements to the knowledge base. 
+ * + * @param statements the statements to be added + */ + public void addStatements(final Collection statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Adds a list of statements to the knowledge base. + * + * @param statements the statements to be added + */ + public void addStatements(final Statement... statements) { + final List addedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doAddStatement(statement)) { + addedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsAdded(addedStatements); + } + + /** + * Removes a single statement from the knowledge base, and returns the number of + * statements that were actually removed (0 or 1). + * + * @param statement the statement to remove + * @return number of removed statements + */ + public int removeStatement(final Statement statement) { + if (this.doRemoveStatement(statement)) { + this.notifyListenersOnStatementRemoved(statement); + return 1; + } else { + return 0; + } + } + + /** + * Removes a single statement from the knowledge base. + * + * @param statement the statement to remove + * @return true, if the knowledge base has changed. + */ + boolean doRemoveStatement(final Statement statement) { + Validate.notNull(statement, "Statement cannot be Null!"); + + if (this.statements.contains(statement) && statement.accept(this.removeStatementVisitor)) { + this.statements.remove(statement); + return true; + } + return false; + } + + /** + * Removes a collection of statements to the knowledge base. + * + * @param statements the statements to remove + * @return number of removed statements + */ + public int removeStatements(final Collection statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + return removedStatements.size(); + } + + /** + * Removes a list of statements from the knowledge base. + * + * @param statements the statements to remove + * @return number of removed statements + */ + public int removeStatements(final Statement... 
statements) { + final List removedStatements = new ArrayList<>(); + + for (final Statement statement : statements) { + if (this.doRemoveStatement(statement)) { + removedStatements.add(statement); + } + } + + this.notifyListenersOnStatementsRemoved(removedStatements); + return removedStatements.size(); + } + + private void notifyListenersOnStatementAdded(final Statement addedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementAdded(addedStatement); + } + } + + private void notifyListenersOnStatementsAdded(final List addedStatements) { + if (!addedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsAdded(addedStatements); + } + } + } + + private void notifyListenersOnStatementRemoved(final Statement removedStatement) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementRemoved(removedStatement); + } + } + + private void notifyListenersOnStatementsRemoved(final List removedStatements) { + if (!removedStatements.isEmpty()) { + for (final KnowledgeBaseListener listener : this.listeners) { + listener.onStatementsRemoved(removedStatements); + } + } + } + + /** + * Get the list of all rules that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete rules. + * + * @return list of {@link Rule}s + */ + public List getRules() { + return this.getStatementsByType(Rule.class); + } + + /** + * Get the list of all facts that have been added to the knowledge base. The + * list is read-only and cannot be modified to add or delete facts. + * + * @return list of {@link Fact}s + */ + public List getFacts() { + return this.getStatementsByType(Fact.class); + } + + /** + * Get the list of all data source declarations that have been added to the + * knowledge base. The list is read-only and cannot be modified to add or delete + * facts. + * + * @return list of {@link DataSourceDeclaration}s + */ + public List getDataSourceDeclarations() { + return this.getStatementsByType(DataSourceDeclaration.class); + } + + List getStatementsByType(final Class type) { + final ExtractStatementsVisitor visitor = new ExtractStatementsVisitor<>(type); + for (final Statement statement : this.statements) { + statement.accept(visitor); + } + return Collections.unmodifiableList(visitor.getExtractedStatements()); + } + + /** + * Add a single fact to the internal data structures. It is assumed that it has + * already been checked that this fact is not present yet. + * + * @param fact the fact to add + */ + void addFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + this.factsByPredicate.putIfAbsent(predicate, new HashSet<>()); + this.factsByPredicate.get(predicate).add(fact); + } + + /** + * Removes a single fact from the internal data structure. It is assumed that it + * has already been checked that this fact is already present. + * + * @param fact the fact to remove + */ + void removeFact(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + final Set facts = this.factsByPredicate.get(predicate); + facts.remove(fact); + if (facts.isEmpty()) { + this.factsByPredicate.remove(predicate); + } + } + + /** + * Returns all {@link Statement}s of this knowledge base. + * + * The result can be iterated over and will return statements in the original + * order. The collection is read-only and cannot be modified to add or delete + * statements. 
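The factsByPredicate index maintained by addFact and removeFact above is what keeps these read-only views cheap to compute. A sketch of the add-and-notify behaviour (fact construction is elided; `fact` and `myListener` are hypothetical instances of a Fact implementation and a KnowledgeBaseListener):

	KnowledgeBase kb = new KnowledgeBase();
	kb.addListener(myListener); // hypothetical listener
	kb.addStatement(fact);      // added; myListener.onStatementAdded fires
	kb.addStatement(fact);      // already present: no change, no notification
	assert kb.getFacts().size() == 1;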
+ * + * @return a collection of statements + */ + public Collection getStatements() { + return Collections.unmodifiableCollection(this.statements); + } + + @Override + public Iterator iterator() { + return Collections.unmodifiableCollection(this.statements).iterator(); + } + + Map> getFactsByPredicate() { + return this.factsByPredicate; + } + + /** + * Interface for a method that parses the contents of a stream into a + * KnowledgeBase. + * + * This is essentially + * {@link org.semanticweb.rulewerk.parser.RuleParser#parseInto}, but we need to + * avoid a circular dependency here -- this is also why we throw + * {@link RulewerkException} instead of + * {@link org.semanticweb.rulewerk.parser.ParsingException}. + */ + @FunctionalInterface + public interface AdditionalInputParser { + void parseInto(InputStream stream, KnowledgeBase kb) throws IOException, RulewerkException; + } + + /** + * Import rules from a file. + * + * @param file the file to import + * @param parseFunction a function that transforms a {@link KnowledgeBase} using + * the {@link InputStream}. + * + * @throws IOException when reading {@code file} fails + * @throws IllegalArgumentException when {@code file} is null or has already + * been imported + * @throws RulewerkException when parseFunction throws RulewerkException + */ + public void importRulesFile(File file, AdditionalInputParser parseFunction) + throws RulewerkException, IOException, IllegalArgumentException { + Validate.notNull(file, "file must not be null"); + + boolean isNewFile = this.importedFilePaths.add(file.getCanonicalPath()); + if (isNewFile) { + try (InputStream stream = new FileInputStream(file)) { + parseFunction.parseInto(stream, this); + } + } + } + + /** + * Merge {@link PrefixDeclarationRegistry} into this knowledge base. + * + * @param prefixDeclarationRegistry the prefix declarations to merge. + * Conflicting prefix names in + * {@code prefixDeclarationRegistry} will be + * renamed to some implementation-specific, + * fresh prefix name. + */ + public void mergePrefixDeclarations(PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.prefixDeclarationRegistry.mergePrefixDeclarations(prefixDeclarationRegistry); + } + + /** + * Returns the {@link PrefixDeclarationRegistry} used by this knowledge base. + * + * @return registry for prefix declarations + */ + public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { + return this.prefixDeclarationRegistry; + } + + /** + * Return the base IRI. + * + * @return the base IRI, if declared, or + * {@link PrefixDeclarationRegistry#EMPTY_BASE} otherwise. + */ + public String getBaseIri() { + return this.prefixDeclarationRegistry.getBaseIri(); + } + + /** + * Return the declared prefixes. + * + * @return an iterator over all known prefixes. + */ + public Iterator> getPrefixes() { + return this.prefixDeclarationRegistry.iterator(); + } + + /** + * Resolve {@code prefixName} into the declared IRI. + * + * @param prefixName the prefix name to resolve, including the terminating + * colon. + * + * @throws PrefixDeclarationException when the prefix has not been declared. + * + * @return the declared IRI for {@code prefixName}. + */ + public String getPrefixIri(String prefixName) throws PrefixDeclarationException { + return this.prefixDeclarationRegistry.getPrefixIri(prefixName); + } + + /** + * Resolve a prefixed name into an absolute IRI. Dual to + * {@link KnowledgeBase#unresolveAbsoluteIri}. + * + * @param prefixedName the prefixed name to resolve. 
+	 *
+	 * @throws PrefixDeclarationException when the prefix has not been declared.
+	 *
+	 * @return an absolute IRI corresponding to the prefixed name.
+	 */
+	public String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException {
+		return this.prefixDeclarationRegistry.resolvePrefixedName(prefixedName);
+	}
+
+	/**
+	 * Potentially abbreviate an absolute IRI using the declared prefixes. Dual to
+	 * {@link KnowledgeBase#resolvePrefixedName}.
+	 *
+	 * @param iri the absolute IRI to abbreviate.
+	 *
+	 * @return either a prefixed name corresponding to {@code iri} under the
+	 *         declared prefixes, or {@code iri} if no suitable prefix is declared.
+	 */
+	public String unresolveAbsoluteIri(String iri) {
+		return this.prefixDeclarationRegistry.unresolveAbsoluteIri(iri, false);
+	}
+
+	/**
+	 * Serialise the KnowledgeBase to the {@link Writer}.
+	 *
+	 * @param writer the {@link Writer} to serialise to.
+	 *
+	 * @throws IOException if an I/O error occurs while writing to given output
+	 *                     stream
+	 */
+	public void writeKnowledgeBase(Writer writer) throws IOException {
+		Serializer serializer = new Serializer(writer, prefixDeclarationRegistry);
+
+		boolean makeSeparator = serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry);
+
+		for (DataSourceDeclaration dataSourceDeclaration : this.getDataSourceDeclarations()) {
+			if (makeSeparator) {
+				writer.write('\n');
+				makeSeparator = false;
+			}
+			serializer.writeDataSourceDeclaration(dataSourceDeclaration);
+			writer.write('\n');
+		}
+		makeSeparator |= !this.getDataSourceDeclarations().isEmpty();
+
+		for (Fact fact : this.getFacts()) {
+			if (makeSeparator) {
+				writer.write('\n');
+				makeSeparator = false;
+			}
+			serializer.writeFact(fact);
+			writer.write('\n');
+		}
+		makeSeparator |= !this.getFacts().isEmpty();
+
+		for (Rule rule : this.getRules()) {
+			if (makeSeparator) {
+				writer.write('\n');
+				makeSeparator = false;
+			}
+			serializer.writeRule(rule);
+			writer.write('\n');
+		}
+	}
+
+	/**
+	 * Serialise the KnowledgeBase to the given {@link File}.
+	 *
+	 * @param filePath path to the file to serialise into.
+	 *
+	 * @throws IOException
+	 * @deprecated Use {@link KnowledgeBase#writeKnowledgeBase(Writer)} instead. The
+	 *             method will disappear.
+	 */
+	@Deprecated
+	public void writeKnowledgeBase(String filePath) throws IOException {
+		try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) {
+			this.writeKnowledgeBase(writer);
+		}
+	}
+}
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java
new file mode 100644
index 000000000..c46fc60cb
--- /dev/null
+++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseListener.java
@@ -0,0 +1,69 @@
+package org.semanticweb.rulewerk.core.reasoner;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
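A sketch of serialising a knowledge base to a String via the writer-based method above (`knowledgeBase` stands for any populated KnowledgeBase):

	StringWriter stringWriter = new StringWriter();
	knowledgeBase.writeKnowledgeBase(stringWriter);
	String rulewerkText = stringWriter.toString();
	// Prefix declarations, @source declarations, facts, and rules appear in that
	// order, separated by blank lines, each statement terminated by " ."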
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Statement; + +/** + * Listener to {@link KnowledgeBase} content change events. + * + * @author Irina Dragoste + * + */ +public interface KnowledgeBaseListener { + + /** + * Event triggered whenever a new statement is added to the associated knowledge + * base. + * + * @param statementAdded new statement added to the knowledge base. + */ + void onStatementAdded(Statement statementAdded); + + /** + * Event triggered whenever new statements are added to the associated knowledge + * base. + * + * @param statementsAdded a list of new statements that have been added to the + * knowledge base. + */ + void onStatementsAdded(List statementsAdded); + + /** + * Event triggered whenever a new statement is removed from the associated + * knowledge base. + * + * @param statementRemoved statement removed from the knowledge base. + */ + void onStatementRemoved(Statement statementRemoved); + + /** + * Event triggered whenever new statements are removed from the associated + * knowledge base. + * + * @param statementsRemoved a list of new statements that have been removed from + * the knowledge base. + */ + void onStatementsRemoved(List statementsRemoved); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java new file mode 100644 index 000000000..129423051 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LiteralQueryResultPrinter.java @@ -0,0 +1,134 @@ +package org.semanticweb.rulewerk.core.reasoner; + +import java.io.IOException; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.Writer; +import java.util.LinkedHashMap; +import java.util.Map.Entry; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +/** + * Class for writing {@link QueryResult} objects in pretty print. + * + * @author Markus Kroetzsch + * + */ +public class LiteralQueryResultPrinter { + + final LinkedHashMap firstIndex = new LinkedHashMap<>(); + final Writer writer; + final Serializer serializer; + + int resultCount = 0; + + /** + * Constructor. 
+ * + * @param positiveLiteral the query pattern for which query results + * are to be printed + * @param writer the object to write the output to + * @param prefixDeclarationRegistry information on prefixes used to compute IRI + * abbreviations; can be null + */ + public LiteralQueryResultPrinter(PositiveLiteral positiveLiteral, Writer writer, + PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.writer = writer; + if (prefixDeclarationRegistry == null) { + this.serializer = new Serializer(writer); + } else { + this.serializer = new Serializer(writer, prefixDeclarationRegistry); + } + + int i = 0; + for (Term term : positiveLiteral.getArguments()) { + if (term.getType() == TermType.UNIVERSAL_VARIABLE) { + UniversalVariable variable = (UniversalVariable) term; + if (!firstIndex.containsKey(variable)) { + firstIndex.put(variable, i); + } + } + i++; + } + } + + /** + * Writes a {@link QueryResult} to the specified writer. Nothing is written for + * results of Boolean queries (not even a linebreak). + * + * @param queryResult the {@link QueryResult} to write; this result must be + * based on the query literal specified in the constructor + * @throws IOException if a problem occurred in writing + */ + public void write(QueryResult queryResult) throws IOException { + boolean first = true; + for (Entry entry : firstIndex.entrySet()) { + if (first) { + first = false; + } else { + writer.write(", "); + } + serializer.writeUniversalVariable(entry.getKey()); + writer.write(" -> "); + serializer.writeTerm(queryResult.getTerms().get(entry.getValue())); + } + resultCount++; + if (!first) { + writer.write("\n"); + } + } + + /** + * Returns the number of results written so far. + * + * @return number of results + */ + public int getResultCount() { + return resultCount; + } + + /** + * Returns true if the query has had any results. + * + * @return true if query result is not empty + */ + public boolean hadResults() { + return resultCount != 0; + } + + /** + * Returns true if the query is boolean, i.e., has no answer variables. + * + * @return true if query is boolean + */ + public boolean isBooleanQuery() { + return firstIndex.size() == 0; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java similarity index 83% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java index 9b9ca1e1d..875612e2f 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/LogLevel.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/LogLevel.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.reasoner; +package org.semanticweb.rulewerk.core.reasoner; /*- * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java new file mode 100644 index 000000000..c34419579 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryAnswerCount.java @@ -0,0 +1,78 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Container for correctness and number of query answers, i.e. the number of + * facts that the query maps to. + * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + *
+ * <ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current
+ * knowledge base has completed, and the query answers are guaranteed to be
+ * correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete. This can happen
+ * <ul>
+ * <li>when materialisation has not completed ({@link Reasoner#reason()} returns
+ * {@code false}),</li>
+ * <li>or when the knowledge base was modified after reasoning, and the
+ * materialisation does not reflect the current knowledge base.
+ * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain
+ * complete query answers with respect to the current knowledge base.</li>
+ * </ul>
+ * </li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound. This can happen when the knowledge base was modified
+ * and the reasoner materialisation is no longer consistent with the current
+ * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required,
+ * in order to obtain correct query answers.</li>
+ * </ul>
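+ *
+ * A minimal sketch of reacting to the reported correctness (assuming a
+ * {@link Reasoner} {@code reasoner} and a {@code QueryAnswerCount}
+ * {@code count}; exception handling omitted):
+ *
+ * <pre>
+ * {@code
+ * if (count.getCorrectness() != Correctness.SOUND_AND_COMPLETE) {
+ * 	reasoner.reason(); // re-materialise, then repeat the count query
+ * }
+ * }
+ * </pre>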
+ * + * @author Larry González + * + */ +public interface QueryAnswerCount { + + /** + * Returns the correctness of the query result. + *
+ * <ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, the query results are
+ * guaranteed to be correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete.</li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound.</li>
+ * </ul>
+ * + * @return query result correctness + */ + Correctness getCorrectness(); + + /** + * + * @return number of query answers, i.e., the number of facts that the query + * maps to. + */ + long getCount(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java new file mode 100644 index 000000000..981d1f5b8 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/QueryResultIterator.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Iterator; + +import org.semanticweb.rulewerk.core.model.api.QueryResult; + +/** + * Iterator for {@link QueryResult}s. + * + * @author Irina Dragoste + * + */ +public interface QueryResultIterator extends Iterator, AutoCloseable { + + /** + * Returns the correctness of the query result. + *
+ * <ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, the query results are
+ * guaranteed to be correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete.</li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound.</li>
+ * </ul>
+ * + * @return query result correctness + */ + public Correctness getCorrectness(); + + @Override + public void close(); +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java new file mode 100644 index 000000000..156a03d23 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Reasoner.java @@ -0,0 +1,577 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.function.BiConsumer; +import java.util.stream.Stream; + +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; + +/** + * Interface that exposes the (existential) rule reasoning capabilities of a + * Reasoner.
+ * The knowledge base of the reasoner can be loaded with explicit facts + * and existential rules from which implicit facts can be inferred through + * reasoning.
+ * Facts can be added to the knowledge base: + *
+ * <ul>
+ * <li>as in-memory Java objects ({@link Fact})</li>
+ * <li>from a persistent data source ({@link DataSourceDeclaration})</li>
+ * </ul>
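+ * For example, a fact can be created with the {@code Expressions} factory and
+ * added to a {@link KnowledgeBase} {@code kb} (a sketch; the predicate and
+ * constant names are illustrative):
+ *
+ * <pre>
+ * {@code
+ * kb.addStatement(Expressions.makeFact("p", Expressions.makeAbstractConstant("c")));
+ * }
+ * </pre>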
+ *
+ * Rules added to the knowledge base can be re-written internally by + * VLog, using the {@link RuleRewriteStrategy} that has been set.
+ *
+ * + * The loaded reasoner can perform atomic queries on explicit and + * implicit facts after calling {@link Reasoner#reason()}. Queries can provide + * an iterator for the results ({@link #answerQuery(PositiveLiteral, boolean)}), + * or the results can be exported to a file + * ({@link #exportQueryAnswersToCsv(PositiveLiteral, String, boolean)}).
+ *
+ * Reasoning with various {@link Algorithm}s is supported, which can lead + * to different sets of inferred facts and different termination behavior. In + * some cases, reasoning with rules with existentially quantified variables + * ({@link ExistentialVariable}) may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases. To avoid non-termination, a reasoning timeout can be set + * ({@link Reasoner#setReasoningTimeout(Integer)}).
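+ *
+ * A minimal usage sketch (assuming a populated {@link KnowledgeBase}
+ * {@code kb}, a query literal {@code query}, and a concrete implementation
+ * such as {@code VLogReasoner}; exception handling omitted):
+ *
+ * <pre>
+ * {@code
+ * try (Reasoner reasoner = new VLogReasoner(kb)) {
+ * 	reasoner.reason();
+ * 	try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
+ * 		answers.forEachRemaining(System.out::println);
+ * 	}
+ * }
+ * }
+ * </pre>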
+ * + * @author Irina Dragoste + * + */ + +public interface Reasoner extends AutoCloseable, KnowledgeBaseListener { + /** + * Getter for the knowledge base to reason on. + * + * @return the reasoner's knowledge base + */ + KnowledgeBase getKnowledgeBase(); + + /** + * Interface for actions to perform on inferences. + * + * Essentially a {@link java.util.function.BiConsumer}, but with a more + * permissive Exception spec. + */ + @FunctionalInterface + public interface InferenceAction { + void accept(Predicate predicate, List termList) throws IOException; + } + + /** + * Performs the given action for each inference. + * + * @param action The action to be performed for each inference. + * @return the correctness of the inferences, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException + */ + Correctness forEachInference(InferenceAction action) throws IOException; + + /** + * Performs the given action for each inference, swallowing checked exceptions. + * + * @param action The action to be performed for each inference. + * @return the correctness of the inferences, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + */ + default Correctness unsafeForEachInference(final BiConsumer> action) { + try { + return this.forEachInference(action::accept); + } catch (final IOException e) { + throw new RulewerkRuntimeException(e); + } + } + + /** + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to an OutputStream. + * + * @param writer the {@link Writer} used to write inferences. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException + */ + default Correctness writeInferences(final Writer writer) throws IOException { + final PrefixDeclarationRegistry prefixDeclarationRegistry = this.getKnowledgeBase().getPrefixDeclarationRegistry(); + final Serializer serializer = new Serializer(writer, prefixDeclarationRegistry); + + serializer.writePrefixDeclarationRegistry(prefixDeclarationRegistry); + + return this.forEachInference((predicate, termList) -> { + serializer.writePositiveLiteral(predicate, termList); + writer.write(" .\n"); + }); + } + + /** + * Return a stream of all inferences. + * + * @return a {@link Stream} of {@link Fact} objects corresponding to all + * inferences. + */ + default Stream getInferences() { + final Stream.Builder builder = Stream.builder(); + this.unsafeForEachInference((predicate, termList) -> builder.accept(Expressions.makeFact(predicate, termList))); + + return builder.build(); + } + + /** + * Return the {@link Correctness} status of query answers. + * + * @return the correctnes of query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + */ + Correctness getCorrectness(); + + /** + * Exports all the (explicit and implicit) facts inferred during reasoning of + * the knowledge base to a desired file. + * + * @param filePath a String of the file path for the facts to be written to. + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}. + * @throws IOException + * @throws FileNotFoundException + * @deprecated Use {@link Reasoner#writeInferences(Writer)} instead. The + * method will disappear. 
+ */ + @Deprecated + default Correctness writeInferences(final String filePath) throws FileNotFoundException, IOException { + try (Writer writer = new OutputStreamWriter(new FileOutputStream(filePath), StandardCharsets.UTF_8)) { + return this.writeInferences(writer); + } + } + + /** + * Sets the algorithm that will be used for reasoning over the knowledge base. + * If no algorithm is set, the default algorithm + * {@link Algorithm#RESTRICTED_CHASE} will be used. + * + * @param algorithm the algorithm to be used for reasoning. + */ + void setAlgorithm(Algorithm algorithm); + + /** + * Getter for the algorithm that will be used for reasoning over the knowledge + * base. The default value is {@link Algorithm#RESTRICTED_CHASE}. + * + * @return the reasoning algorithm. + */ + Algorithm getAlgorithm(); + + /** + * In some cases, reasoning with rules with existentially quantified variables + * ({@link ExistentialVariable}) may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases.
+ * This method sets a timeout (in seconds) after which reasoning can be + * artificially interrupted if it has not reached completion. + * + * @param seconds interval after which reasoning will be interrupted, in + * seconds. If {@code null}, reasoning will not be interrupted + * and will return only after (if) it has reached completion. + */ + void setReasoningTimeout(Integer seconds); + + /** + * This method returns the reasoning timeout, representing the interval (in + * {@code seconds}) after which reasoning will be interrupted if it has not + * reached completion. The default value is {@code null}, in which case + * reasoning terminates only after (if) it reaches completion. + * + * @return if not {@code null}, number of seconds after which the reasoning will + * be interrupted, if it has not reached completion. + */ + Integer getReasoningTimeout(); + + /** + * Loaded {@link Rule}s can be re-written internally to an equivalent set of + * rules, according to given {@code ruleRewritingStrategy}. If no strategy is + * set, the default value is {@link RuleRewriteStrategy#NONE}, meaning that the + * rules will not be re-written. + * + * @param ruleRewritingStrategy strategy according to which the rules will be + * rewritten before reasoning. + */ + void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy); + + /** + * Getter for the strategy according to which rules will be rewritten before + * reasoning. The default value is {@link RuleRewriteStrategy#NONE}, meaning + * that the rules will not be re-written. + * + * @return the current rule re-writing strategy + */ + RuleRewriteStrategy getRuleRewriteStrategy(); + + /** + * Sets the logging level of the internal VLog C++ resource. Default value is + * {@link LogLevel#WARNING} + * + * @param logLevel the logging level to be set for VLog C++ resource. + */ + void setLogLevel(LogLevel logLevel); + + /** + * Returns the logging level of the internal VLog C++ resource. If no value has + * been set, the default is {@link LogLevel#WARNING}. + * + * @return the logging level of the VLog C++ resource. + */ + LogLevel getLogLevel(); + + /** + * Redirects the logs of the internal VLog C++ resource to given file. If no log + * file is set or the given {@code filePath} is not a valid file path, VLog will + * log to the default system output. + * + * @param filePath the file for the internal VLog C++ resource to log to. If + * {@code null} or an invalid file path, the reasoner will log + * to the default system output. + */ + void setLogFile(String filePath); + + /** + * Checks whether the loaded rules and loaded fact EDB predicates are Acyclic, + * Cyclic, or cyclicity cannot be determined. + * + * @return the appropriate CyclicityResult. + */ + CyclicityResult checkForCycles(); + + /** + * Check the Joint Acyclicity (JA) property of loaded rules and EDB + * predicates of loaded facts. If a set of rules and EDB predicates is + * JA, then, for the given set of rules and any facts over the given EDB + * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, + * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will + * always terminate. + * + * @return {@code true}, if the loaded set of rules is Joint Acyclic with + * respect to the EDB predicates of loaded facts.
+ * {@code false}, otherwise + */ + boolean isJA(); + + /** + * Check the Restricted Joint Acyclicity (RJA) property of loaded rules + * and EDB predicates of loaded facts. If a set of rules and EDB predicates is + * RJA, then, for the given set of rules and any facts over the given EDB + * predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE Restricted chase} + * will always terminate. + * + * @return {@code true}, if the loaded set of rules is Restricted Joint Acyclic + * with respect to the EDB predicates of loaded facts.
+ * {@code false}, otherwise + */ + boolean isRJA(); + + /** + * Check the Model-Faithful Acyclicity (MFA) property of loaded rules and + * EDB predicates of loaded facts. If a set of rules and EDB predicates is + * MFA, then, for the given set of rules and any facts over the given EDB + * predicates, reasoning by {@link Algorithm#SKOLEM_CHASE Skolem chase} (and, + * implicitly, the {@link Algorithm#RESTRICTED_CHASE Restricted chase}) will + * always terminate. + * + * @return {@code true}, if the loaded set of rules is Model-Faithful Acyclic + * with respect to the EDB predicates of loaded facts.
+ * {@code false}, otherwise + */ + boolean isMFA(); + + /** + * Check the Restricted Model-Faithful Acyclicity (RMFA) property of + * loaded rules and EDB predicates of loaded facts. If a set of rules and EDB + * predicates is RMFA, then, for the given set of rules and any facts + * over the given EDB predicates, reasoning by {@link Algorithm#RESTRICTED_CHASE + * Restricted chase} will always terminate. If a set of rules and EDB predicates + * is MFA, then it is also RMFA. + * + * @return {@code true}, if the loaded set of rules is Restricted Model-Faithful + * Acyclic with respect to the EDB predicates of loaded facts.
+ * {@code false}, otherwise + */ + boolean isRMFA(); + + /** + * Check the Model-Faithful Cyclicity (MFC) property of loaded rules and + * EDB predicates of loaded facts. If a set of rules and EDB predicates is + * MFC, then there exists a set of facts over the given EDB predicates + * for which reasoning by the {@link Algorithm#SKOLEM_CHASE Skolem chase} algorithm + * is guaranteed not to terminate for the loaded rules. If a set of rules and + * EDB predicates is JA, then it is also MFA. Therefore, if a set of rules and + * EDB predicates is MFC, it is not MFA, nor JA. + * + * @return {@code true}, if the loaded set of rules is Model-Faithful Cyclic + * with respect to the EDB predicates of loaded facts.
+ * {@code false}, otherwise + */ + boolean isMFC(); + + /** + * Performs materialisation on the reasoner {@link KnowledgeBase}, depending on + * the set {@link Algorithm}. Materialisation implies extending the set of + * explicit facts in the knowledge base with implicit facts inferred by + * knowledge base rules.
+ *
+ * In some cases, reasoning with rules with existentially quantified variables + * {@link ExistentialVariable} may not terminate. We recommend reasoning with + * algorithm {@link Algorithm#RESTRICTED_CHASE}, as it leads to termination in + * more cases.
+ * To avoid non-termination, a reasoning timeout can be set + * ({@link Reasoner#setReasoningTimeout(Integer)}).
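+ * For example (sketch): after {@code reasoner.setReasoningTimeout(60)},
+ * materialisation is interrupted after one minute, and {@code reasoner.reason()}
+ * returns {@code false} if it did not complete in time.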
+ *
+ * @return
+ * <ul>
+ * <li>{@code true}, if materialisation reached completion.</li>
+ * <li>{@code false}, if materialisation has been interrupted before
+ * completion.</li>
+ * </ul>
+ * @throws IOException if I/O exceptions occur during reasoning. + */ + boolean reason() throws IOException; + + // TODO add examples to query javadoc + /** + * Evaluates an atomic query ({@code query}) on the explicit facts loaded into + * the reasoner and the implicit facts materialised by the reasoner.
+ * An answer to the query is the terms of a fact that matches the {@code query}: + * the fact predicate is the same as the {@code query} predicate, the + * {@link Constant} terms of the {@code query} appear in the answer fact at the + * same term position, and the {@link Variable} terms of the {@code query} are + * matched by terms in the fact, either named ({@link Constant}) or anonymous + * ({@link NamedNull}). The same variable name identifies the same term in the + * answer fact.
+ * A query answer is represented by a {@link QueryResult}. A query can have + * multiple, distinct query answers. This method returns an Iterator over these + * answers.
+ * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + * ({@link QueryResultIterator#getCorrectness()}): + *
+ * <ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current
+ * knowledge base has completed, and the query answers are guaranteed to be
+ * correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete. This can happen
+ * <ul>
+ * <li>when materialisation has not completed ({@link Reasoner#reason()} returns
+ * {@code false}),</li>
+ * <li>or when the knowledge base was modified after reasoning, and the
+ * materialisation does not reflect the current knowledge base.
+ * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain
+ * complete query answers with respect to the current knowledge base.</li>
+ * </ul>
+ * </li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound. This can happen when the knowledge base was modified
+ * and the reasoner materialisation is no longer consistent with the current
+ * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required,
+ * in order to obtain correct query answers.</li>
+ * </ul>
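+ *
+ * A minimal sketch of answering a query (assuming the {@code Expressions}
+ * factory methods; the predicate name {@code "p"} is illustrative):
+ *
+ * <pre>
+ * {@code
+ * PositiveLiteral query = Expressions.makePositiveLiteral("p",
+ * 		Expressions.makeUniversalVariable("x"));
+ * try (QueryResultIterator answers = reasoner.answerQuery(query, false)) {
+ * 	while (answers.hasNext()) {
+ * 		QueryResult answer = answers.next();
+ * 		// answer.getTerms() holds the terms matching the query
+ * 	}
+ * }
+ * }
+ * </pre>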
+ * + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param includeNulls if {@code true}, {@link QueryResult}s containing terms of + * type {@link NamedNull} (representing anonymous + * individuals introduced to satisfy rule existentially + * quantified variables) will be included. Otherwise, the + * answers will only contain the {@link QueryResult}s with + * terms of type {@link Constant} (representing named + * individuals). + * @return QueryResultIterator that iterates over distinct answers to the query. + * It also contains the {@link Correctness} of the query answers. + */ + QueryResultIterator answerQuery(PositiveLiteral query, boolean includeNulls); + + /** + * Evaluates an atomic query ({@code query}) and counts the number of query + * answers over both the explicit facts loaded into the reasoner and the + * implicit facts materialised by the reasoner. + * + * @param query a {@link PositiveLiteral} representing the query to be answered. + * + * @return a {@link QueryAnswerCount} object that contains the query answers + * {@link Correctness} and the number of query answers (i.e. the number + * of facts in the extension of the query), including answers with + * {@link NamedNull} terms that have been introduced during reasoning. + * See also + * {@link Reasoner#countQueryAnswers(PositiveLiteral, boolean)}. + */ + + default QueryAnswerCount countQueryAnswers(final PositiveLiteral query) { + return this.countQueryAnswers(query, true); + } + + // TODO add examples to query javadoc + /** + * Evaluates an atomic query ({@code query}) and counts the number of query + * answers over both the explicit facts loaded into the reasoner and the + * implicit facts materialised by the reasoner.
+ * An answer to the query is the term set of a fact that matches the + * {@code query}: the fact predicate is the same as the {@code query} predicate, + * the {@link TermType#ABSTRACT_CONSTANT}, {@link TermType#DATATYPE_CONSTANT} + * and {@link TermType#LANGSTRING_CONSTANT} terms of the {@code query} appear in + * the answer fact at the same term position, and the + * {@link TermType#UNIVERSAL_VARIABLE} terms of the {@code query} are matched by + * terms in the fact, either named (any of the three constant types) or + * anonymous ({@link TermType#NAMED_NULL}). The same variable name identifies + * the same term in the answer fact.
+ * + * Depending on the state of the reasoning (materialisation) and its + * {@link KnowledgeBase}, the answers can have a different {@link Correctness} + * ({@link QueryResultIterator#getCorrectness()}): + *
+ * <ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over current
+ * knowledge base has completed, and the query answers are guaranteed to be
+ * correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are guaranteed
+ * to be sound, but may be incomplete. This can happen
+ * <ul>
+ * <li>when materialisation has not completed ({@link Reasoner#reason()} returns
+ * {@code false}),</li>
+ * <li>or when the knowledge base was modified after reasoning, and the
+ * materialisation does not reflect the current knowledge base.
+ * Re-materialisation ({@link Reasoner#reason()}) is required in order to obtain
+ * complete query answers with respect to the current knowledge base.</li>
+ * </ul>
+ * </li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete, and some
+ * results may be unsound. This can happen when the knowledge base was modified
+ * and the reasoner materialisation is no longer consistent with the current
+ * knowledge base. Re-materialisation ({@link Reasoner#reason()}) is required,
+ * in order to obtain correct query answers.</li>
+ * </ul>
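+ *
+ * For example (sketch): {@code reasoner.countQueryAnswers(query, false).getCount()}
+ * yields the number of query answers that contain no {@link TermType#NAMED_NULL}
+ * terms.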
+ * + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param includeNulls if {@code true}, facts with {@link TermType#NAMED_NULL} + * terms will be counted. Otherwise, facts with + * {@link TermType#NAMED_NULL} terms will be ignored. + * + * @return a {@link QueryAnswerCount} object that contains the query answers + * Correctness and the number of query answers, i.e. the number of facts in + * the extension of the query. + */ + QueryAnswerCount countQueryAnswers(PositiveLiteral query, boolean includeNulls); + + // TODO add examples to query javadoc + /** + * Evaluates an atomic query ({@code query}) on the explicit facts loaded into + * the reasoner and the implicit facts materialised by the reasoner, and writes + * its answers to the .csv file at given path {@code csvFilePath}: + *
+ * An answer to the query is the terms of a fact that matches the {@code query}: + * the fact predicate is the same as the {@code query} predicate, the + * {@link Constant} terms of the {@code query} appear in the answer fact at the + * same term position, and the {@link Variable} terms of the {@code query} are + * matched by terms in the fact, either named ({@link Constant}) or anonymous + * ({@link NamedNull}). The same variable name identifies the same term in the + * answer fact.
+ * A query can have multiple, distinct query answers. Each answer is written on + * a separate line in the given file. + * + * @param query a {@link PositiveLiteral} representing the query to be + * answered. + * @param csvFilePath path to a .csv file where the query answers + * will be written. Each line of the .csv file + * represents a query answer, and it will contain the fact + * term names as columns. + * @param includeNulls if {@code true}, answers containing terms of type + * {@link NamedNull} (representing anonymous individuals + * introduced to satisfy rule existentially quantified + * variables) will be included. Otherwise, the answers will + * only contain those with terms of type {@link Constant} + * (representing named individuals). + * + * @throws IOException if an I/O error occurs regarding given file + * ({@code csvFilePath}). + * @return the correctness of the query answers, depending on the state of the + * reasoning (materialisation) and its {@link KnowledgeBase}:
+ * <ul>
+ * <li>If {@link Correctness#SOUND_AND_COMPLETE}, materialisation over
+ * current knowledge base has completed, and the query answers are
+ * guaranteed to be correct.</li>
+ * <li>If {@link Correctness#SOUND_BUT_INCOMPLETE}, the results are
+ * guaranteed to be sound, but may be incomplete. This can happen
+ * <ul>
+ * <li>when materialisation has not completed ({@link Reasoner#reason()}
+ * returns {@code false}),</li>
+ * <li>or when the knowledge base was modified after reasoning, and the
+ * materialisation does not reflect the current knowledge base.
+ * Re-materialisation ({@link Reasoner#reason()}) is required in order
+ * to obtain complete query answers with respect to the current
+ * knowledge base.</li>
+ * </ul>
+ * </li>
+ * <li>If {@link Correctness#INCORRECT}, the results may be incomplete,
+ * and some results may be unsound. This can happen when the knowledge
+ * base was modified and the reasoner materialisation is no longer
+ * consistent with the current knowledge base. Re-materialisation
+ * ({@link Reasoner#reason()}) is required, in order to obtain correct
+ * query answers.</li>
+ * </ul>
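+ *
+ * For example (sketch; the file path is illustrative):
+ * {@code Correctness c = reasoner.exportQueryAnswersToCsv(query, "answers.csv", false);}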
+ * + */ + Correctness exportQueryAnswersToCsv(PositiveLiteral query, String csvFilePath, boolean includeNulls) + throws IOException; + + /** + * Resets the reasoner. All implicit facts inferred by reasoning are discarded. + */ + void resetReasoner(); + + // TODO Map exportDBToDir(File location); + + @Override + void close(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java new file mode 100644 index 000000000..365aec7f7 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/ReasonerState.java @@ -0,0 +1,75 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/* + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * Enum for the states a {@link Reasoner} can be in. Certain operations are not + * allowed in some states. + * + * @author Irina Dragoste + * + */ +public enum ReasonerState { + /** + * State a Reasoner is in before loading. Querying is not allowed in this state. + */ + KB_NOT_LOADED("knowledge base not loaded"), + /** + * State a Reasoner is in after loading, and before method + * {@link Reasoner#reason()} has been called. The Reasoner can be queried. + */ + KB_LOADED("knowledge base loaded"), + + /** + * State a Reasoner is in after method {@link Reasoner#reason()} has been + * called. + */ + MATERIALISED("after reasoning"), + + /** + * State in which the knowledge base of an already loaded reasoner has been + * changed. This can occur if the knowledge base has been modified after loading + * (in {@link ReasonerState#KB_LOADED} state), or after reasoning (in + * {@link ReasonerState#MATERIALISED} state). + */ + + KB_CHANGED("knowledge base changed"), + /** + * State a Reasoner is in after method {@link Reasoner#close()} has been called. + * The Reasoner cannot reason again, once it reached this state. Loading, + * reasoning, adding rules, fact and fact data sources, setting the rule + * re-writing strategy, the reasoning algorithm and the reasoning timeout. are + * not allowed in this state. 
+ */ + CLOSED("closed"); + + private final String name; + + private ReasonerState(final String name) { + this.name = name; + } + + @Override + public String toString() { + return this.name; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java similarity index 84% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java index 142d2ea03..8df1012a3 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/RuleRewriteStrategy.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/RuleRewriteStrategy.java @@ -1,32 +1,32 @@ -package org.semanticweb.vlog4j.core.reasoner; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -public enum RuleRewriteStrategy { - /** - * Rules are not re-written - */ - NONE, - /** - * Rule heads are split into head pieces whenever possible - */ - SPLIT_HEAD_PIECES -} +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +public enum RuleRewriteStrategy { + /** + * Rules are not re-written + */ + NONE, + /** + * Rule heads are split into head pieces whenever possible + */ + SPLIT_HEAD_PIECES +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java new file mode 100644 index 000000000..1f41efa6e --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/Timer.java @@ -0,0 +1,545 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadMXBean; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Class for keeping CPU and system times. The class has a number of features + * that can be used to measure and aggregate times across many threads and many + * methods. + * + * @implNote This file originates from the ELK Reasoner, where more extensive thread-aware + * timing was required. The file contains commented out functions from that source that + * could be used to activate those features here. + * + * @author Markus Kroetzsch + */ +public class Timer { + + //private static Logger LOGGER = LoggerFactory.getLogger(Timer.class); + + /** Flag for indicating that no times should be taken (just count runs). */ + public static final int RECORD_NONE = 0x00000000; + /** Flag for indicating that CPU time should be taken. */ + public static final int RECORD_CPUTIME = 0x00000001; + /** Flag for indicating that wall clock time should be taken. */ + public static final int RECORD_WALLTIME = 0x00000002; + /** Flag for indicating that all supported times should be taken. */ + public static final int RECORD_ALL = RECORD_CPUTIME | RECORD_WALLTIME; + + static final ThreadMXBean tmxb = ManagementFactory.getThreadMXBean(); + + static final ConcurrentHashMap registeredTimers = new ConcurrentHashMap(); + + protected final String name; + protected final long threadId; + protected final int todoFlags; + + protected long currentStartCpuTime = -1; + protected long currentStartWallTime = -1; + protected boolean isRunning = false; + protected long totalCpuTime = 0; + protected long totalWallTime = 0; + protected int measurements = 0; + protected int threadCount = 0; + + /** + * Constructor. Every timer is identified by three things: a string name, an + * integer for flagging its tasks (todos), and a thread id (long). + * + * Tasks can be flagged by a disjunction of constants like RECORD_CPUTIME and + * RECORD_WALLTIME. Only times for which an according flag is set will be + * recorded. + * + * The thread id can be the actual id of the thread that is measured, or 0 + * (invalid id) to not assign the timer to any thread. In this case, no CPU time + * measurement is possible since Java does not allow us to measure the total CPU + * time across all threads. + * + * @param name + * @param todoFlags + * @param threadId + */ + public Timer(String name, int todoFlags, long threadId) { + this.name = name; + this.todoFlags = todoFlags; + this.threadId = threadId; + + if (!tmxb.isThreadCpuTimeEnabled()) { + tmxb.setThreadCpuTimeEnabled(true); + } + } + + public Timer(String name) { + this(name, RECORD_ALL, Thread.currentThread().getId()); + } + + /** + * + * @param name + * @param todoFlags + * @return a new {@link Timer} for the current thread + */ + static public Timer getTimerForCurrentThread(String name, int todoFlags) { + return new Timer(name, todoFlags, Thread.currentThread().getId()); + } + + /** + * Get the total recorded CPU time in nanoseconds. 
+ * + * @return recorded CPU time in nanoseconds + */ + public long getTotalCpuTime() { + return totalCpuTime; + } + + public long getAvgCpuTime() { + return totalCpuTime > 0 && measurements > 0 ? totalCpuTime / measurements : -1; + } + + /** + * Get the string name of the timer. + * + * @return string name + */ + public String getName() { + return name; + } + + /** + * Get the ID of the thread for which this timer was created. + * + * @return thread ID + */ + public long getThreadId() { + return threadId; + } + + /** + * Get the total recorded wall clock time in nanoseconds. + * + * @return recorded wall time in nanoseconds + */ + public long getTotalWallTime() { + return totalWallTime; + } + + public long getAvgWallTime() { + return totalWallTime > 0 && measurements > 0 ? totalWallTime / measurements : -1; + } + + /** + * Return true if the timer is running. + * + * @return true if running + */ + public boolean isRunning() { + return isRunning; + } + + /** + * Start the timer. + */ + public synchronized void start() { + if ((todoFlags & RECORD_CPUTIME) != 0) { + currentStartCpuTime = getThreadCpuTime(threadId); + } else { + currentStartCpuTime = -1; + } + if ((todoFlags & RECORD_WALLTIME) != 0) { + currentStartWallTime = System.nanoTime(); + } else { + currentStartWallTime = -1; + } + isRunning = true; + } + + /** + * Stop the timer (if running) and reset all recorded values. + */ + public synchronized void reset() { + currentStartCpuTime = -1; + currentStartWallTime = -1; + totalCpuTime = 0; + totalWallTime = 0; + measurements = 0; + isRunning = false; + threadCount = 0; + } + + /** + * Stop the timer and return the CPU time that has passed since it had last been + * started. The total time (both system and CPU) of all start-stop cycles is + * recorded with the timer. + * + * @return CPU time that the timer was running, or -1 if timer not running or + * CPU time unavailable for other reasons + */ + public synchronized long stop() { + long totalTime = -1; + + if ((todoFlags & RECORD_CPUTIME) != 0 && (currentStartCpuTime != -1)) { + long cpuTime = getThreadCpuTime(threadId); + if (cpuTime != -1) { // may fail if thread already dead + totalTime = cpuTime - currentStartCpuTime; + totalCpuTime += totalTime; + } + } + + if ((todoFlags & RECORD_WALLTIME) != 0 && (currentStartWallTime != -1)) { + long wallTime = System.nanoTime(); + totalWallTime += wallTime - currentStartWallTime; + } + + if (isRunning) { + measurements += 1; + isRunning = false; + } + + currentStartWallTime = -1; + currentStartCpuTime = -1; + + return totalTime; + } + +// /** +// * Print logging information for the timer. The log only shows the recorded time +// * of the completed start-stop cycles. If the timer is still running, then it +// * will not be stopped to add the currently measured time to the output but a +// * warning will be logged. 
+// * +// */ +// public void log() { +// if (LOGGER.isInfoEnabled()) { +// String timerLabel; +// if (threadId != 0) { +// timerLabel = name + " (thread " + threadId + ")"; +// } else if (threadCount > 1) { +// timerLabel = name + " (over " + threadCount + " threads)"; +// } else { +// timerLabel = name; +// } +// +// if (todoFlags == RECORD_NONE) { +// LOGGER.info("Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); +// } else { +// String labels = ""; +// String values = ""; +// String separator; +// +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "CPU"; +// values += totalCpuTime / 1000000; +// separator = "/"; +// } else { +// separator = ""; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += separator + "Wall"; +// values += separator + totalWallTime / 1000000; +// } +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "/CPU avg"; +// values += "/" + (float) (totalCpuTime) / measurements / 1000000; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += "/Wall avg"; +// values += "/" + (float) (totalWallTime) / measurements / 1000000; +// } +// if (threadCount > 1) { +// if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { +// labels += "/CPU per thread"; +// values += "/" + (float) (totalCpuTime) / threadCount / 1000000; +// } +// if ((todoFlags & RECORD_WALLTIME) != 0) { +// labels += "/Wall per thread"; +// values += "/" + (float) (totalWallTime) / threadCount / 1000000; +// } +// } +// +// LOGGER.info( +// "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); +// } +// +// if (isRunning) { +// LOGGER.warn("Timer " + timerLabel + " logged while it was still running"); +// } +// } +// } +// +// /** +// * Start a timer of the given string name for all todos and the current thread. +// * If no such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// */ +// public static void startNamedTimer(String timerName) { +// getNamedTimer(timerName).start(); +// } +// +// /** +// * Start a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// */ +// public static void startNamedTimer(String timerName, int todoFlags) { +// getNamedTimer(timerName, todoFlags).start(); +// } +// +// /** +// * Start a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// */ +// public static void startNamedTimer(String timerName, int todoFlags, long threadId) { +// getNamedTimer(timerName, todoFlags, threadId).start(); +// } +// +// /** +// * Stop a timer of the given string name for all todos and the current thread. +// * If no such timer exists, -1 will be returned. Otherwise the return value is +// * the CPU time that was measured. +// * +// * @param timerName the name of the timer +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName) { +// return stopNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); +// } +// +// /** +// * Stop a timer of the given string name for the current thread. If no such +// * timer exists, -1 will be returned. 
Otherwise the return value is the CPU time +// * that was measured. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName, int todoFlags) { +// return stopNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); +// } +// +// /** +// * Stop a timer of the given string name for the given thread. If no such timer +// * exists, -1 will be returned. Otherwise the return value is the CPU time that +// * was measured. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// * @return CPU time if timer existed and was running, and -1 otherwise +// */ +// public static long stopNamedTimer(String timerName, int todoFlags, long threadId) { +// Timer key = new Timer(timerName, todoFlags, threadId); +// if (registeredTimers.containsKey(key)) { +// return registeredTimers.get(key).stop(); +// } else { +// return -1; +// } +// } +// +// /** +// * Reset a timer of the given string name for all todos and the current thread. +// * If no such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// */ +// public static void resetNamedTimer(String timerName) { +// getNamedTimer(timerName).reset(); +// } +// +// /** +// * Reset a timer of the given string name for the current thread. If no such +// * timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// */ +// public static void resetNamedTimer(String timerName, int todoFlags) { +// getNamedTimer(timerName, todoFlags).reset(); +// } +// +// /** +// * Reset a timer of the given string name for the given thread. If no such timer +// * exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// */ +// public static void resetNamedTimer(String timerName, int todoFlags, long threadId) { +// getNamedTimer(timerName, todoFlags, threadId).reset(); +// } +// +// /** +// * Get a timer of the given string name that takes all possible times (todos) +// * for the current thread. If no such timer exists yet, then it will be newly +// * created. +// * +// * @param timerName the name of the timer +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName) { +// return getNamedTimer(timerName, RECORD_ALL, Thread.currentThread().getId()); +// } +// +// /** +// * Returns all registered timers +// * +// * @return an iterable collection of named timers +// */ +// public static Iterable getNamedTimers() { +// return registeredTimers.keySet(); +// } +// +// /** +// * Get a timer of the given string name and todos for the current thread. If no +// * such timer exists yet, then it will be newly created. +// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName, int todoFlags) { +// return getNamedTimer(timerName, todoFlags, Thread.currentThread().getId()); +// } +// +// /** +// * Get a timer of the given string name for the given thread. If no such timer +// * exists yet, then it will be newly created. 
+// * +// * @param timerName the name of the timer +// * @param todoFlags +// * @param threadId of the thread to track, or 0 if only system clock should be +// * tracked +// * @return timer +// */ +// public static Timer getNamedTimer(String timerName, int todoFlags, long threadId) { +// Timer key = new Timer(timerName, todoFlags, threadId); +// Timer previous = registeredTimers.putIfAbsent(key, key); +// if (previous != null) { +// return previous; +// } +// // else +// return key; +// } +// +// /** +// * Collect the total times measured by all known named timers of the given name. +// * +// * @param timerName +// * @return timer +// */ +// public static Timer getNamedTotalTimer(String timerName) { +// long totalCpuTime = 0; +// long totalSystemTime = 0; +// int measurements = 0; +// int threadCount = 0; +// int todoFlags = RECORD_NONE; +// Timer previousTimer = null; +// for (Map.Entry entry : registeredTimers.entrySet()) { +// if (entry.getValue().name.equals(timerName)) { +// previousTimer = entry.getValue(); +// threadCount += 1; +// totalCpuTime += previousTimer.totalCpuTime; +// totalSystemTime += previousTimer.totalWallTime; +// measurements += previousTimer.measurements; +// todoFlags |= previousTimer.todoFlags; +// } +// } +// +// if (threadCount == 1) { +// return previousTimer; +// } else { +// Timer result = new Timer(timerName, todoFlags, 0); +// result.totalCpuTime = totalCpuTime; +// result.totalWallTime = totalSystemTime; +// result.measurements = measurements; +// result.threadCount = threadCount; +// return result; +// } +// } +// +// public static void logAllNamedTimers(String timerName) { +// for (Map.Entry entry : registeredTimers.entrySet()) { +// if (entry.getValue().name.equals(timerName)) { +// entry.getValue().log(); +// } +// } +// } +// +// @Override +// public int hashCode() { +// // Jenkins hash, see http://www.burtleburtle.net/bob/hash/doobs.html and also +// // http://en.wikipedia.org/wiki/Jenkins_hash_function. 
+// int hash = name.hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// hash += Long.valueOf(threadId).hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// hash += Integer.valueOf(todoFlags).hashCode(); +// hash += (hash << 10); +// hash ^= (hash >> 6); +// +// hash += (hash << 3); +// hash ^= (hash >> 11); +// hash += (hash << 15); +// return hash; +// } +// +// @Override +// public boolean equals(Object obj) { +// if (this == obj) { +// return true; +// } else if (obj == null) { +// return false; +// } else if (getClass() != obj.getClass()) { +// return false; +// } else if (threadId == ((Timer) obj).threadId && todoFlags == ((Timer) obj).todoFlags +// && name.equals(((Timer) obj).name)) { +// return true; +// } else { +// return false; +// } +// } + + protected static long getThreadCpuTime(long threadId) { + if (threadId == 0) { // generally invalid + return 0; + } else { + return tmxb.getThreadCpuTime(threadId); + } + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java new file mode 100644 index 000000000..66fcf39c9 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSource.java @@ -0,0 +1,87 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.Arrays; + +/** + * An {@code CsvFileDataSource} stores facts in the CSV format inside a file of + * the extension {@code .csv}. These fact tuples can be associated with a single + * predicate of the same arity as the length of these tuples. + *
+ * <p>
+ * The required format looks like this:
+ *
+ * <pre>
+ * {@code
+ * term11, term12, term13, ... term1n
+ * term21, term22, term23, ... term2n
+ * ...
+ * termM1, termM2, termM3, ... termMn
+ * }
+ * </pre>
+ * + * where {@code n} is the arity of the predicate and {@code M} is the number of + * facts. Gzipped files of the extension {@code .csv.gz} are also supported. + * + * @author Christian Lewe + * @author Irina Dragoste + * + */ +public class CsvFileDataSource extends FileDataSource { + + /** + * The name of the predicate used for declarations of data sources of this type. + */ + public static final String declarationPredicateName = "load-csv"; + + private static final Iterable possibleExtensions = Arrays.asList(".csv", ".csv.gz"); + + /** + * Constructor. + * + * @param csvFile path to a file of a {@code .csv} or {@code .csv.gz} extension + * and a valid CSV format. + * @throws IOException if the path of the given {@code csvFile} is + * invalid. + * @throws IllegalArgumentException if the extension of the given + * {@code csvFile} does not occur in + * {@link #possibleExtensions}. + */ + public CsvFileDataSource(final String csvFile) throws IOException { + super(csvFile, possibleExtensions); + } + + @Override + public String toString() { + return "CsvFileDataSource [csvFile=" + getFile() + "]"; + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { + visitor.visit(this); + } + + @Override + String getDeclarationPredicateName() { + return declarationPredicateName; + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java new file mode 100644 index 000000000..cb227662c --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/DataSourceConfigurationVisitor.java @@ -0,0 +1,69 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +/** + * A visitor to generate (reasoner-specific) configuration for the various data + * sources. + * + * @author Maximilian Marx + */ +public interface DataSourceConfigurationVisitor { + /** + * Configure the reasoner for a {@link CsvFileDataSource}. + * + * @param dataSource the data source to configure + * @throws IOException when an IO error occurs during configuration + */ + public void visit(CsvFileDataSource dataSource) throws IOException; + + /** + * Configure the reasoner for a {@link RdfFileDataSource}. + * + * @param dataSource the data source to configure + * @throws IOException when an IO error occurs during configuration + */ + public void visit(RdfFileDataSource dataSource) throws IOException; + + /** + * Configure the reasoner for a {@link TridentDataSource}. 
+ * + * @param dataSource the data source to configure + * @throws IOException when an IO error occurs during configuration + */ + public void visit(TridentDataSource dataSource) throws IOException; + + /** + * Configure the reasoner for a {@link SparqlQueryResultDataSource}. + * + * @param dataSource the data source to configure + */ + public void visit(SparqlQueryResultDataSource dataSource); + + /** + * Configure the reasoner for a {@link InMemoryDataSource}. + * + * @param dataSource the data source to configure + */ + public void visit(InMemoryDataSource dataSource); +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java new file mode 100644 index 000000000..3c3df5d9b --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/EmptyQueryResultIterator.java @@ -0,0 +1,60 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; + +/** + * Iterator that represents an empty query result. + * + * @author Markus Kroetzsch + * + */ +public class EmptyQueryResultIterator implements QueryResultIterator { + + final Correctness correctness; + + public EmptyQueryResultIterator(Correctness correctness) { + this.correctness = correctness; + } + + @Override + public void close() { + // nothing to do + } + + @Override + public boolean hasNext() { + return false; + } + + @Override + public QueryResult next() { + return null; + } + + public Correctness getCorrectness() { + return this.correctness; + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java new file mode 100644 index 000000000..1fbb8ee2a --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSource.java @@ -0,0 +1,139 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.util.Optional; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +/** + * A {@code FileDataSource} is an abstract implementation of a storage for fact + * terms in a file of some format. The exact syntax of this storage is + * determined by the individual extensions of this class. + * + * @author Christian Lewe + * @author Irina Dragoste + * + */ +public abstract class FileDataSource implements ReasonerDataSource { + private final File file; + private final String filePath; + private final String fileName; + private final String extension; + + /** + * Constructor. + * + * @param filePath path to a file that will serve as storage for fact + * terms. + * @param possibleExtensions a list of extensions that the files could have. + * Extensions are tried in the given order, no extension + * in the list can be a suffix of a later extension. + * @throws IOException if the path of the given {@code file} is + * invalid. + * @throws IllegalArgumentException if the extension of the given {@code file} + * does not occur in + * {@code possibleExtensions}. + */ + public FileDataSource(final String filePath, final Iterable possibleExtensions) throws IOException { + Validate.notBlank(filePath, "Data source file path cannot be blank!"); + + this.file = new File(filePath); + this.filePath = filePath; // unmodified file path, necessary for correct serialisation + this.fileName = this.file.getName(); + this.extension = getValidExtension(this.fileName, possibleExtensions); + file.getCanonicalPath(); // make sure that the path is valid. + } + + private String getValidExtension(final String fileName, final Iterable possibleExtensions) { + // use a sequential stream here to avoid a potential race + // condition with extensions that are suffixes of one another. 
+ final Stream extensionsStream = StreamSupport.stream(possibleExtensions.spliterator(), false); + final Optional potentialExtension = extensionsStream.filter(fileName::endsWith).findFirst(); + + if (!potentialExtension.isPresent()) { + throw new IllegalArgumentException("Expected one of the following extensions for the data source file " + + fileName + ": " + String.join(", ", possibleExtensions) + "."); + } + + return potentialExtension.get(); + } + + public File getFile() { + return this.file; + } + + public String getPath() { + return this.filePath; + } + + public String getName() { + return this.fileName; + } + + public String getExtension() { + return this.extension; + } + + @Override + public int hashCode() { + return this.file.hashCode(); + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof FileDataSource)) { + return false; + } + final FileDataSource other = (FileDataSource) obj; + return this.file.equals(other.getFile()); + } + + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate(getDeclarationPredicateName(), 1); + return Expressions.makeFact(predicate, + Expressions.makeDatatypeConstant(getPath(), PrefixDeclarationRegistry.XSD_STRING)); + } + + /** + * Returns the name of the predicate that is used to define a declaration of + * this data source. + * + * @return + */ + abstract String getDeclarationPredicateName(); + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java new file mode 100644 index 000000000..bdf244ac5 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/InMemoryDataSource.java @@ -0,0 +1,69 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; + +/** + * A {@link DataSource} for representing a large number of facts that were + * generated in Java. Rather than creating {@link Fact} objects for all of them, + * the object will directly accept tuples of constant names that are internally + * stored in a form that can be passed to the reasoner directly, thereby saving + * memory and loading time. + * + * @author Markus Kroetzsch + * + */ +public abstract class InMemoryDataSource implements ReasonerDataSource { + + protected int capacity; + protected final int arity; + + /** + * Create a new in-memory data source for facts of the specified arity. The + * given capacity is the initial size of the space allocated. 
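The suffix-ordering constraint on `possibleExtensions` above is subtle. Here is a minimal standalone sketch of the first-match semantics (the class name `ExtensionMatchDemo` is hypothetical, not part of this patch):

```java
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class ExtensionMatchDemo {
	// First-match semantics, as in getValidExtension: earlier entries win,
	// so no earlier entry may be a suffix of a later one.
	static Optional<String> match(String fileName, List<String> extensions) {
		return extensions.stream().filter(fileName::endsWith).findFirst();
	}

	public static void main(String[] args) {
		// Safe ordering: ".csv" is not a suffix of ".csv.gz".
		System.out.println(match("facts.csv.gz", Arrays.asList(".csv", ".csv.gz"))); // Optional[.csv.gz]
		// Broken ordering: ".gz" is a suffix of ".csv.gz" and shadows it.
		System.out.println(match("facts.csv.gz", Arrays.asList(".gz", ".csv.gz"))); // Optional[.gz]
	}
}
```

With `[".gz", ".csv.gz"]` the shorter suffix always matches first and the longer extension is never reported; the default lists used in this patch are safe because neither entry is a suffix of the other.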
For best + * efficiency, the actual number of facts should exactly correspond to this + * capacity. + * + * @param arity the number of parameters in a fact from this source + * @param initialCapacity the planned number of facts + */ + public InMemoryDataSource(final int arity, final int initialCapacity) { + this.capacity = initialCapacity; + this.arity = arity; + } + + /** + * Adds a fact to this data source. The number of constant names must agree with + * the arity of this data source. + * + * @param constantNames the string names of the constants in this fact + */ + public abstract void addTuple(final String... constantNames); + + protected void validateArity(final String... constantNames) { + if (constantNames.length != this.arity) { + throw new IllegalArgumentException("This data source holds tuples of arity " + this.arity + + ". Adding a tuple of size " + constantNames.length + " is not possible."); + } + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java new file mode 100644 index 000000000..c433758ee --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryAnswerCountImpl.java @@ -0,0 +1,60 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; + +public class QueryAnswerCountImpl implements QueryAnswerCount { + + final private Correctness correctness; + final private long count; + + /** + * Constructor of QueryAnswerSize + * + * @param correctness of the evaluated query. See {@link Correctness}. + * + * @param size number of query answers, i.e. number of facts in the + * extension of the query. 
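To illustrate the `InMemoryDataSource` contract described above, here is a hypothetical, simplified analogue (`SimpleInMemorySource`, not the reasoner-backed implementation, which stores tuples in a packed internal form) that keeps tuples as plain `String[]` arrays and enforces the arity check:

```java
import java.util.ArrayList;
import java.util.List;

public class SimpleInMemorySource {
	private final int arity;
	private final List<String[]> tuples;

	public SimpleInMemorySource(int arity, int initialCapacity) {
		this.arity = arity;
		this.tuples = new ArrayList<>(initialCapacity);
	}

	public void addTuple(String... constantNames) {
		// Same validation as InMemoryDataSource.validateArity:
		if (constantNames.length != this.arity) {
			throw new IllegalArgumentException("This data source holds tuples of arity " + this.arity
					+ ". Adding a tuple of size " + constantNames.length + " is not possible.");
		}
		this.tuples.add(constantNames);
	}

	public static void main(String[] args) {
		SimpleInMemorySource source = new SimpleInMemorySource(2, 2);
		source.addTuple("alice", "bob"); // ok: arity 2
		source.addTuple("bob", "carol"); // ok
		source.addTuple("dave"); // throws IllegalArgumentException
	}
}
```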
+ */ + + public QueryAnswerCountImpl(Correctness correctness, long size) { + this.correctness = correctness; + this.count = size; + } + + @Override + public Correctness getCorrectness() { + return this.correctness; + } + + @Override + public long getCount() { + return this.count; + } + + @Override + public String toString() { + return this.count + " (" + this.correctness.toString() + ")"; + } + +} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java similarity index 76% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java rename to rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java index b02bf4153..f7f933253 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImpl.java +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImpl.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.core.reasoner.implementation; /* * #%L - * VLog4j Core Components + * Rulewerk Core Components * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,19 +22,20 @@ import java.util.List; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; /** - * Implements {@link QueryResult}s. + * Implements {@link QueryResult}s. + * * @author Irina Dragoste * */ -final class QueryResultImpl implements QueryResult { +public final class QueryResultImpl implements QueryResult { private final List terms; - QueryResultImpl(List terms) { + public QueryResultImpl(List terms) { this.terms = terms; } @@ -72,7 +73,7 @@ public boolean equals(Object obj) { @Override public String toString() { - return "QueryResult [terms=" + this.terms + "]"; + return this.terms.toString(); } } diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java new file mode 100644 index 000000000..18fe4b181 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSource.java @@ -0,0 +1,91 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.util.Arrays; +import java.util.Optional; + +/** + * An {@code RdfFileDataSource} stores facts in the RDF N-Triples format inside + * a file of the extension {@code .nt}. These fact triples can be associated + * with a single predicate of arity 3. + *
+ * <p>
+ * The required format is given in the
+ * <a href="https://www.w3.org/TR/n-triples/">W3C specification</a>. A simple
+ * example file could look like this:
+ *
+ * <pre>
+ * {@code
+ * <http://example.org/alice> <http://example.org/knows> <http://example.org/bob> .
+ * <http://example.org/bob> <http://example.org/knows> <http://example.org/carol> .
+ * }
+ * </pre>
+ * + * Gzipped files of the extension {@code .nt.gz} are also supported. + * + * @author Christian Lewe + * + */ +public class RdfFileDataSource extends FileDataSource { + + /** + * The name of the predicate used for declarations of data sources of this type. + */ + public static final String declarationPredicateName = "load-rdf"; + + private final static Iterable possibleExtensions = Arrays.asList(".nt", ".nt.gz"); + + /** + * Constructor. + * + * @param rdfFile path to a file of a {@code .nt} or {@code .nt.gz} extension + * and a valid N-Triples format. + * @throws IOException if the path of the given {@code rdfFile} is + * invalid. + * @throws IllegalArgumentException if the extension of the given + * {@code rdfFile} does not occur in + * {@link #possibleExtensions}. + */ + public RdfFileDataSource(final String rdfFile) throws IOException { + super(rdfFile, possibleExtensions); + } + + @Override + public String toString() { + return "RdfFileDataSource [rdfFile=" + this.getFile() + "]"; + } + + @Override + public Optional getRequiredArity() { + return Optional.of(3); + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) throws IOException { + visitor.visit(this); + } + + @Override + String getDeclarationPredicateName() { + return declarationPredicateName; + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java new file mode 100644 index 000000000..96020fceb --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/ReasonerDataSource.java @@ -0,0 +1,38 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.DataSource; + +/** + * An interface for DataSources that can be used with a Reasoner. + */ +public interface ReasonerDataSource extends DataSource { + /** + * Accept a {@link DataSourceConfigurationVisitor} to configure a + * reasoner to load this data source. + * + * @param visitor the visitor. 
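The `accept`/`visit` pair above is classic double dispatch: the reasoner hands a visitor to the data source, and the source calls back with its concrete static type, so reasoner-specific configuration needs no `instanceof` chains. A self-contained sketch with hypothetical types (not the Rulewerk interfaces):

```java
interface Source {
	void accept(Visitor v);
}

interface Visitor {
	void visit(CsvSource s);
	void visit(SparqlSource s);
}

class CsvSource implements Source {
	public void accept(Visitor v) {
		v.visit(this); // static type of "this" is CsvSource, so visit(CsvSource) is chosen
	}
}

class SparqlSource implements Source {
	public void accept(Visitor v) {
		v.visit(this);
	}
}

class LoggingVisitor implements Visitor {
	public void visit(CsvSource s) { System.out.println("configuring CSV source"); }
	public void visit(SparqlSource s) { System.out.println("configuring SPARQL source"); }
}

public class VisitorDemo {
	public static void main(String[] args) {
		Source[] sources = { new CsvSource(), new SparqlSource() };
		Visitor visitor = new LoggingVisitor();
		for (Source source : sources) {
			source.accept(visitor); // dispatches on the concrete source type
		}
	}
}
```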
+ */ + public void accept(DataSourceConfigurationVisitor visitor) throws IOException; +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java new file mode 100644 index 000000000..4f03dde77 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/Skolemization.java @@ -0,0 +1,157 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.ByteArrayOutputStream; +import java.util.UUID; + +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; + +/** + * A class that implements skolemization and collision-free renaming of named + * nulls. The same name will always be renamed in the same way when using the + * same instance of {@link Skolemization}, but it is extremely unlikely that + * different names or different instances will ever produce the same name. + * + * This can be used to rename apart named nulls from different input sources to + * avoid clashes. There is also code for creating skolem constants with + * appropriate absolute IRIs. + * + * @author Maximilian Marx + */ +public class Skolemization { + + /** + * IRI prefix used for IRIs skolem constants in Rulewerk. + */ + public final static String SKOLEM_IRI_PREFIX = "https://rulewerk.semantic-web.org/.well-known/genid/"; + /** + * Prefix used to ensure that UUID-based local names do not start with a number. + */ + private final static String SKOLEM_UUID_START = "B-"; + + /** + * The namespace to use for skolemizing named null names. + */ + private final byte[] namedNullNamespace = UUID.randomUUID().toString().getBytes(); + + /** + * Creates a named null with a renamed name that is determined by the given + * original name. The result is a {@link RenamedNamedNull} to allow other code + * to recognise that no further renaming is necessary. + * + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return a {@link RenamedNamedNull} with a new name that is specific to this + * instance and {@code name}. + */ + public RenamedNamedNull getRenamedNamedNull(final String name) { + return new RenamedNamedNull(this.getFreshName(name)); + } + + /** + * Creates a skolem constant that is determined by the given original name. 
+ * + * @param name the name of the {@link NamedNull} to skolemize (or any + * other string for which to create a unique renaming) + * @param termFactory the {@link TermFactory} that is used to create the + * constant + * @return a {@link AbstractConstant} with an IRI that is specific to this + * instance and {@code name}. + */ + public AbstractConstant getSkolemConstant(final String name, final TermFactory termFactory) { + return termFactory.makeAbstractConstant(this.getSkolemConstantName(name)); + } + + /** + * Creates a skolem constant that is determined by the given {@link NamedNull}. + * The method ensures that a new unique name is generated unless the given + * object is already a {@link RenamedNamedNull}. + * + * @param namedNull the {@link NamedNull} to skolemize + * @param termFactory the {@link TermFactory} that is used to create the + * constant + * @return a {@link AbstractConstant} with an IRI that is specific to this + * instance and {@code namedNull}. + */ + public AbstractConstant getSkolemConstant(final NamedNull namedNull, final TermFactory termFactory) { + return termFactory.makeAbstractConstant(this.getSkolemConstantName(namedNull)); + + } + + /** + * Returns the name (IRI string) of a skolem constant for skolemising a named + * null of the given name. + * + * @param name the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return string that is an IRI for a skolem constant + */ + public String getSkolemConstantName(final String name) { + return this.getSkolemConstantNameFromUniqueName(this.getFreshName(name).toString()); + } + + /** + * Returns the name (IRI string) of a skolem constant for skolemising the given + * named {@link NamedNull}. The method ensures that a new unique name is + * generated unless the given object is already a {@link RenamedNamedNull}. + * + * @param namedNull the name of the {@link NamedNull} to be renamed here (or any + * other string for which to create a unique renaming) + * @return string that is an IRI for a skolem constant + */ + public String getSkolemConstantName(final NamedNull namedNull) { + if (namedNull instanceof RenamedNamedNull) { + return this.getSkolemConstantNameFromUniqueName(namedNull.getName()); + } else { + return this.getSkolemConstantName(namedNull.getName()); + } + } + + /** + * Returns a full skolem constant IRI string from its local id part. + * + * @param name local id of skolem constant + * @return IRI string + */ + private String getSkolemConstantNameFromUniqueName(final String name) { + return SKOLEM_IRI_PREFIX + SKOLEM_UUID_START + name; + } + + /** + * Creates a fresh UUID based on the given string. The UUID is determined by the + * string and the instance of {@link Skolemization}. Other strings or instances + * are extremely unlikely to produce the same string. 
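A sketch of the renaming scheme described above, replicating the `getFreshName` logic in a hypothetical `FreshNameDemo` class: the per-instance random namespace is prepended to the name, and a name-based UUID is derived from the concatenation, so results are stable within one instance but differ across instances:

```java
import java.io.ByteArrayOutputStream;
import java.util.UUID;

public class FreshNameDemo {
	// Per-instance namespace, as in Skolemization.namedNullNamespace.
	private final byte[] namespace = UUID.randomUUID().toString().getBytes();

	UUID freshName(String name) {
		// Concatenate namespace and name, then derive a name-based UUID.
		byte[] nameBytes = name.getBytes();
		ByteArrayOutputStream stream = new ByteArrayOutputStream();
		stream.write(namespace, 0, namespace.length);
		stream.write(nameBytes, 0, nameBytes.length);
		return UUID.nameUUIDFromBytes(stream.toByteArray());
	}

	public static void main(String[] args) {
		FreshNameDemo a = new FreshNameDemo();
		FreshNameDemo b = new FreshNameDemo();
		System.out.println(a.freshName("_:n1").equals(a.freshName("_:n1"))); // true: deterministic per instance
		System.out.println(a.freshName("_:n1").equals(b.freshName("_:n1"))); // false, with overwhelming probability
	}
}
```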
+ * + * @param name the string to be renamed + * @return a UUID for the new name + */ + public UUID getFreshName(final String name) { + final byte[] nameBytes = name.getBytes(); + final ByteArrayOutputStream stream = new ByteArrayOutputStream(); + stream.write(this.namedNullNamespace, 0, this.namedNullNamespace.length); + stream.write(nameBytes, 0, nameBytes.length); + return UUID.nameUUIDFromBytes(stream.toByteArray()); + } +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java new file mode 100644 index 000000000..da80ea3b4 --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSource.java @@ -0,0 +1,182 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.net.URL; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.Optional; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +/** + * A SparqlQueryResultDataSource provide the results of a SPARQL query on a + * given web endpoint. + * + * @author Irina Dragoste + * + */ +public class SparqlQueryResultDataSource implements ReasonerDataSource { + + /** + * The name of the predicate used for declarations of data sources of this type. + */ + public static final String declarationPredicateName = "sparql"; + + private final URL endpoint; + private final String queryVariables; + private final String queryBody; + + /** + * Creates a data source from answers to a remote SPARQL query. + * + * @param endpoint web location of the resource the query will be + * evaluated on + * @param queryVariables comma-separated list of SPARQL variable names (without + * leading ? 
or $) + * @param queryBody content of the WHERE clause in the SPARQL query + */ + // TODO add examples to javadoc + // TODO add illegal argument exceptions to javadoc + public SparqlQueryResultDataSource(final URL endpoint, final String queryVariables, final String queryBody) { + Validate.notNull(endpoint, "Endpoint cannot be null."); + Validate.notNull(queryVariables, "Query variables string cannot be null."); + Validate.notEmpty(queryVariables, "There must be at least one query variable."); + Validate.notBlank(queryBody, "Query body cannot be null or blank [{}].", queryBody); + // TODO validate query body syntax (for example, new line character) + // TODO validate early that the arity coincides with the assigned predicate + this.endpoint = endpoint; + this.queryVariables = queryVariables.replace(" ", ""); + this.queryBody = queryBody.replace("\n", " "); + } + + /** + * Creates a data source from answers to a remote SPARQL query. + * + * @param endpoint the web location of the resource the query will be + * evaluated on. + * @param queryVariables the variables of the query, in the given order. The + * variable at each position in the ordered set will be + * mapped to its correspondent query answer term at the + * same position. + * @param queryBody the content of the WHERE clause in the SPARQL + * query. Must not contain {@code newline} characters + * ({@code "\n")}. + */ + // TODO add examples to javadoc + // TODO add illegal argument exceptions to javadoc + public SparqlQueryResultDataSource(final URL endpoint, final LinkedHashSet queryVariables, + final String queryBody) { + Validate.notNull(endpoint, "Endpoint cannot be null."); + Validate.notNull(queryVariables, "Query variables ordered set cannot be null."); + Validate.noNullElements(queryVariables, "Query variables cannot be null or contain null elements."); + Validate.notEmpty(queryVariables, "There must be at least one query variable."); + Validate.notBlank(queryBody, "Query body cannot be null or blank [{}].", queryBody); + // TODO validate query body syntax (for example, new line character) + // TODO validate early that the arity coincides with the assigned predicate + this.endpoint = endpoint; + this.queryVariables = getQueryVariablesList(queryVariables); + this.queryBody = queryBody; + } + + public URL getEndpoint() { + return this.endpoint; + } + + public String getQueryBody() { + return this.queryBody; + } + + public String getQueryVariables() { + return this.queryVariables; + } + + static String getQueryVariablesList(LinkedHashSet queryVariables) { + final StringBuilder sb = new StringBuilder(); + final Iterator iterator = queryVariables.iterator(); + while (iterator.hasNext()) { + sb.append(iterator.next().getName()); + if (iterator.hasNext()) { + sb.append(","); + } + } + return sb.toString(); + } + + @Override + public Optional getRequiredArity() { + return Optional.of(this.queryVariables.split(",").length); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + this.endpoint.hashCode(); + result = prime * result + this.queryBody.hashCode(); + result = prime * result + this.queryVariables.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final SparqlQueryResultDataSource other = (SparqlQueryResultDataSource) obj; + return this.endpoint.equals(other.getEndpoint()) && 
this.queryVariables.equals(other.getQueryVariables()) + && this.queryBody.equals(other.getQueryBody()); + } + + @Override + public String toString() { + return "SparqlQueryResultDataSource [endpoint=" + this.endpoint + ", queryVariables=" + this.queryVariables + + ", queryBody=" + this.queryBody + "]"; + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } + + @Override + public Fact getDeclarationFact() { + Predicate predicate = Expressions.makePredicate(declarationPredicateName, 3); + Term endpointTerm = Expressions.makeAbstractConstant(getEndpoint().toString()); + Term variablesTerm = Expressions.makeDatatypeConstant(getQueryVariables(), + PrefixDeclarationRegistry.XSD_STRING); + Term patternTerm = Expressions.makeDatatypeConstant(getQueryBody(), PrefixDeclarationRegistry.XSD_STRING); + return Expressions.makeFact(predicate, endpointTerm, variablesTerm, patternTerm); + } + +} diff --git a/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java new file mode 100644 index 000000000..8f698e45b --- /dev/null +++ b/rulewerk-core/src/main/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSource.java @@ -0,0 +1,104 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +import java.io.File; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +/** + * Data source for loading data from a database created with the + * Trident RDF indexing and + * storage utility. This is the recommended data source for large RDF + * datasets in the VLog reasoner. Trident databases are generated from RDF input + * files in a batch process using the Trident tool. + * + * @author Markus Kroetzsch + * + */ +public class TridentDataSource implements ReasonerDataSource { + + /** + * The name of the predicate used for declarations of data sources of this type. 
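As a usage sketch for the class above (the Wikidata endpoint is an example choice; the query pattern is the one used in the tests later in this patch), constructing such a data source and checking its required arity:

```java
import java.net.URL;

import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;

public class SparqlSourceExample {
	public static void main(String[] args) throws Exception {
		// One answer variable ("human"), bound by the WHERE-clause pattern:
		SparqlQueryResultDataSource source = new SparqlQueryResultDataSource(
				new URL("https://query.wikidata.org/sparql"), "human", "?human wdt:P31 wd:Q5 .");
		System.out.println(source.getRequiredArity()); // Optional[1]
		System.out.println(source.getQueryVariables()); // human
	}
}
```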
+ */
+	public static final String declarationPredicateName = "trident";
+
+	final String filePath;
+	final String fileName;
+
+	public TridentDataSource(final String filePath) throws IOException {
+		Validate.notBlank(filePath, "Path to Trident database cannot be blank!");
+		this.filePath = filePath; // unmodified file path, necessary for correct serialisation
+		this.fileName = new File(filePath).getCanonicalPath();
+	}
+
+	public String getPath() {
+		return this.filePath;
+	}
+
+	public String getName() {
+		return this.fileName;
+	}
+
+	@Override
+	public Fact getDeclarationFact() {
+		Predicate predicate = Expressions.makePredicate(declarationPredicateName, 1);
+		return Expressions.makeFact(predicate,
+				Expressions.makeDatatypeConstant(filePath, PrefixDeclarationRegistry.XSD_STRING));
+	}
+
+	@Override
+	public String toString() {
+		return "TridentDataSource [tridentFile=" + this.fileName + "]";
+	}
+
+	@Override
+	public void accept(DataSourceConfigurationVisitor visitor) throws IOException {
+		visitor.visit(this);
+	}
+
+	@Override
+	public int hashCode() {
+		return this.filePath.hashCode();
+	}
+
+	@Override
+	public boolean equals(final Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (!(obj instanceof TridentDataSource)) {
+			return false;
+		}
+		final TridentDataSource other = (TridentDataSource) obj;
+		return this.fileName.equals(other.getName());
+	}
+
+}
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java
new file mode 100644
index 000000000..66ae2f550
--- /dev/null
+++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ArgumentTest.java
@@ -0,0 +1,87 @@
+package org.semanticweb.rulewerk.core.model.implementation;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ +import static org.junit.Assert.*; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class ArgumentTest { + private static final Term TERM = Expressions.makeDatatypeConstant("some string", + PrefixDeclarationRegistry.XSD_STRING); + private static final PositiveLiteral LITERAL = Expressions.makePositiveLiteral("p", TERM); + private static final Rule RULE = Expressions.makeRule(LITERAL, LITERAL); + + private static final Argument TERM_ARGUMENT = Argument.term(TERM); + private static final Argument LITERAL_ARGUMENT = Argument.positiveLiteral(LITERAL); + private static final Argument RULE_ARGUMENT = Argument.rule(RULE); + + @Test + public void equals_null_returnsFalse() { + assertFalse(LITERAL_ARGUMENT.equals(null)); + assertFalse(RULE_ARGUMENT.equals(null)); + assertFalse(TERM_ARGUMENT.equals(null)); + } + + @Test + public void equals_self_returnsTrue() { + assertTrue(RULE_ARGUMENT.equals(RULE_ARGUMENT)); + assertTrue(LITERAL_ARGUMENT.equals(LITERAL_ARGUMENT)); + assertTrue(TERM_ARGUMENT.equals(TERM_ARGUMENT)); + } + + @Test + public void equals_equal_returnsTrue() { + assertTrue(RULE_ARGUMENT.equals(Argument.rule(RULE))); + assertTrue(LITERAL_ARGUMENT.equals(Argument.positiveLiteral(LITERAL))); + assertTrue(TERM_ARGUMENT.equals(Argument.term(TERM))); + } + + @Test + public void equals_notEqualButSameType_returnsFalse() { + assertFalse(RULE_ARGUMENT.equals(Argument.rule(Expressions.makeRule(LITERAL, LITERAL, LITERAL)))); + assertFalse(LITERAL_ARGUMENT.equals(Argument.positiveLiteral(Expressions.makePositiveLiteral("q", TERM)))); + assertFalse(TERM_ARGUMENT + .equals(Argument.term(Expressions.makeDatatypeConstant("another string", "https://example.com")))); + } + + @Test + public void equals_differentType_returnsFalse() { + assertFalse(RULE_ARGUMENT.equals(LITERAL_ARGUMENT)); + assertFalse(RULE_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(LITERAL_ARGUMENT.equals(RULE_ARGUMENT)); + assertFalse(LITERAL_ARGUMENT.equals(TERM_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(RULE_ARGUMENT)); + assertFalse(TERM_ARGUMENT.equals(LITERAL_ARGUMENT)); + } + + @Test + public void equals_String_returnsFalse() { + assertFalse(RULE_ARGUMENT.equals((Object) "test")); + assertFalse(LITERAL_ARGUMENT.equals((Object) "test")); + assertFalse(TERM_ARGUMENT.equals((Object) "test")); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java new file mode 100644 index 000000000..9acc89f28 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/ConjunctionImplTest.java @@ -0,0 +1,201 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.mockito.internal.util.collections.Sets; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class ConjunctionImplTest { + + @Test + public void testGettersLiterals() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Variable z = Expressions.makeExistentialVariable("Z"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Literal positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final NegativeLiteral negativeLiteral2 = Expressions.makeNegativeLiteral("p", y, x); + final Literal positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final Literal positiveLiteral4 = Expressions.makePositiveLiteral("q", y, d, z); + final List literalList = Arrays.asList(positiveLiteral1, negativeLiteral2, positiveLiteral3, + positiveLiteral4); + + final Conjunction conjunction = new ConjunctionImpl<>(literalList); + + assertEquals(literalList, conjunction.getLiterals()); + assertEquals(Sets.newSet(x, y, z), conjunction.getVariables().collect(Collectors.toSet())); + assertEquals(Sets.newSet(x, y), conjunction.getUniversalVariables().collect(Collectors.toSet())); + assertEquals(Sets.newSet(z), conjunction.getExistentialVariables().collect(Collectors.toSet())); + assertEquals(Sets.newSet(), conjunction.getNamedNulls().collect(Collectors.toSet())); + assertEquals(Sets.newSet(c, d), conjunction.getAbstractConstants().collect(Collectors.toSet())); + } + + @Test + public void testEqualsPositiveLiterals() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final List positiveLiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, + positiveLiteral3); + final Conjunction conjunction1 = new ConjunctionImpl<>(positiveLiteralList); + final Conjunction conjunction2 = Expressions.makePositiveConjunction(positiveLiteral1, + 
positiveLiteral2, positiveLiteral3); + final Conjunction conjunction3 = Expressions.makeConjunction(positiveLiteral1, positiveLiteral2, + positiveLiteral3); + final Conjunction conjunction4 = Expressions.makePositiveConjunction(positiveLiteral1, + positiveLiteral3, positiveLiteral2); + + assertEquals(conjunction1, conjunction1); + assertEquals(conjunction2, conjunction1); + assertEquals(conjunction3, conjunction1); + assertEquals(conjunction2.hashCode(), conjunction1.hashCode()); + assertEquals(conjunction3.hashCode(), conjunction1.hashCode()); + assertNotEquals(conjunction4, conjunction1); + assertNotEquals(conjunction4.hashCode(), conjunction1.hashCode()); + assertFalse(conjunction1.equals(null)); + assertFalse(conjunction1.equals(c)); + } + + @Test + public void testEqualsNegativeLiterals() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final NegativeLiteral negativeLiteral1 = Expressions.makeNegativeLiteral("p", x, c); + final NegativeLiteral negativeLiteral2 = Expressions.makeNegativeLiteral("p", y, x); + final NegativeLiteral negativeLiteral3 = Expressions.makeNegativeLiteral("q", x, d); + final List negativeLiteralList = Arrays.asList(negativeLiteral1, negativeLiteral2, + negativeLiteral3); + final Conjunction conjunction1 = new ConjunctionImpl<>(negativeLiteralList); + final Conjunction conjunction2 = Expressions.makeConjunction(negativeLiteral1, negativeLiteral2, + negativeLiteral3); + final Conjunction conjunction3 = Expressions.makeConjunction(negativeLiteral1, negativeLiteral3, + negativeLiteral2); + + assertEquals(conjunction1, conjunction1); + assertEquals(conjunction2, conjunction1); + assertEquals(conjunction2.hashCode(), conjunction1.hashCode()); + assertNotEquals(conjunction3, conjunction1); + assertNotEquals(conjunction3.hashCode(), conjunction1.hashCode()); + assertFalse(conjunction1.equals(null)); + assertFalse(conjunction1.equals(c)); + } + + @Test + public void testEqualsLiterals() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final NegativeLiteral negativeLiteral1 = Expressions.makeNegativeLiteral("p", x, c); + final ConjunctionImpl conjunction1 = new ConjunctionImpl<>( + Arrays.asList(positiveLiteral1, negativeLiteral1)); + + final Literal positiveLiteral2 = Expressions.makePositiveLiteral("p", x, c); + final Literal negativeLiteral2 = Expressions.makeNegativeLiteral("p", x, c); + final ConjunctionImpl conjunction2 = new ConjunctionImpl<>( + Arrays.asList(positiveLiteral2, negativeLiteral2)); + + assertEquals(conjunction1, conjunction1); + assertEquals(conjunction2, conjunction1); + assertEquals(conjunction2.hashCode(), conjunction1.hashCode()); + + } + + @Test(expected = NullPointerException.class) + public void literalsNotNull() { + new ConjunctionImpl(null); + } + + @Test(expected = NullPointerException.class) + public void positiveLiteralsNotNull() { + new ConjunctionImpl(null); + } + + @Test(expected = IllegalArgumentException.class) + public void positiveLiteralsNoNullElements() { + final Variable x = Expressions.makeUniversalVariable("X"); + final PositiveLiteral positiveLiteral = Expressions.makePositiveLiteral("p", x); + final List positiveLiteralList = Arrays.asList(positiveLiteral, null); + 
Expressions.makeConjunction(positiveLiteralList); + } + + @Test(expected = IllegalArgumentException.class) + public void literalsNoNullElements() { + final Variable x = Expressions.makeUniversalVariable("X"); + final NegativeLiteral negativeLiteral = Expressions.makeNegativeLiteral("p", x); + final PositiveLiteral positiveLiteral = Expressions.makePositiveLiteral("p", x); + final List literalList = Arrays.asList(negativeLiteral, positiveLiteral, null); + Expressions.makeConjunction(literalList); + } + + @Test(expected = NullPointerException.class) + public void negativeLiteralsNotNull() { + new ConjunctionImpl(null); + } + + @Test(expected = IllegalArgumentException.class) + public void negativeLiteralsNoNullElements() { + final Variable x = Expressions.makeUniversalVariable("X"); + final NegativeLiteral negativeLiteral = Expressions.makeNegativeLiteral("p", x); + final List negativeLiteralList = Arrays.asList(negativeLiteral, null); + Expressions.makeConjunction(negativeLiteralList); + } + + @Test + public void conjunctionToStringTest() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c); + final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y, x); + final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d); + final NegativeLiteral NegativeLiteral = Expressions.makeNegativeLiteral("r", x, d); + final PositiveLiteral PositiveLiteral4 = Expressions.makePositiveLiteral("s", c, d); + final List LiteralList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3, + NegativeLiteral, PositiveLiteral4); + final Conjunction conjunction1 = new ConjunctionImpl<>(LiteralList); + assertEquals("p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, d)", conjunction1.toString()); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java new file mode 100644 index 000000000..9df9cd3e0 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/DataSourceDeclarationTest.java @@ -0,0 +1,119 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;
+
+public class DataSourceDeclarationTest {
+
+	@Test
+	public void testEquality() throws MalformedURLException {
+		final DataSource dataSource1 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var",
+				"?var wdt:P31 wd:Q5 .");
+		final Predicate predicate1 = Expressions.makePredicate("p", 3);
+		final DataSourceDeclaration dataSourceDeclaration1 = new DataSourceDeclarationImpl(predicate1, dataSource1);
+		final DataSource dataSource2 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var",
+				"?var wdt:P31 wd:Q5 .");
+		final Predicate predicate2 = Expressions.makePredicate("p", 3);
+		final DataSourceDeclaration dataSourceDeclaration2 = new DataSourceDeclarationImpl(predicate2, dataSource2);
+
+		final DataSource dataSource3 = new SparqlQueryResultDataSource(new URL("https://example.org/"), "var2",
+				"?var2 wdt:P31 wd:Q5 .");
+		final DataSourceDeclaration dataSourceDeclaration3 = new DataSourceDeclarationImpl(predicate2, dataSource3);
+
+		final Predicate predicate4 = Expressions.makePredicate("q", 1);
+		final DataSourceDeclaration dataSourceDeclaration4 = new DataSourceDeclarationImpl(predicate4, dataSource2);
+
+		assertEquals(dataSourceDeclaration1, dataSourceDeclaration1);
+		assertEquals(dataSourceDeclaration1, dataSourceDeclaration2);
+		assertEquals(dataSourceDeclaration1.hashCode(), dataSourceDeclaration2.hashCode());
+		assertNotEquals(dataSourceDeclaration1, dataSource1);
+		assertNotEquals(dataSourceDeclaration1, dataSourceDeclaration3);
+		assertNotEquals(dataSourceDeclaration1, dataSourceDeclaration4);
+		assertFalse(dataSourceDeclaration1.equals(null)); // written like this for recording coverage properly
+	}
+
+	@Test
+	public void toString_SparqlQueryResultDataSource() throws IOException {
+		final Predicate predicate = Expressions.makePredicate("p", 3);
+		final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(
+				new URL("https://example.org/sparql"), "var", "?var wdt:P31 wd:Q5 .");
+
+		final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, dataSource);
+		assertEquals("@source p[3]: sparql(<https://example.org/sparql>, \"var\", \"?var wdt:P31 wd:Q5 .\") .",
+				dataSourceDeclaration.toString());
+
+	}
+
+	@Test
+	public void toString_CsvFileDataSource() throws IOException {
+		final Predicate predicate2 = Expressions.makePredicate("q", 1);
+		final String relativeDirName = "dir/";
+		final String fileName = "file.csv";
+
+		final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(relativeDirName + fileName);
+		final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate2,
+				unzippedCsvFileDataSource);
+
+		final String
expectedFilePath = "\"" + relativeDirName + fileName + "\""; + assertEquals("@source q[1]: load-csv(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); + } + + @Test + public void toString_CsvFileDataSource_absolutePath_windowsPathSeparator() throws IOException { + final Predicate predicate = Expressions.makePredicate("q", 1); + final String absoluteFilePathWindows = "D:\\input\\file.csv"; + final String escapedPath = absoluteFilePathWindows.replace("\\", "\\\\"); + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(absoluteFilePathWindows); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, + unzippedCsvFileDataSource); + assertEquals("@source q[1]: load-csv(\"" + escapedPath + "\") .", dataSourceDeclaration.toString()); + } + + @Test + public void toString_RdfFileDataSource_relativePath() throws IOException { + final Predicate predicate = Expressions.makePredicate("q", 1); + final String relativeDirName = "dir/"; + final String fileName = "file.nt"; + final String unzippedRdfFile = relativeDirName + fileName; + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate, + unzippedRdfFileDataSource); + + final String expectedFilePath = "\"" + relativeDirName + fileName + "\""; + assertEquals("@source q[1]: load-rdf(" + expectedFilePath + ") .", dataSourceDeclaration.toString()); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java new file mode 100644 index 000000000..1aa8017de --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/FactTest.java @@ -0,0 +1,65 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +import static org.junit.Assert.assertEquals; + +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.FactImpl; + +public class FactTest { + + @Test + public void factsConstructor() { + final Predicate p = Expressions.makePredicate("p", 2); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); + final Fact f2 = Expressions.makeFact("p", Arrays.asList(c, d)); + final Fact f3 = new FactImpl(p, Arrays.asList(c, d)); + assertEquals(f1, f2); + assertEquals(f1, f3); + assertEquals(f2, f3); + } + + @Test(expected = IllegalArgumentException.class) + public void factsOnlyContainConstants() { + final Predicate p = Expressions.makePredicate("p", 1); + final Variable x = Expressions.makeUniversalVariable("X"); + new FactImpl(p, Arrays.asList(x)); + } + + @Test + public void factToStringTest() { + final Predicate p = Expressions.makePredicate("p", 2); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final Fact f1 = Expressions.makeFact(p, Arrays.asList(c, d)); + assertEquals("p(c, d) .", f1.toString()); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java new file mode 100644 index 000000000..946d2bf17 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/MergingPrefixDeclarationRegistryTest.java @@ -0,0 +1,238 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import java.util.stream.StreamSupport; + +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; + +public class MergingPrefixDeclarationRegistryTest { + private MergingPrefixDeclarationRegistry prefixDeclarations; + + private static final String BASE = "https://example.org/"; + private static final String UNRELATED = "https://example.com/"; + private static final String MORE_SPECIFIC = BASE + "example/"; + private static final String EVEN_MORE_SPECIFIC = MORE_SPECIFIC + "relative/"; + private static final String RELATIVE = "relative/test"; + + @Before + public void init() { + prefixDeclarations = new MergingPrefixDeclarationRegistry(); + } + + @Test + public void setBaseIri_changingBase_succeeds() { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); + prefixDeclarations.setBaseIri(MORE_SPECIFIC); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getBaseIri()); + } + + @Test + public void setBaseIri_redeclareSameBase_succeeds() { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE, prefixDeclarations.getBaseIri()); + } + + @Test + public void absolutizeIri_noBase_identical() throws PrefixDeclarationException { + assertEquals(RELATIVE, prefixDeclarations.absolutizeIri(RELATIVE)); + } + + @Test + public void absolutizeIri_base_absoluteIri() throws PrefixDeclarationException { + prefixDeclarations.setBaseIri(BASE); + assertEquals(BASE + RELATIVE, prefixDeclarations.absolutizeIri(RELATIVE)); + } + + @Test + public void absolutizeIri_absoluteIri_identical() throws PrefixDeclarationException { + assertEquals(BASE, prefixDeclarations.absolutizeIri(BASE)); + } + + @Test(expected = PrefixDeclarationException.class) + public void resolvePrefixedName_undeclaredPrefix_throws() throws PrefixDeclarationException { + prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE); + } + + @Test + public void resolvePrefixedName_knownPrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals(BASE + RELATIVE, prefixDeclarations.resolvePrefixedName("eg:" + RELATIVE)); + } + + @Test + public void resolvePrefixedName_unresolveAbsoluteIri_doesRoundTrip() throws PrefixDeclarationException { + String prefix = "eg:"; + prefixDeclarations.setPrefixIri(prefix, BASE); + String resolved = BASE + RELATIVE; + String unresolved = prefixDeclarations.unresolveAbsoluteIri(resolved, false); + assertEquals(resolved, prefixDeclarations.resolvePrefixedName(unresolved)); + } + + @Test + public void setPrefixIri_redeclarePrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); + assertEquals(2, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); + } + + @Test + public void clearPrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("another:", MORE_SPECIFIC); + prefixDeclarations.clear(); + assertEquals(0, 
StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); + } + + @Test + public void setPrefixIri_setSamePrefix_succeeds() throws PrefixDeclarationException { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals(BASE, prefixDeclarations.getPrefixIri("eg:")); + assertEquals(1, StreamSupport.stream(prefixDeclarations.spliterator(), false).count()); + } + + @Test + public void getFreshPrefix_registeredPrefix_returnsFreshPrefix() throws PrefixDeclarationException { + String prefix = "rw_gen"; + prefixDeclarations.setPrefixIri(prefix + "0:", BASE + "generated/"); + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefixIri(prefix + "1:")); + } + + @Test + public void mergingPrefixDeclarationRegistry_constructor_succeeds() throws PrefixDeclarationException { + this.prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry( + this.prefixDeclarations); + assertEquals(MORE_SPECIFIC, prefixDeclarations.getPrefixIri("eg:")); + } + + @Test + public void mergePrefixDeclarations_conflictingPrefixName_renamesConflictingPrefixName() + throws PrefixDeclarationException { + this.prefixDeclarations.setPrefixIri("eg:", BASE); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setPrefixIri("eg:", MORE_SPECIFIC); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + assertEquals(BASE, this.prefixDeclarations.getPrefixIri("eg:")); + assertEquals(MORE_SPECIFIC, this.prefixDeclarations.getPrefixIri("rw_gen0:")); + } + + @Test + public void unresolveAbsoluteIri_default_identical() { + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE, false)); + } + + @Test + public void unresolveAbsoluteIri_declaredPrefix_succeeds() { + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); + prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); + } + + @Test + public void unresolveAbsoluteIri_unrelatedPrefix_identical() { + prefixDeclarations.setPrefixIri("eg:", UNRELATED); + assertEquals(MORE_SPECIFIC, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); + } + + @Test + public void unresolveAbsoluteIri_unrelatedAndRelatedPrefixes_succeeds() { + prefixDeclarations.setPrefixIri("ex:", UNRELATED); + prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals("eg:example/", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC, false)); + } + + @Test + public void unresolveAbsoluteIri_multipleMatchingPrefixes_longestMatchWins() { + prefixDeclarations.setPrefixIri("eg:", BASE); + prefixDeclarations.setPrefixIri("ex:", MORE_SPECIFIC); + assertEquals("ex:" + RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); + prefixDeclarations.setPrefixIri("er:", EVEN_MORE_SPECIFIC); + assertEquals("er:test", prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); + } + + @Test + public void unresolveAbsoluteIri_exactPrefixMatch_identical() { + prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals(BASE, prefixDeclarations.unresolveAbsoluteIri(BASE, false)); + } + + @Test + public void unresolveAbsoluteIri_baseIsMoreSpecific_baseWins() { + prefixDeclarations.setBaseIri(MORE_SPECIFIC); + 
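// the base IRI set above is a longer match than the "eg:" prefix registered
+		// below, so unresolving yields a relative IRI rather than a prefixed name
+		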
prefixDeclarations.setPrefixIri("eg:", BASE); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(MORE_SPECIFIC + RELATIVE, false)); + } + + @Test + public void unresolveAbsoluteIri_resolvePrefixedName_doesRoundTrip() throws PrefixDeclarationException { + String prefix = "eg:"; + prefixDeclarations.setPrefixIri(prefix, BASE); + String unresolved = prefix + RELATIVE; + String resolved = prefixDeclarations.resolvePrefixedName(unresolved); + assertEquals(unresolved, prefixDeclarations.unresolveAbsoluteIri(resolved, false)); + } + + @Test + public void unresolveAbsoluteIri_relativeIriAfterMergeWithNewBase_staysRelative() + throws PrefixDeclarationException { + String relativeIri = this.prefixDeclarations.absolutizeIri(RELATIVE); + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(BASE); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + assertEquals(relativeIri, this.prefixDeclarations.unresolveAbsoluteIri(relativeIri, false)); + } + + @Test + public void unresolveAbsoluteIri_absoluteIriMergedOntoEmptyBase_staysAbsolute() throws PrefixDeclarationException { + assertEquals("", this.prefixDeclarations.getBaseIri()); // FIXME: why test this? + + PrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry(); + prefixDeclarations.setBaseIri(BASE); + String absoluteIri = prefixDeclarations.absolutizeIri(RELATIVE); + this.prefixDeclarations.mergePrefixDeclarations(prefixDeclarations); + String resolvedIri = this.prefixDeclarations.unresolveAbsoluteIri(absoluteIri, false); + + assertNotEquals(RELATIVE, resolvedIri); + assertEquals("rw_gen0:" + RELATIVE, resolvedIri); + } + + @Test + public void unresolveAbsoluteIri_relativeIriAfterClear_succeeds() throws PrefixDeclarationException { + prefixDeclarations.clear(); + assertEquals(RELATIVE, prefixDeclarations.unresolveAbsoluteIri(RELATIVE, true)); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java new file mode 100644 index 000000000..475f1ab8b --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/NegativeLiteralImplTest.java @@ -0,0 +1,156 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; + +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.NegativeLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; + +public class NegativeLiteralImplTest { + + @Test + public void testGetters() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final NegativeLiteral atomP = Expressions.makeNegativeLiteral("p", x, c, d, y); + final NegativeLiteral atomQ = Expressions.makeNegativeLiteral("q", c, d); + + assertEquals("p", atomP.getPredicate().getName()); + assertEquals(atomP.getArguments().size(), atomP.getPredicate().getArity()); + + assertEquals(Arrays.asList(x, c, d, y), atomP.getArguments()); + + assertEquals("q", atomQ.getPredicate().getName()); + assertEquals(atomQ.getArguments().size(), atomQ.getPredicate().getArity()); + + assertEquals(Arrays.asList(c, d), atomQ.getArguments()); + + assertTrue(atomP.isNegated()); + assertTrue(atomQ.isNegated()); + } + + @Test + public void testEquals() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + + final Predicate predicateP = new PredicateImpl("p", 2); + final Predicate predicateQ = new PredicateImpl("q", 2); + + final Literal atom1 = Expressions.makeNegativeLiteral("p", Arrays.asList(x, c)); + final Literal atom2 = Expressions.makeNegativeLiteral("p", x, c); + final Literal atom3 = new NegativeLiteralImpl(predicateP, Arrays.asList(x, c)); + final Literal atom4 = new NegativeLiteralImpl(predicateQ, Arrays.asList(x, c)); + final Literal atom5 = new NegativeLiteralImpl(predicateP, Arrays.asList(c, x)); + + assertEquals(atom1, atom1); + assertEquals(atom1, atom2); + assertEquals(atom1, atom3); + assertEquals(atom1.hashCode(), atom1.hashCode()); + assertNotEquals(atom4, atom1); + assertNotEquals(atom4.hashCode(), atom1.hashCode()); + assertNotEquals(atom5, atom1); + assertNotEquals(atom5.hashCode(), atom1.hashCode()); + assertFalse(atom1.equals(null)); + assertFalse(atom1.equals(c)); + + assertNotEquals(atom1, new PositiveLiteralImpl(atom1.getPredicate(), atom1.getArguments())); + assertNotEquals(atom2, new PositiveLiteralImpl(atom2.getPredicate(), atom2.getArguments())); + assertNotEquals(atom3, new PositiveLiteralImpl(atom3.getPredicate(), atom3.getArguments())); + assertNotEquals(atom4, new PositiveLiteralImpl(atom4.getPredicate(), atom4.getArguments())); + assertNotEquals(atom5, new PositiveLiteralImpl(atom5.getPredicate(), atom5.getArguments())); + } + + @Test(expected = NullPointerException.class) + public void termsNotNull() { + final Predicate predicate1 = Expressions.makePredicate("p", 1); + new NegativeLiteralImpl(predicate1, null); + } + + 
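// Arrays.asList accepts null elements, so the literal constructor itself
+	// must detect and reject them
+	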
@Test(expected = IllegalArgumentException.class) + public void termsNoNullElements() { + final Predicate predicate1 = Expressions.makePredicate("p", 1); + final Variable x = Expressions.makeUniversalVariable("X"); + new NegativeLiteralImpl(predicate1, Arrays.asList(x, null)); + } + + @Test(expected = IllegalArgumentException.class) + public void termsNonEmpty() { + Expressions.makeNegativeLiteral("p"); + } + + @Test(expected = NullPointerException.class) + public void predicateNotNull() { + final Predicate nullPredicate = null; + Expressions.makeNegativeLiteral(nullPredicate, Expressions.makeAbstractConstant("c")); + } + + @Test(expected = NullPointerException.class) + public void predicateNameNotNull() { + final String nullPredicateName = null; + Expressions.makeNegativeLiteral(nullPredicateName, Expressions.makeAbstractConstant("c")); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotEmpty() { + Expressions.makeNegativeLiteral("", Expressions.makeAbstractConstant("c")); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotWhitespace() { + Expressions.makeNegativeLiteral(" ", Expressions.makeAbstractConstant("c")); + } + + @Test(expected = IllegalArgumentException.class) + public void termSizeMatchesPredicateArity() { + final Predicate predicateArity1 = Expressions.makePredicate("p", 1); + Expressions.makeNegativeLiteral(predicateArity1, Expressions.makeAbstractConstant("c"), + Expressions.makeUniversalVariable("X")); + } + + @Test + public void negativeLiteralTostringTest() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Predicate predicateP = new PredicateImpl("p", 2); + final Literal atom2 = Expressions.makeNegativeLiteral("p", x, c); + final Literal atom3 = new NegativeLiteralImpl(predicateP, Arrays.asList(x, c)); + assertEquals("~p(?X, c)", atom2.toString()); + assertEquals("~p(?X, c)", atom3.toString()); + + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java new file mode 100644 index 000000000..c0613996c --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PositiveLiteralImplTest.java @@ -0,0 +1,155 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.NegativeLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; + +public class PositiveLiteralImplTest { + + @Test + public void testGetters() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Constant d = Expressions.makeAbstractConstant("d"); + final PositiveLiteral atomP = Expressions.makePositiveLiteral("p", x, c, d, y); + final PositiveLiteral atomQ = Expressions.makePositiveLiteral("q", c, d); + + assertEquals("p", atomP.getPredicate().getName()); + assertEquals(atomP.getArguments().size(), atomP.getPredicate().getArity()); + + assertEquals(Arrays.asList(x, c, d, y), atomP.getArguments()); + + assertEquals("q", atomQ.getPredicate().getName()); + assertEquals(atomQ.getArguments().size(), atomQ.getPredicate().getArity()); + + assertEquals(Arrays.asList(c, d), atomQ.getArguments()); + + assertFalse(atomP.isNegated()); + assertFalse(atomQ.isNegated()); + } + + @Test + public void testEquals() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + + final Predicate predicateP = new PredicateImpl("p", 2); + final Predicate predicateQ = new PredicateImpl("q", 2); + + final Literal atom1 = Expressions.makePositiveLiteral("p", Arrays.asList(x, c)); + final Literal atom2 = Expressions.makePositiveLiteral("p", x, c); + final Literal atom3 = new PositiveLiteralImpl(predicateP, Arrays.asList(x, c)); + final Literal atom4 = new PositiveLiteralImpl(predicateQ, Arrays.asList(x, c)); + final Literal atom5 = new PositiveLiteralImpl(predicateP, Arrays.asList(c, x)); + + assertEquals(atom1, atom1); + assertEquals(atom1, atom2); + assertEquals(atom1, atom3); + assertEquals(atom1.hashCode(), atom1.hashCode()); + assertNotEquals(atom4, atom1); + assertNotEquals(atom4.hashCode(), atom1.hashCode()); + assertNotEquals(atom5, atom1); + assertNotEquals(atom5.hashCode(), atom1.hashCode()); + assertFalse(atom1.equals(null)); + assertFalse(atom1.equals(c)); + + assertNotEquals(atom1, new NegativeLiteralImpl(atom1.getPredicate(), atom1.getArguments())); + assertNotEquals(atom2, new NegativeLiteralImpl(atom2.getPredicate(), atom2.getArguments())); + assertNotEquals(atom3, new NegativeLiteralImpl(atom3.getPredicate(), atom3.getArguments())); + assertNotEquals(atom4, new NegativeLiteralImpl(atom4.getPredicate(), atom4.getArguments())); + assertNotEquals(atom5, new NegativeLiteralImpl(atom5.getPredicate(), atom5.getArguments())); + } + + @Test(expected = NullPointerException.class) + public void termsNotNull() { + final Predicate predicate1 = Expressions.makePredicate("p", 1); + new PositiveLiteralImpl(predicate1, null); + } + + @Test(expected = IllegalArgumentException.class) + 
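// as for negative literals: a null element inside the argument list must
+	// already fail at construction time
+	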
public void termsNoNullElements() { + final Predicate predicate1 = Expressions.makePredicate("p", 1); + final Variable x = Expressions.makeUniversalVariable("X"); + new PositiveLiteralImpl(predicate1, Arrays.asList(x, null)); + } + + @Test(expected = IllegalArgumentException.class) + public void termsNonEmpty() { + Expressions.makePositiveLiteral("p"); + } + + @Test(expected = NullPointerException.class) + public void predicateNotNull() { + final Predicate nullPredicate = null; + Expressions.makePositiveLiteral(nullPredicate, Expressions.makeAbstractConstant("c")); + } + + @Test(expected = NullPointerException.class) + public void predicateNameNotNull() { + final String nullPredicateName = null; + Expressions.makePositiveLiteral(nullPredicateName, Expressions.makeAbstractConstant("c")); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotEmpty() { + Expressions.makePositiveLiteral("", Expressions.makeAbstractConstant("c")); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotWhitespace() { + Expressions.makePositiveLiteral(" ", Expressions.makeAbstractConstant("c")); + } + + @Test(expected = IllegalArgumentException.class) + public void termSizeMatchesPredicateArity() { + final Predicate predicateArity1 = Expressions.makePredicate("p", 1); + Expressions.makePositiveLiteral(predicateArity1, Expressions.makeAbstractConstant("c"), + Expressions.makeUniversalVariable("X")); + } + + @Test + public void positiveLiteralTostringTest() { + final Variable x = Expressions.makeUniversalVariable("X"); + final Constant c = Expressions.makeAbstractConstant("c"); + final Predicate predicateP = new PredicateImpl("p", 2); + final Literal atom2 = Expressions.makePositiveLiteral("p", x, c); + final Literal atom3 = new PositiveLiteralImpl(predicateP, Arrays.asList(x, c)); + assertEquals("p(?X, c)", atom2.toString()); + assertEquals("p(?X, c)", atom3.toString()); + + } + +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java similarity index 80% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java index 30862eb5e..f77d4f24f 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/PredicateImplTest.java +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/PredicateImplTest.java @@ -1,76 +1,82 @@ -package org.semanticweb.vlog4j.core.model; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; - -public class PredicateImplTest { - - @Test - public void testEquals() { - final Predicate p1 = new PredicateImpl("p", 1); - final Predicate p1too = Expressions.makePredicate("p", 1); - final Predicate p2 = new PredicateImpl("p", 2); - final Predicate q1 = new PredicateImpl("q", 1); - - assertEquals(p1, p1); - assertEquals(p1too, p1); - assertNotEquals(p2, p1); - assertNotEquals(q1, p1); - assertNotEquals(p2.hashCode(), p1.hashCode()); - assertNotEquals(q1.hashCode(), p1.hashCode()); - assertFalse(p1.equals(null)); // written like this for recording coverage properly - assertFalse(p1.equals("p")); // written like this for recording coverage properly - } - - @Test(expected = NullPointerException.class) - public void predicateNameNotNull() { - new PredicateImpl(null, 2); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotEmpty() { - new PredicateImpl("", 1); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotWhitespace() { - new PredicateImpl(" ", 1); - } - - @Test(expected = IllegalArgumentException.class) - public void arityNegative() { - new PredicateImpl("p", -1); - } - - @Test(expected = IllegalArgumentException.class) - public void arityZero() { - new PredicateImpl("p", 0); - } - -} +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; + +public class PredicateImplTest { + + @Test + public void testEquals() { + final Predicate p1 = new PredicateImpl("p", 1); + final Predicate p1too = Expressions.makePredicate("p", 1); + final Predicate p2 = new PredicateImpl("p", 2); + final Predicate q1 = new PredicateImpl("q", 1); + + assertEquals(p1, p1); + assertEquals(p1too, p1); + assertNotEquals(p2, p1); + assertNotEquals(q1, p1); + assertNotEquals(p2.hashCode(), p1.hashCode()); + assertNotEquals(q1.hashCode(), p1.hashCode()); + assertFalse(p1.equals(null)); // written like this for recording coverage properly + assertFalse(p1.equals("p")); // written like this for recording coverage properly + } + + @Test(expected = NullPointerException.class) + public void predicateNameNotNull() { + new PredicateImpl(null, 2); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotEmpty() { + new PredicateImpl("", 1); + } + + @Test(expected = IllegalArgumentException.class) + public void predicateNameNotWhitespace() { + new PredicateImpl(" ", 1); + } + + @Test(expected = IllegalArgumentException.class) + public void arityNegative() { + new PredicateImpl("p", -1); + } + + @Test(expected = IllegalArgumentException.class) + public void arityZero() { + new PredicateImpl("p", 0); + } + + @Test + public void predicateToStringTest() { + final Predicate p1 = new PredicateImpl("p", 1); + assertEquals("p[1]", p1.toString()); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java new file mode 100644 index 000000000..6d234958c --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/RuleImplTest.java @@ -0,0 +1,176 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Conjunction;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.NegativeLiteral;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl;
+import org.semanticweb.rulewerk.core.model.implementation.RuleImpl;
+
+public class RuleImplTest {
+
+	@Test
+	public void testGetters() {
+		final Variable x = Expressions.makeUniversalVariable("X");
+		final Variable y = Expressions.makeExistentialVariable("Y");
+		final Variable z = Expressions.makeUniversalVariable("Z");
+		final Constant c = Expressions.makeAbstractConstant("c");
+		final Constant d = Expressions.makeAbstractConstant("d");
+		final Literal atom1 = Expressions.makePositiveLiteral("p", x, c);
+		final Literal atom2 = Expressions.makePositiveLiteral("p", x, z);
+		final PositiveLiteral atom3 = Expressions.makePositiveLiteral("q", x, y);
+		final PositiveLiteral atom4 = Expressions.makePositiveLiteral("r", x, d);
+		final Conjunction<Literal> body = Expressions.makeConjunction(atom1, atom2);
+		final Conjunction<PositiveLiteral> head = Expressions.makePositiveConjunction(atom3, atom4);
+		final Rule rule = Expressions.makeRule(head, body);
+
+		assertEquals(body, rule.getBody());
+		assertEquals(head, rule.getHead());
+	}
+
+	@Test
+	public void testEquals() {
+		final Variable x = Expressions.makeUniversalVariable("X");
+		final Variable y = Expressions.makeExistentialVariable("Y");
+		final Variable z = Expressions.makeUniversalVariable("Z");
+		final Constant c = Expressions.makeAbstractConstant("c");
+
+		final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c);
+		final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z);
+		final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y);
+
+		final Conjunction<Literal> bodyLiterals = Expressions.makeConjunction(atom1, atom2);
+		final Conjunction<PositiveLiteral> headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1);
+
+		final Conjunction<PositiveLiteral> bodyPositiveLiterals = Expressions.makePositiveConjunction(atom1, atom2);
+
+		final Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals);
+		final Rule rule2 = Expressions.makeRule(headAtom1, atom1, atom2);
+
+		final Rule rule6 = Expressions.makeRule(headAtom1, atom1, atom2);
+		final Rule rule7 = Expressions.makeRule(headAtom1, atom1, atom2);
+		final Rule rule8 = Expressions.makePositiveLiteralsRule(headPositiveLiterals, bodyPositiveLiterals);
+
+		assertEquals(rule1, rule1);
+		assertEquals(rule2, rule1);
+		assertEquals(rule2.hashCode(), rule1.hashCode());
+
+		assertEquals(rule6, rule1);
+		assertEquals(rule6.hashCode(), rule1.hashCode());
+		assertEquals(rule7, rule1);
+		assertEquals(rule7.hashCode(), rule1.hashCode());
+		assertEquals(rule8, rule1);
+		assertEquals(rule8.hashCode(), rule1.hashCode());
+
+		final Rule rule4 = new RuleImpl(bodyPositiveLiterals, bodyLiterals);
+		final Rule rule5 = new RuleImpl(bodyPositiveLiterals, bodyLiterals);
+
+		assertNotEquals(rule4, rule1);
+		assertNotEquals(rule5, rule1);
+		assertFalse(rule1.equals(null));
+		assertFalse(rule1.equals(c));
+
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void bodyNonEmpty() {
+		Expressions.makeRule(Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("X")));
+	}
+
+	@Test(expected = NullPointerException.class)
+	public void bodyNotNull() {
+		final Conjunction<PositiveLiteral> head = Expressions
+				.makePositiveConjunction(Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("X")));
+		Expressions.makeRule(head, null);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void headNonEmpty() {
+		final Literal literal = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("X"));
+		final Conjunction<Literal> body = Expressions.makeConjunction(literal);
+		Expressions.makeRule(Expressions.makePositiveConjunction(), body);
+	}
+
+	@Test(expected = NullPointerException.class)
+	public void headNotNull() {
+		final Literal literal = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("X"));
+		final Conjunction<Literal> body = Expressions.makeConjunction(literal);
+		Expressions.makeRule(null, body);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void noExistentialInBody() {
+		final Literal literal1 = Expressions.makePositiveLiteral("p", Expressions.makeExistentialVariable("X"));
+		final PositiveLiteral literal2 = Expressions.makePositiveLiteral("q", Expressions.makeUniversalVariable("Y"));
+		Expressions.makeRule(literal2, literal1);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void noUnsafeVariables() {
+		final PositiveLiteral literal1 = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("X"));
+		final Literal literal2 = Expressions.makePositiveLiteral("q", Expressions.makeUniversalVariable("Y"));
+		Expressions.makeRule(literal1, literal2);
+	}
+
+	@Test
+	public void ruleToStringTest() {
+		final Variable x = Expressions.makeUniversalVariable("X");
+		final Variable y = Expressions.makeExistentialVariable("Y");
+		final Variable z = Expressions.makeUniversalVariable("Z");
+		final Variable y2 = Expressions.makeUniversalVariable("Y");
+		final Constant d = Expressions.makeAbstractConstant("d");
+		final Constant c = Expressions.makeAbstractConstant("c");
+		final LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en");
+		final PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c);
+		final PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, z);
+		final PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, y);
+		final PositiveLiteral positiveLiteral1 = Expressions.makePositiveLiteral("p", x, c);
+		final PositiveLiteral positiveLiteral2 = Expressions.makePositiveLiteral("p", y2, x);
+		final PositiveLiteral positiveLiteral3 = Expressions.makePositiveLiteral("q", x, d);
+		final NegativeLiteral negativeLiteral = Expressions.makeNegativeLiteral("r", x, d);
+		final PositiveLiteral positiveLiteral4 = Expressions.makePositiveLiteral("s", c, s);
+		final List<Literal> literalList = Arrays.asList(positiveLiteral1, positiveLiteral2, positiveLiteral3,
+				negativeLiteral, positiveLiteral4);
+		final Conjunction<Literal> bodyLiterals = Expressions.makeConjunction(atom1, atom2);
+		final Conjunction<PositiveLiteral> headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1);
+		final Conjunction<Literal> bodyConjunction = new ConjunctionImpl<>(literalList);
+		final Rule rule1 = new 
RuleImpl(headPositiveLiterals, bodyLiterals); + final Rule rule2 = new RuleImpl(headPositiveLiterals, bodyConjunction); + assertEquals("q(?X, !Y) :- p(?X, c), p(?X, ?Z) .", rule1.toString()); + assertEquals("q(?X, !Y) :- p(?X, c), p(?Y, ?X), q(?X, d), ~r(?X, d), s(c, \"Test\"@en) .", rule2.toString()); + + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java new file mode 100644 index 000000000..977160aeb --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/SerializerTest.java @@ -0,0 +1,299 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; +import java.net.URL; +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.Serializer; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class SerializerTest { + + static Term abstractConstant = Expressions.makeAbstractConstant("http://example.org/test"); + static Term abstractConstantShort = Expressions.makeAbstractConstant("c"); + static Term existentialVariable = Expressions.makeExistentialVariable("X"); + static Term universalVariable = Expressions.makeUniversalVariable("X"); + static Term languageStringConstant = Expressions.makeLanguageStringConstant("abc", "de"); + static Term datatypeConstantGeneral = Expressions.makeDatatypeConstant("abc", "http://example.org/test"); + static Term 
datatypeConstantString = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING);
+	static Term datatypeConstantInteger = Expressions.makeDatatypeConstant("123",
+			PrefixDeclarationRegistry.XSD_INTEGER);
+	static Term namedNull = new NamedNullImpl("n1");
+
+	static Predicate p1 = Expressions.makePredicate("p1", 1);
+	static Predicate p2 = Expressions.makePredicate("p2", 2);
+	static Predicate p3 = Expressions.makePredicate("p3", 3);
+
+	static Fact fact = Expressions.makeFact(p1, abstractConstantShort);
+	static PositiveLiteral l1 = Expressions.makePositiveLiteral(p1, universalVariable);
+	static Literal l2 = Expressions.makePositiveLiteral(p2, universalVariable, abstractConstantShort);
+	static Rule rule = Expressions.makeRule(l1, l2, fact);
+	static Literal ln1 = Expressions.makeNegativeLiteral(p1, existentialVariable);
+
+	StringWriter writer;
+	Serializer serializer;
+
+	@Before
+	public void init() {
+		writer = new StringWriter();
+		serializer = new Serializer(writer);
+	}
+
+	private Serializer getThrowingSerializer() throws IOException {
+		Writer writerMock = Mockito.mock(Writer.class);
+		Mockito.doThrow(IOException.class).when(writerMock).write(Mockito.anyString());
+		return new Serializer(writerMock);
+	}
+
+	@Test
+	public void serializeDatatypeConstant() throws IOException {
+		serializer.writeTerm(datatypeConstantGeneral);
+		assertEquals("\"abc\"^^<http://example.org/test>", writer.toString());
+	}
+
+	@Test
+	public void serializeDatatypeConstantString() throws IOException {
+		serializer.writeTerm(datatypeConstantString);
+		assertEquals("\"abc\"", writer.toString());
+	}
+
+	@Test
+	public void serializeDatatypeConstantInteger() throws IOException {
+		serializer.writeTerm(datatypeConstantInteger);
+		assertEquals("123", writer.toString());
+	}
+
+	@Test
+	public void serializeExistentialVariable() throws IOException {
+		serializer.writeTerm(existentialVariable);
+		assertEquals("!X", writer.toString());
+	}
+
+	@Test
+	public void serializeUniversalVariable() throws IOException {
+		serializer.writeTerm(universalVariable);
+		assertEquals("?X", writer.toString());
+	}
+
+	@Test
+	public void serializeLanguageStringConstant() throws IOException {
+		serializer.writeTerm(languageStringConstant);
+		assertEquals("\"abc\"@de", writer.toString());
+	}
+
+	@Test
+	public void serializeNamedNull() throws IOException {
+		serializer.writeTerm(namedNull);
+		assertEquals("_:n1", writer.toString());
+	}
+
+	@Test
+	public void serializeFact() throws IOException {
+		serializer.writeStatement(fact);
+		assertEquals("p1(c) .", writer.toString());
+		assertEquals("p1(c) .", Serializer.getSerialization(serializer -> serializer.writeFact(fact)));
+	}
+
+	@Test
+	public void serializeRule() throws IOException {
+		serializer.writeStatement(rule);
+		assertEquals("p1(?X) :- p2(?X, c), p1(c) .", writer.toString());
+	}
+
+	@Test
+	public void serializeCsvDataSourceDeclaration() throws IOException {
+		DataSourceDeclaration csvSourceDecl = new DataSourceDeclarationImpl(p1, new CsvFileDataSource("test.csv"));
+		serializer.writeStatement(csvSourceDecl);
+		assertEquals("@source p1[1]: load-csv(\"test.csv\") .", writer.toString());
+	}
+
+	@Test
+	public void serializeRdfDataSourceDeclaration() throws IOException {
+		DataSourceDeclaration rdfSourceDecl = new DataSourceDeclarationImpl(p3, new RdfFileDataSource("test.nt"));
+		serializer.writeStatement(rdfSourceDecl);
+		assertEquals("@source p3[3]: load-rdf(\"test.nt\") .", writer.toString());
+	}
+
+	@Test
+	public void serializeSparqlDataSourceDeclaration() throws IOException {
+		DataSourceDeclaration sparqlSourceDecl = new DataSourceDeclarationImpl(p1,
+				new SparqlQueryResultDataSource(new URL("http://example.org"), "var", "?var "));
+		serializer.writeStatement(sparqlSourceDecl);
+		assertEquals("@source p1[1]: sparql(<http://example.org>, \"var\", \"?var \") .", writer.toString());
+	}
+
+	@Test
+	public void serializePositiveLiteral() throws IOException {
+		serializer.writeLiteral(l1);
+		assertEquals("p1(?X)", writer.toString());
+	}
+
+	@Test
+	public void serializePositiveLiteralFromTerms() throws IOException {
+		serializer.writePositiveLiteral(l1.getPredicate(), l1.getArguments());
+		assertEquals("p1(?X)", writer.toString());
+	}
+
+	@Test
+	public void serializeNegativeLiteral() throws IOException {
+		serializer.writeLiteral(ln1);
+		assertEquals("~p1(!X)", writer.toString());
+	}
+
+	@Test
+	public void serializeAbstractConstantWithPrefixDeclarations() throws IOException {
+		final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry();
+		prefixes.setPrefixIri("eg:", "http://example.org/");
+		Serializer prefSerializer = new Serializer(writer, prefixes);
+
+		prefSerializer.writeTerm(abstractConstant);
+		assertEquals("eg:test", writer.toString());
+	}
+
+	@Test
+	public void serializeDatatypeConstantWithPrefixDeclarations() throws IOException {
+		final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry();
+		prefixes.setPrefixIri("eg:", "http://example.org/");
+		Serializer prefSerializer = new Serializer(writer, prefixes);
+
+		prefSerializer.writeTerm(datatypeConstantGeneral);
+		assertEquals("\"abc\"^^eg:test", writer.toString());
+	}
+
+	@Test
+	public void serializePrefixDeclarations() throws IOException {
+		final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry();
+		prefixes.setBaseIri("http://example.org/base");
+		prefixes.setPrefixIri("eg:", "http://example.org/");
+		Serializer prefSerializer = new Serializer(writer, prefixes);
+
+		boolean result = prefSerializer.writePrefixDeclarationRegistry(prefixes);
+		assertEquals("@base <http://example.org/base> .\n@prefix eg: <http://example.org/> .\n", writer.toString());
+		assertTrue(result);
+	}
+
+	@Test
+	public void serializeEmptyPrefixDeclarations() throws IOException {
+		final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry();
+		Serializer prefSerializer = new Serializer(writer, prefixes);
+
+		boolean result = prefSerializer.writePrefixDeclarationRegistry(prefixes);
+		assertEquals("", writer.toString());
+		assertFalse(result);
+	}
+
+	@Test
+	public void serializeCommand() throws IOException {
+		ArrayList<Argument> arguments = new ArrayList<>();
+		arguments.add(Argument.term(abstractConstant));
+		arguments.add(Argument.positiveLiteral(fact));
+		arguments.add(Argument.rule(rule));
+		Command command = new Command("command", arguments);
+
+		serializer.writeCommand(command);
+		assertEquals("@command <http://example.org/test> p1(c) p1(?X) :- p2(?X, c), p1(c) .", writer.toString());
+	}
+
+	@Test
+	public void createThrowingSerializer_succeeds() throws IOException {
+		getThrowingSerializer();
+	}
+
+	@Test(expected = IOException.class)
+	public void serializeAbstractConstant_fails() throws IOException {
+		getThrowingSerializer().writeTerm(abstractConstant);
+	}
+
+	@Test(expected = IOException.class)
+	public void serializeDatatypeConstant_fails() throws IOException {
+		getThrowingSerializer().writeTerm(datatypeConstantGeneral);
+	}
+
+	@Test(expected = IOException.class)
+	public void serializeExistentialVariable_fails() throws IOException {
+		
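// the mocked Writer throws on every write(String) call, so the
+		// IOException must propagate through writeTerm
+		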
getThrowingSerializer().writeTerm(existentialVariable); + } + + @Test(expected = IOException.class) + public void serializeUniversalVariable_fails() throws IOException { + getThrowingSerializer().writeTerm(universalVariable); + } + + @Test(expected = IOException.class) + public void serializeLanguageStringConstant_fails() throws IOException { + getThrowingSerializer().writeTerm(languageStringConstant); + } + + @Test(expected = IOException.class) + public void serializeNamedNull_fails() throws IOException { + getThrowingSerializer().writeTerm(namedNull); + } + + @Test(expected = IOException.class) + public void serializeFact_fails() throws IOException { + getThrowingSerializer().writeStatement(fact); + } + + @Test(expected = IOException.class) + public void serializeRule_fails() throws IOException { + getThrowingSerializer().writeStatement(rule); + } + + @Test(expected = IOException.class) + public void serializeDataSourceDeclaration_fails() throws IOException { + DataSourceDeclaration csvSourceDecl = new DataSourceDeclarationImpl(p1, new CsvFileDataSource("test.csv")); + getThrowingSerializer().writeStatement(csvSourceDecl); + } + + @Test(expected = IOException.class) + public void serializePrefixDeclarations_fails() throws IOException { + final MergingPrefixDeclarationRegistry prefixes = new MergingPrefixDeclarationRegistry(); + prefixes.setBaseIri("http://example.org/base"); + prefixes.setPrefixIri("eg:", "http://example.org/"); + getThrowingSerializer().writePrefixDeclarationRegistry(prefixes); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java new file mode 100644 index 000000000..7719f9efa --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermFactoryTest.java @@ -0,0 +1,120 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl;
+import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl;
+import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl;
+import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl;
+import org.semanticweb.rulewerk.core.model.implementation.TermFactory;
+import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl;
+
+public class TermFactoryTest {
+
+	@Test
+	public void universalVariable_reused() {
+		TermFactory termFactory = new TermFactory();
+		Term term1 = termFactory.makeUniversalVariable("X");
+		Term term2 = termFactory.makeUniversalVariable("Y");
+		Term term3 = termFactory.makeUniversalVariable("X");
+		Term term4 = new UniversalVariableImpl("X");
+
+		assertNotEquals(term1, term2);
+		assertSame(term1, term3);
+		assertEquals(term1, term4);
+	}
+
+	@Test
+	public void existentialVariable_reused() {
+		TermFactory termFactory = new TermFactory();
+		Term term1 = termFactory.makeExistentialVariable("X");
+		Term term2 = termFactory.makeExistentialVariable("Y");
+		Term term3 = termFactory.makeExistentialVariable("X");
+		Term term4 = new ExistentialVariableImpl("X");
+
+		assertNotEquals(term1, term2);
+		assertSame(term1, term3);
+		assertEquals(term1, term4);
+	}
+
+	@Test
+	public void abstractConstant_reused() {
+		TermFactory termFactory = new TermFactory();
+		Term term1 = termFactory.makeAbstractConstant("X");
+		Term term2 = termFactory.makeAbstractConstant("Y");
+		Term term3 = termFactory.makeAbstractConstant("X");
+		Term term4 = new AbstractConstantImpl("X");
+
+		assertNotEquals(term1, term2);
+		assertSame(term1, term3);
+		assertEquals(term1, term4);
+	}
+
+	@Test
+	public void predicate_reused() {
+		TermFactory termFactory = new TermFactory();
+		Predicate pred1 = termFactory.makePredicate("p", 1);
+		Predicate pred2 = termFactory.makePredicate("q", 1);
+		Predicate pred3 = termFactory.makePredicate("p", 2);
+		Predicate pred4 = termFactory.makePredicate("p", 1);
+
+		assertNotEquals(pred1, pred2);
+		assertNotEquals(pred1, pred3);
+		assertSame(pred1, pred4);
+	}
+
+	@Test
+	public void datatypeConstant_succeeds() {
+		TermFactory termFactory = new TermFactory();
+		Term term1 = termFactory.makeDatatypeConstant("abc", "http://test");
+		Term term2 = new DatatypeConstantImpl("abc", "http://test");
+
+		assertEquals(term1, term2);
+	}
+
+	@Test
+	public void languageConstant_succeeds() {
+		TermFactory termFactory = new TermFactory();
+		Term term1 = termFactory.makeLanguageStringConstant("abc", "de");
+		Term term2 = new LanguageStringConstantImpl("abc", "de");
+
+		assertEquals(term1, term2);
+	}
+
+	@Test
+	public void lruCache_works() {
+		TermFactory.SimpleLruMap<String, String> map = new TermFactory.SimpleLruMap<>(1, 3);
+		map.put("a", "test");
+		map.put("b", "test");
+		map.put("c", "test");
+		map.put("c", "test2");
+
+		assertTrue(map.containsKey("b"));
+		assertTrue(map.containsKey("c"));
+		assertFalse(map.containsKey("a"));
+	}
+
+}
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java
new file mode 100644
index 000000000..a2fb5e0a0
--- /dev/null
+++ 
b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/model/implementation/TermImplTest.java @@ -0,0 +1,210 @@ +package org.semanticweb.rulewerk.core.model.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; + +public class TermImplTest { + + @Test + public void abstractConstantImplEqualityTest() { + Term c = new AbstractConstantImpl("c"); + Term ctoo = new AbstractConstantImpl("c"); + Term a = new AbstractConstantImpl("a"); + Term v = new UniversalVariableImpl("c"); + + assertEquals(c, c); + assertEquals(ctoo, c); + assertNotEquals(a, c); + assertNotEquals(v, c); + assertEquals(c.hashCode(), ctoo.hashCode()); + assertFalse(c.equals(null)); // written like this for recording coverage properly + } + + @Test + public void datatypeConstantImplEqualityTest() { + Term c = new DatatypeConstantImpl("c", "http://example.org/mystring"); + Term ctoo = new DatatypeConstantImpl("c", "http://example.org/mystring"); + Term a = new DatatypeConstantImpl("a", "http://example.org/mystring"); + Term b = new DatatypeConstantImpl("c", "http://example.org/mystring2"); + Term v = new UniversalVariableImpl("c"); + + assertEquals(c, c); + assertEquals(ctoo, c); + assertNotEquals(a, c); + assertNotEquals(b, c); + assertNotEquals(v, c); + assertEquals(c.hashCode(), ctoo.hashCode()); + assertFalse(c.equals(null)); // written like this for recording coverage properly + } + + @Test + public void languageStringConstantImplEqualityTest() { + Term c = new LanguageStringConstantImpl("Test", "en"); + Term ctoo = new LanguageStringConstantImpl("Test", "en"); + Term a = new LanguageStringConstantImpl("Test2", "en"); + Term b = new LanguageStringConstantImpl("Test", "de"); + Term v = new UniversalVariableImpl("c"); + + assertEquals(c, c); + assertEquals(ctoo, c); + assertNotEquals(a, c); + assertNotEquals(b, c); + assertNotEquals(v, c); + assertEquals(c.hashCode(), ctoo.hashCode()); + assertFalse(c.equals(null)); // written 
like this for recording coverage properly
+		assertFalse(c.equals("c")); // written like this for recording coverage properly
+	}
+
+	@Test
+	public void abstractConstantGetterTest() {
+		Term c = new AbstractConstantImpl("c");
+		assertEquals("c", c.getName());
+		assertEquals(TermType.ABSTRACT_CONSTANT, c.getType());
+	}
+
+	@Test
+	public void datatypeConstantGetterTest() {
+		DatatypeConstant c = new DatatypeConstantImpl("c", "http://example.org/type");
+		assertEquals("c", c.getLexicalValue());
+		assertEquals("http://example.org/type", c.getDatatype());
+		assertEquals("\"c\"^^<http://example.org/type>", c.getName());
+		assertEquals(TermType.DATATYPE_CONSTANT, c.getType());
+	}
+
+	@Test
+	public void languageStringConstantGetterTest() {
+		LanguageStringConstant c = new LanguageStringConstantImpl("Test", "en");
+		assertEquals("Test", c.getString());
+		assertEquals("en", c.getLanguageTag());
+		assertEquals("\"Test\"@en", c.getName());
+		assertEquals(TermType.LANGSTRING_CONSTANT, c.getType());
+	}
+
+	@Test
+	public void universalVariableGetterTest() {
+		Term v = new UniversalVariableImpl("v");
+		assertEquals("v", v.getName());
+		assertEquals(TermType.UNIVERSAL_VARIABLE, v.getType());
+	}
+
+	@Test
+	public void existentialVariableGetterTest() {
+		Term v = new ExistentialVariableImpl("v");
+		assertEquals("v", v.getName());
+		assertEquals(TermType.EXISTENTIAL_VARIABLE, v.getType());
+	}
+
+	@Test
+	public void namedNullGetterTest() {
+		Term n = new NamedNullImpl("123");
+		assertEquals("123", n.getName());
+		assertEquals(TermType.NAMED_NULL, n.getType());
+	}
+
+	@Test
+	public void abstractConstantToStringTest() {
+		AbstractConstantImpl c = new AbstractConstantImpl("c");
+		assertEquals("c", c.toString());
+	}
+
+	@Test
+	public void datatypeConstantToStringTest() {
+		DatatypeConstantImpl c = new DatatypeConstantImpl("c", PrefixDeclarationRegistry.XSD_STRING);
+		assertEquals("\"c\"", c.toString());
+		assertEquals("\"c\"", c.getRdfLiteralString(true));
+		assertEquals("\"c\"^^<" + PrefixDeclarationRegistry.XSD_STRING + ">", c.getRdfLiteralString(false));
+	}
+
+	@Test
+	public void languageStringConstantToStringTest() {
+		LanguageStringConstantImpl c = new LanguageStringConstantImpl("Test", "en");
+		assertEquals("\"Test\"@en", c.toString());
+	}
+
+	@Test
+	public void universalVariableToStringTest() {
+		UniversalVariableImpl v = new UniversalVariableImpl("v");
+		assertEquals("?v", v.toString());
+	}
+
+	@Test
+	public void existentialVariableToStringTest() {
+		ExistentialVariableImpl v = new ExistentialVariableImpl("v");
+		assertEquals("!v", v.toString());
+	}
+
+	@Test
+	public void namedNullToStringTest() {
+		NamedNullImpl n = new NamedNullImpl("123");
+		assertEquals("_:123", n.toString());
+	}
+
+	@Test(expected = NullPointerException.class)
+	public void constantNameNonNullTest() {
+		new AbstractConstantImpl((String) null);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void constantNameNonEmptyTest() {
+		new AbstractConstantImpl("");
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void constantNameNonWhitespaceTest() {
+		new AbstractConstantImpl(" ");
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void languageTagNonEmptyTest() {
+		new LanguageStringConstantImpl("test", "");
+	}
+
+	@Test(expected = NullPointerException.class)
+	public void languageStringNameNonNull() {
+		new LanguageStringConstantImpl(null, "");
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void datatypeNonEmptyTest() {
+		new DatatypeConstantImpl("test", "");
+	}
+
+
@Test(expected = NullPointerException.class) + public void lexicalValueNonNull() { + new DatatypeConstantImpl(null, ""); + } + +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java new file mode 100644 index 000000000..6fa079bce --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/KnowledgeBaseTest.java @@ -0,0 +1,168 @@ +package org.semanticweb.rulewerk.core.reasoner; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.net.URL; +import java.util.Arrays; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.internal.util.collections.Sets; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.MergingPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class KnowledgeBaseTest { + + private KnowledgeBase kb; + private final Fact fact1 = Expressions.makeFact("P", Expressions.makeAbstractConstant("c")); + private final Fact fact2 = Expressions.makeFact("P", Expressions.makeAbstractConstant("d")); + private final Fact fact3 = Expressions.makeFact("Q", Expressions.makeAbstractConstant("c")); + private final PositiveLiteral literal1 = Expressions.makePositiveLiteral("P", + Expressions.makeUniversalVariable("X")); + private final PositiveLiteral literal2 = Expressions.makePositiveLiteral("Q", + Expressions.makeUniversalVariable("X")); + private final Rule rule = Expressions.makeRule(literal1, literal2); + + @Before + public void initKB() { + this.kb = new KnowledgeBase(); + this.kb.addStatements(this.fact1, this.fact2, this.fact3); + } + + @Test + public void testDoRemoveStatementExistent() { + final boolean removed = this.kb.doRemoveStatement(this.fact1); + + assertTrue(removed); + assertEquals(Arrays.asList(this.fact2, this.fact3), this.kb.getFacts()); + assertEquals(Sets.newSet(this.fact2), this.kb.getFactsByPredicate().get(this.fact1.getPredicate())); + } + + @Test + public void testDoRemoveStatementOnlyExistentWithPredicate() { + final boolean removed = this.kb.doRemoveStatement(this.fact3); + + assertTrue(removed); + assertEquals(Arrays.asList(this.fact1, this.fact2), this.kb.getFacts()); + assertEquals(null, 
this.kb.getFactsByPredicate().get(this.fact3.getPredicate()));
+	}
+
+	@Test
+	public void testDoRemoveStatementInexistent() {
+		final Fact fact = Expressions.makeFact("P", Expressions.makeAbstractConstant("e"));
+		final boolean removed = this.kb.doRemoveStatement(fact);
+
+		assertFalse(removed);
+		assertEquals(Arrays.asList(this.fact1, this.fact2, this.fact3), this.kb.getFacts());
+		assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(fact.getPredicate()));
+
+		assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact1.getPredicate()));
+		assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact2.getPredicate()));
+		assertEquals(Sets.newSet(this.fact3), this.kb.getFactsByPredicate().get(this.fact3.getPredicate()));
+	}
+
+	@Test
+	public void testDoRemoveStatementInexistentPredicate() {
+
+		final Fact fact = Expressions.makeFact("R", Expressions.makeAbstractConstant("e"));
+		final boolean removed = this.kb.doRemoveStatement(fact);
+
+		assertFalse(removed);
+		assertEquals(Arrays.asList(this.fact1, this.fact2, this.fact3), this.kb.getFacts());
+		assertEquals(null, this.kb.getFactsByPredicate().get(fact.getPredicate()));
+
+		assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact1.getPredicate()));
+		assertEquals(Sets.newSet(this.fact1, this.fact2), this.kb.getFactsByPredicate().get(this.fact2.getPredicate()));
+		assertEquals(Sets.newSet(this.fact3), this.kb.getFactsByPredicate().get(this.fact3.getPredicate()));
+	}
+
+	@Test
+	public void getBase_default_hasEmptyBase() {
+		assertEquals("", this.kb.getBaseIri());
+	}
+
+	@Test(expected = PrefixDeclarationException.class)
+	public void getPrefix_defaultUndeclaredPrefix_throws() throws PrefixDeclarationException {
+		this.kb.getPrefixIri("ex:");
+	}
+
+	@Test(expected = PrefixDeclarationException.class)
+	public void resolvePrefixedName_defaultUndeclaredPrefix_throws() throws PrefixDeclarationException {
+		this.kb.resolvePrefixedName("ex:test");
+	}
+
+	@Test
+	public void mergePrefixDeclarations_merge_succeeds() throws PrefixDeclarationException {
+		String iri = "https://example.org/";
+		MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry();
+		prefixDeclarations.setPrefixIri("ex:", iri);
+		this.kb.mergePrefixDeclarations(prefixDeclarations);
+		assertEquals(this.kb.getPrefixIri("ex:"), iri);
+		assertEquals(this.kb.resolvePrefixedName("ex:test"), iri + "test");
+		assertEquals(this.kb.unresolveAbsoluteIri(iri + "test"), "ex:test");
+	}
+
+	@Test
+	public void writeKnowledgeBase_justFacts_succeeds() throws IOException {
+		StringWriter writer = new StringWriter();
+		this.kb.writeKnowledgeBase(writer);
+		assertEquals("P(c) .\nP(d) .\nQ(c) .\n", writer.toString());
+	}
+
+	@Test(expected = RulewerkRuntimeException.class)
+	public void writeKnowledgeBase_withBase_fails() throws IOException {
+		String baseIri = "https://example.org/";
+		MergingPrefixDeclarationRegistry prefixDeclarations = new MergingPrefixDeclarationRegistry();
+		prefixDeclarations.setBaseIri(baseIri);
+		this.kb.mergePrefixDeclarations(prefixDeclarations);
+		StringWriter writer = new StringWriter();
+		this.kb.writeKnowledgeBase(writer);
+		//// This would be incorrect, since parsing this would lead to another KB
+		//// that uses IRIs like <https://example.org/P>:
+		// assertEquals("@base <" + baseIri + "> .\nP(c) .\nP(d) .\nQ(c) .\n",
+		// writer.toString());
+	}
+
+	@Test
+	public void
writeKnowledgeBase_alsoRuleAndDataSource_succeeds() throws IOException { + String sparqlIri = "https://example.org/sparql"; + String sparqlBgp = "?X ?p []"; + this.kb.addStatement(rule); + this.kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("S", 1), + new SparqlQueryResultDataSource(new URL(sparqlIri), "?X", sparqlBgp))); + + StringWriter writer = new StringWriter(); + this.kb.writeKnowledgeBase(writer); + assertEquals("@source S[1]: sparql(<" + sparqlIri + ">, \"?X\", \"" + sparqlBgp + + "\") .\n\nP(c) .\nP(d) .\nQ(c) .\n\nP(?X) :- Q(?X) .\n", writer.toString()); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java new file mode 100644 index 000000000..119e8057d --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/CsvFileDataSourceTest.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; + +import org.junit.Test; + +public class CsvFileDataSourceTest { + + private final String ntFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; + private final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"; + private final String gzFile = csvFile + ".gz"; + + @Test(expected = NullPointerException.class) + public void testConstructorNullFile() throws IOException { + new CsvFileDataSource(null); + } + + @Test(expected = IllegalArgumentException.class) + public void testConstructorFalseExtension() throws IOException { + new CsvFileDataSource(ntFile); + } + + @Test + public void testConstructor() throws IOException { + final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile); + final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile); + + FileDataSourceTestUtils.testConstructor(unzippedCsvFileDataSource, new File(csvFile).getName()); + FileDataSourceTestUtils.testConstructor(zippedCsvFileDataSource, new File(gzFile).getName()); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java new file mode 100644 index 000000000..057ceba16 --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/FileDataSourceTestUtils.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not 
use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+/**
+ * Utility class for reading from and writing to data source files.
+ *
+ * @author Christian Lewe
+ * @author Irina Dragoste
+ *
+ */
+public final class FileDataSourceTestUtils {
+
+	public static final String INPUT_FOLDER = "src/test/data/input/";
+
+	/*
+	 * This is a utility class. Therefore, it is best practice to do the following:
+	 * (1) make the class final, (2) make its constructor private, and (3) make all
+	 * its fields and methods static. This prevents the class's instantiation and
+	 * inheritance.
+	 */
+	private FileDataSourceTestUtils() {
+
+	}
+
+	public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName)
+			throws IOException {
+		assertEquals(expectedFileName, fileDataSource.getName());
+	}
+}
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java
similarity index 73%
rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java
rename to rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java
index adbad845e..38d57c214 100644
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultImplTest.java
+++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/QueryResultImplTest.java
@@ -1,60 +1,59 @@
-package org.semanticweb.vlog4j.core.reasoner.implementation;
-
-/*-
- * #%L
- * VLog4j Core Components
- * %%
- * Copyright (C) 2018 VLog4j Developers
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-
-import java.util.Arrays;
-import java.util.List;
-
-import org.junit.Test;
-import org.semanticweb.vlog4j.core.model.api.Constant;
-import org.semanticweb.vlog4j.core.model.api.QueryResult;
-import org.semanticweb.vlog4j.core.model.api.Term;
-import org.semanticweb.vlog4j.core.model.implementation.Expressions;
-import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultImpl;
-
-public class QueryResultImplTest {
-
-	@Test
-	public void testEquals() {
-		final Constant c1 = Expressions.makeConstant("C");
-		final Constant c2 = Expressions.makeConstant("ddd");
-		final List<Term> constantList = Arrays.asList(c1, c1, c2);
-
-		final QueryResult queryResult1 = new QueryResultImpl(constantList);
-		final QueryResult queryResult2 = new QueryResultImpl(Arrays.asList(c1, c1, c2));
-		final QueryResult queryResult3 = new QueryResultImpl(Arrays.asList(c1, c2, c1));
-
-		assertEquals(queryResult1, queryResult1);
-		assertEquals(queryResult2, queryResult1);
-		assertEquals(queryResult2.hashCode(), queryResult1.hashCode());
-		assertNotEquals(queryResult3, queryResult1);
-		assertNotEquals(queryResult3.hashCode(), queryResult1.hashCode());
-		assertNotEquals(new QueryResultImpl(null), queryResult1);
-		assertEquals(new QueryResultImpl(null), new QueryResultImpl(null));
-		assertFalse(queryResult1.equals(null));
-		assertFalse(queryResult1.equals(constantList));
-	}
-
-}
+package org.semanticweb.rulewerk.core.reasoner.implementation;
+
+/*-
+ * #%L
+ * Rulewerk Core Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.QueryResult;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+
+public class QueryResultImplTest {
+
+	@Test
+	public void testEquals() {
+		final Constant c1 = Expressions.makeAbstractConstant("C");
+		final Constant c2 = Expressions.makeAbstractConstant("ddd");
+		final List<Term> constantList = Arrays.asList(c1, c1, c2);
+
+		final QueryResult queryResult1 = new QueryResultImpl(constantList);
+		final QueryResult queryResult2 = new QueryResultImpl(Arrays.asList(c1, c1, c2));
+		final QueryResult queryResult3 = new QueryResultImpl(Arrays.asList(c1, c2, c1));
+
+		assertEquals(queryResult1, queryResult1);
+		assertEquals(queryResult2, queryResult1);
+		assertEquals(queryResult2.hashCode(), queryResult1.hashCode());
+		assertNotEquals(queryResult3, queryResult1);
+		assertNotEquals(queryResult3.hashCode(), queryResult1.hashCode());
+		assertNotEquals(new QueryResultImpl(null), queryResult1);
+		assertEquals(new QueryResultImpl(null), new QueryResultImpl(null));
+		assertFalse(queryResult1.equals(null));
+		assertFalse(queryResult1.equals(constantList));
+	}
+
+}
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java
new file mode 100644
index 000000000..3bb39bc8d
--- /dev/null
+++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/RdfFileDataSourceTest.java
@@ -0,0 +1,51 @@
+package org.semanticweb.rulewerk.core.reasoner.implementation;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import java.io.File; +import java.io.IOException; + +import org.junit.Test; + +public class RdfFileDataSourceTest { + + private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt"; + private final String zippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz"; + + @Test(expected = NullPointerException.class) + public void testConstructorNullFile() throws IOException { + new RdfFileDataSource(null); + } + + @Test(expected = IllegalArgumentException.class) + public void testConstructorFalseExtension() throws IOException { + new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "file.csv"); + } + + @Test + public void testConstructor() throws IOException { + final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile); + final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile); + + FileDataSourceTestUtils.testConstructor(unzippedRdfFileDataSource, new File(unzippedRdfFile).getName()); + FileDataSourceTestUtils.testConstructor(zippedRdfFileDataSource, new File(zippedRdfFile).getName()); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java new file mode 100644 index 000000000..d87cd495f --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SkolemizationTest.java @@ -0,0 +1,102 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk Core Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import org.junit.Before; +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; + +public class SkolemizationTest { + private Skolemization skolemization; + private final static String name1 = "_:1"; + private final static String name2 = "_:2"; + + @Before + public void init() { + this.skolemization = new Skolemization(); + } + + @Test + public void skolemizeNamedNull_sameName_mapsToSameNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + NamedNull null2 = skolemization.getRenamedNamedNull(name1); + + assertEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentName_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + NamedNull null2 = skolemization.getRenamedNamedNull(name2); + + assertNotEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentInstances_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + Skolemization other = new Skolemization(); + NamedNull null2 = other.getRenamedNamedNull(name1); + + assertNotEquals(null1.getName(), null2.getName()); + } + + @Test + public void skolemizeNamedNull_differentInstancesDifferentNames_mapsToDifferentNamedNull() { + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + Skolemization other = new Skolemization(); + NamedNull null2 = other.getRenamedNamedNull(name2); + + assertNotEquals(null1.getName(), null2.getName()); + assertEquals(null1.getName(), skolemization.getRenamedNamedNull(name1).getName()); + assertEquals(null2.getName(), other.getRenamedNamedNull(name2).getName()); + } + + @Test + public void skolemConstant_succeeds() { + TermFactory termFactory = new TermFactory(); + AbstractConstant skolem = skolemization.getSkolemConstant(name1, termFactory); + assertTrue(skolem.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX)); + } + + @Test + public void skolemConstantFromNamedNull_succeeds() { + TermFactory termFactory = new TermFactory(); + NamedNull null1 = new NamedNullImpl(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1, termFactory); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1, termFactory); + assertEquals(skolem2, skolem1); + } + + @Test + public void skolemConstantFromRenamedNamedNull_succeeds() { + TermFactory termFactory = new TermFactory(); + NamedNull null1 = skolemization.getRenamedNamedNull(name1); + AbstractConstant skolem1 = skolemization.getSkolemConstant(null1, termFactory); + AbstractConstant skolem2 = skolemization.getSkolemConstant(name1, termFactory); + assertEquals(skolem2, skolem1); + } +} diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java new file mode 100644 index 000000000..6e99a539f --- /dev/null +++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java @@ -0,0 +1,54 @@ +package org.semanticweb.rulewerk.core.reasoner.implementation; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 
2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+
+public class SparqlQueryResultDataSourceTest {
+
+	final URL endpoint = new URL("http://query.wikidata.org/sparql");
+
+	public SparqlQueryResultDataSourceTest() throws MalformedURLException {
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void testEmptyQueryBodyList() throws IOException {
+
+		final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>(
+				Arrays.asList(Expressions.makeUniversalVariable("a")));
+		new SparqlQueryResultDataSource(endpoint, queryVariables, StringUtils.SPACE);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void testEmptyQueryBody() throws IOException {
+		new SparqlQueryResultDataSource(endpoint, "a", StringUtils.SPACE);
+	}
+
+}
diff --git a/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java
new file mode 100644
index 000000000..faf6d640b
--- /dev/null
+++ b/rulewerk-core/src/test/java/org/semanticweb/rulewerk/core/reasoner/implementation/TridentDataSourceTest.java
@@ -0,0 +1,81 @@
+package org.semanticweb.rulewerk.core.reasoner.implementation;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.mockito.Mockito; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class TridentDataSourceTest { + + @Test(expected = NullPointerException.class) + public void nullFile_fails() throws IOException { + new TridentDataSource(null); + } + + @Test + public void get_succeeds() throws IOException { + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + assertEquals("trident/path", tridentDataSource.getPath()); + } + + @Test + public void getDeclarationFact_succeeds() throws IOException { + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + Fact fact = tridentDataSource.getDeclarationFact(); + assertEquals(TridentDataSource.declarationPredicateName, fact.getPredicate().getName()); + assertEquals(1, fact.getPredicate().getArity()); + assertEquals(Expressions.makeDatatypeConstant("trident/path", PrefixDeclarationRegistry.XSD_STRING), + fact.getArguments().get(0)); + } + + @Test + public void visit_succeeds() throws IOException { + final DataSourceConfigurationVisitor visitor = Mockito.spy(DataSourceConfigurationVisitor.class); + final TridentDataSource tridentDataSource = new TridentDataSource("trident/path"); + + tridentDataSource.accept(visitor); + + Mockito.verify(visitor).visit(tridentDataSource); + } + + @Test + public void hashEquals_succeed() throws IOException { + final TridentDataSource tridentDataSource1 = new TridentDataSource("trident/path"); + final TridentDataSource tridentDataSource2 = new TridentDataSource("trident/path"); + final TridentDataSource tridentDataSource3 = new TridentDataSource("trident/anotherpath"); + + assertEquals(tridentDataSource1, tridentDataSource2); + assertEquals(tridentDataSource1.hashCode(), tridentDataSource2.hashCode()); + assertNotEquals(tridentDataSource1, tridentDataSource3); + assertEquals(tridentDataSource1, tridentDataSource1); + assertFalse(tridentDataSource1.equals(null)); + assertFalse(tridentDataSource1.equals("trident/path")); + } +} diff --git a/rulewerk-examples/LICENSE.txt b/rulewerk-examples/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-examples/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/rulewerk-examples/README.md b/rulewerk-examples/README.md new file mode 100644 index 000000000..0bc31121e --- /dev/null +++ b/rulewerk-examples/README.md @@ -0,0 +1,6 @@ +This project contains examples of different use-cases of **rulewerk** functionality. 
+- reasoning termination for different chase algorithms, the **Skolem chase** and the **restricted chase**: *SkolemVsRestrictedChaseTermination.java*
+- adding facts from a **CSV file**; exporting query results to CSV: *AddDataFromCSVFile.java*
+- adding facts from the result of a **SPARQL query** on a remote endpoint: *AddDataFromSparqlQueryResults.java*
+- converting an **OWL ontology** into rules and facts; reasoning on an **OWL ontology**: *owlapi.OwlOntologyToRulesAndFacts.java*
+- converting an **RDF resource** into facts: *rdf.AddDataFromRDFModel.java*
diff --git a/rulewerk-examples/pom.xml b/rulewerk-examples/pom.xml
new file mode 100644
index 000000000..a50e17336
--- /dev/null
+++ b/rulewerk-examples/pom.xml
@@ -0,0 +1,99 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>org.semanticweb.rulewerk</groupId>
+		<artifactId>rulewerk-parent</artifactId>
+		<version>0.10.0-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>rulewerk-examples</artifactId>
+	<packaging>jar</packaging>
+
+	<name>Rulewerk Examples</name>
+	<description>Contains examples and usage instructions describing the basic functionality of Rulewerk</description>
+
+	<dependencies>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-core</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-owlapi</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-rdf</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-graal</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-parser</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-vlog</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-commands</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<version>${slf4j.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.openrdf.sesame</groupId>
+			<artifactId>sesame-rio-turtle</artifactId>
+			<version>${openrdf.sesame.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.openrdf.sesame</groupId>
+			<artifactId>sesame-rio-rdfxml</artifactId>
+			<version>${openrdf.sesame.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>fr.lirmm.graphik</groupId>
+			<artifactId>graal-io-dlgp</artifactId>
+			<version>${graal.version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>cobertura-maven-plugin</artifactId>
+				<version>2.7</version>
+				<configuration>
+					<aggregate>true</aggregate>
+					<quiet>true</quiet>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+</project>
diff --git a/vlog4j-examples/src/main/data/.gitignore b/rulewerk-examples/src/main/data/.gitignore
similarity index 100%
rename from vlog4j-examples/src/main/data/.gitignore
rename to rulewerk-examples/src/main/data/.gitignore
diff --git a/rulewerk-examples/src/main/data/input/bicycleEDB.csv.gz b/rulewerk-examples/src/main/data/input/bicycleEDB.csv.gz
new file mode 100644
index 000000000..a8cede271
Binary files /dev/null and b/rulewerk-examples/src/main/data/input/bicycleEDB.csv.gz differ
diff --git a/rulewerk-examples/src/main/data/input/counting-triangles.rls b/rulewerk-examples/src/main/data/input/counting-triangles.rls
new file mode 100644
index 000000000..90cf145bb
--- /dev/null
+++ b/rulewerk-examples/src/main/data/input/counting-triangles.rls
@@ -0,0 +1,17 @@
+@prefix wdqs: <https://query.wikidata.org/> .
+
+% From Wikidata, get all countries (items with P31 relation to Q6256, or subclasses thereof: P279*)
+% that border (P47) each other:
+@source borders[2]: sparql(wdqs:sparql, "country1,country2",
+	'''?country1 wdt:P31/wdt:P279* wd:Q6256 .
+	   ?country2 wdt:P31/wdt:P279* wd:Q6256 .
+	   ?country1 wdt:P47 ?country2 .''') .
+
+% Compute the symmetric closure of borders:
+shareBorder(?X, ?Y) :- borders(?X, ?Y) .
+shareBorder(?X, ?Y) :- borders(?Y, ?X) .
+
+country(?X) :- shareBorder(?X, ?Y) .
+
+% Compute all triangles:
+triangle(?X,?Y,?Z) :- shareBorder(?X,?Y), shareBorder(?Y,?Z), shareBorder(?Z,?X) .
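+
+% Note: since shareBorder is symmetric, each triangle of countries is derived
+% in all 3! = 6 variable orders, so the number of distinct triangles is the
+% number of answers to triangle(?X,?Y,?Z) divided by 6 (assuming no country
+% is recorded as bordering itself in the data).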
diff --git a/rulewerk-examples/src/main/data/input/doid.nt.gz b/rulewerk-examples/src/main/data/input/doid.nt.gz
new file mode 100644
index 000000000..be74c5f53
Binary files /dev/null and b/rulewerk-examples/src/main/data/input/doid.nt.gz differ
diff --git a/rulewerk-examples/src/main/data/input/doid.rls b/rulewerk-examples/src/main/data/input/doid.rls
new file mode 100644
index 000000000..e50e4e3ef
--- /dev/null
+++ b/rulewerk-examples/src/main/data/input/doid.rls
@@ -0,0 +1,36 @@
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix wdqs: <https://query.wikidata.org/> .
+
+@source doidTriple[3]: load-rdf("src/main/data/input/doid.nt.gz") .
+@source diseaseId[2]: sparql(wdqs:sparql, "disease,doid", "?disease wdt:P699 ?doid .") .
+@source recentDeaths[1]: sparql(wdqs:sparql, "human",
+	'''?human wdt:P31 wd:Q5;
+	          wdt:P570 ?deathDate .
+	   FILTER (YEAR(?deathDate) = 2018)''') .
+@source recentDeathsCause[2]: sparql(wdqs:sparql, "human,causeOfDeath",
+	'''?human wdt:P31 wd:Q5;
+	          wdt:P570 ?deathDate ;
+	          wdt:P509 ?causeOfDeath .
+	   FILTER (YEAR(?deathDate) = 2018)''') .
+
+% Combine recent death data (infer "unknown" cause if no cause given):
+deathCause(?X, ?Z) :- recentDeathsCause(?X, ?Z) .
+deathCause(?X, !Z) :- recentDeaths(?X) .
+
+% Mark Wikidata diseases that have a DOID:
+hasDoid(?X) :- diseaseId(?X, ?DoidId) .
+
+% Relate DOID string ID (used on Wikidata) to DOID IRI (used in DOID ontology)
+doid(?Iri, ?DoidId) :- doidTriple(?Iri, <http://www.geneontology.org/formats/oboInOwl#id>, ?DoidId) .
+
+% Compute transitive closure of DOID subclass hierarchy
+diseaseHierarchy(?X, ?Y) :- doidTriple(?X, rdfs:subClassOf, ?Y) .
+diseaseHierarchy(?X, ?Z) :- diseaseHierarchy(?X, ?Y), doidTriple(?Y, rdfs:subClassOf, ?Z) .
+
+% Find DOID ids for all subclasses of cancer:
+cancerDisease(?Xdoid) :- diseaseHierarchy(?X, ?Y), doid(?Y, "DOID:162"), doid(?X, ?Xdoid) .
+
+% Compute who died of cancer and who died of something else (including diseases unknown to DOID):
+humansWhoDiedOfCancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), cancerDisease(?Z) .
+humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), diseaseId(?Y, ?Z), ~cancerDisease(?Z) .
+humansWhoDiedOfNoncancer(?X) :- deathCause(?X, ?Y), ~hasDoid(?Y) .
diff --git a/rulewerk-examples/src/main/data/input/graal/doid-example.dlgp b/rulewerk-examples/src/main/data/input/graal/doid-example.dlgp
new file mode 100644
index 000000000..1c6d21e61
--- /dev/null
+++ b/rulewerk-examples/src/main/data/input/graal/doid-example.dlgp
@@ -0,0 +1,20 @@
+% Data sources with predicate names:
+% doidTriple(S,P,O) -- triples from DOID ontology
+% recentDeaths(Human) -- human who died recently
+% recentDeathsCause(human, deathCause) -- cause of death of a human (both Wikidata IRIs)
+% diseaseId(diseaseIri, doid) -- disease from Wikidata with DOID
+
+@rules
+
+deathCause(X, Z) :- recentDeathsCause(X, Z).
+deathCause(X, Z) :- recentDeaths(X).
+
+doid(Iri,DoidId) :- doidTriple(Iri,<http://www.geneontology.org/formats/oboInOwl#id>,DoidId).
+hasDoid(X) :- diseaseId(X,DoidId) .
+
+diseaseHierarchy(X,Y) :- doidTriple(X,<http://www.w3.org/2000/01/rdf-schema#subClassOf>,Y).
+diseaseHierarchy(X,Z) :- diseaseHierarchy(X,Y), doidTriple(Y,<http://www.w3.org/2000/01/rdf-schema#subClassOf>,Z).
+
+cancerDisease(Xdoid) :- diseaseHierarchy(X,Y), doid(Y, "DOID:162"), doid(X, Xdoid).
+
+humansWhoDiedOfCancer(X) :- deathCause(X,Y), diseaseId(Y,Z), cancerDisease(Z).
diff --git a/rulewerk-examples/src/main/data/input/graal/example.dlgp b/rulewerk-examples/src/main/data/input/graal/example.dlgp
new file mode 100755
index 000000000..a98d95d18
--- /dev/null
+++ b/rulewerk-examples/src/main/data/input/graal/example.dlgp
@@ -0,0 +1,28 @@
+@facts
+bicycleEDB(redBike).
+bicycleEDB(blueBike).
+bicycleEDB(blackBike).
+
+wheelEDB(redWheel).
+wheelEDB(blueWheel).
+wheelEDB(greyWheel).
+
+hasPartEDB(redBike, redWheel).
+hasPartEDB(blueBike, blueWheel).
+
+@rules
+% Importing from external database predicates
+bicycleIDB(X) :- bicycleEDB(X).
+wheelIDB(X) :- wheelEDB(X).
+hasPartIDB(X, Y) :- hasPartEDB(X, Y).
+
+% Existential rules
+hasPartIDB(X, Y), wheelIDB(Y) :- bicycleIDB(X).
+isPartOfIDB(X, Y) :- wheelIDB(X).
+
+% Inverse relationships
+isPartOfIDB(X, Y) :- hasPartIDB(Y, X).
+hasPartIDB(X, Y) :- isPartOfIDB(Y, X).
+
+@queries
+?(B, W) :- bicycleIDB(B), wheelIDB(W), isPartOfIDB(W, B).
diff --git a/rulewerk-examples/src/main/data/input/hasPartEDB.csv.gz b/rulewerk-examples/src/main/data/input/hasPartEDB.csv.gz
new file mode 100644
index 000000000..1e94e7da8
Binary files /dev/null and b/rulewerk-examples/src/main/data/input/hasPartEDB.csv.gz differ
diff --git a/rulewerk-examples/src/main/data/input/owl/bike.owl b/rulewerk-examples/src/main/data/input/owl/bike.owl
new file mode 100644
index 000000000..56e753ed6
--- /dev/null
+++ b/rulewerk-examples/src/main/data/input/owl/bike.owl
@@ -0,0 +1,84 @@
+@prefix : <http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#> .
+@prefix owl: <http://www.w3.org/2002/07/owl#> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix xml: <http://www.w3.org/XML/1998/namespace> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@base <http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2> .
+
+<http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2> rdf:type owl:Ontology .
+
+#################################################################
+#    Object Properties
+#################################################################
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPart
+:hasPart rdf:type owl:ObjectProperty ;
+         owl:inverseOf :isPartOf .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#hasPartSource
+:hasPartSource rdf:type owl:ObjectProperty ;
+               rdfs:subPropertyOf :hasPart .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOf
+:isPartOf rdf:type owl:ObjectProperty .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#isPartOfSource
+:isPartOfSource rdf:type owl:ObjectProperty ;
+                rdfs:subPropertyOf :isPartOf .
+
+
+#################################################################
+#    Classes
+#################################################################
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Bike
+:Bike rdf:type owl:Class ;
+      rdfs:subClassOf [ rdf:type owl:Restriction ;
+                        owl:onProperty :hasPart ;
+                        owl:someValuesFrom :Wheel
+                      ] .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#BikeSource
+:BikeSource rdf:type owl:Class ;
+            rdfs:subClassOf :Bike .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#Wheel
+:Wheel rdf:type owl:Class ;
+       rdfs:subClassOf [ rdf:type owl:Restriction ;
+                         owl:onProperty :isPartOf ;
+                         owl:someValuesFrom :Bike
+                       ] .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#WheelSource
+:WheelSource rdf:type owl:Class ;
+             rdfs:subClassOf :Wheel .
+
+
+#################################################################
+#    Individuals
+#################################################################
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b1
+:b1 rdf:type owl:NamedIndividual ,
+             :BikeSource ;
+    :hasPartSource :w1 .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#b2
+:b2 rdf:type owl:NamedIndividual ,
+             :BikeSource .
+
+
+###  http://www.semanticweb.org/carralma/ontologies/2018/3/untitled-ontology-2#w1
+:w1 rdf:type owl:NamedIndividual ,
+             :WheelSource .
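+
+###  Note: the owl:someValuesFrom restrictions on Bike and Wheel above are the
+###  axioms that become existential rules when this ontology is converted to
+###  rules and facts (see OwlOntologyToRulesAndFacts.java), e.g., in Rulewerk
+###  rule syntax: hasPart(?X, !Y), Wheel(!Y) :- Bike(?X) .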
+ + +### Generated by the OWL API (version 4.2.8.20170104-2310) https://github.com/owlcs/owlapi diff --git a/rulewerk-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf b/rulewerk-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf new file mode 100644 index 000000000..2c198b73a --- /dev/null +++ b/rulewerk-examples/src/main/data/input/rdf/iswc-2016-complete-alignments.rdf @@ -0,0 +1,47752 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + d5cdedb99ce0566bc81fbb85b3d590c8f2ccb819 + + Floriano Scioscia + + + + + + Floriano Scioscia + + Floriano Scioscia + + + + + + + + + + + + + + + + + + + + + + + + + Elena Simperl + + + + + + + + + + + Elena Simperl + + Elena Simperl + + + The University of Adelaide + + + The University of Adelaide + + + + The University of Adelaide + + + + + + + + + + + + + + + + + + + + + + + + + + + + user interfaces + + + large high-resolution displays + + + + ontology alignment + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + user interfaces + + + Producing alignments of highest quality requires ‘humans in the loop’, however, user involvement is currently one of the challenges for the ontology alignment community. Ontology alignment is a cognitively intensive task and could be efficiently supported by user interfaces encompassing well-designed visualizations and interaction techniques. This work investigates the application of large, high-resolution displays to improve users’ cognitive support and identifies several promising directions for their application—improving ontologies’ and alignments’ navigation, supporting users’ thinking process and collaboration. + Producing alignments of highest quality requires ‘humans in the loop’, however, user involvement is currently one of the challenges for the ontology alignment community. Ontology alignment is a cognitively intensive task and could be efficiently supported by user interfaces encompassing well-designed visualizations and interaction techniques. This work investigates the application of large, high-resolution displays to improve users’ cognitive support and identifies several promising directions for their application—improving ontologies’ and alignments’ navigation, supporting users’ thinking process and collaboration. + large high-resolution displays + + + + + + Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment + ontology alignment + + + ecf25732b95195eb27e8003f30499cd78dd609bf + + + Eamonn Clinton + + + Eamonn Clinton + + + + Eamonn Clinton + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Freddy Brasileiro + cb4a2d442357edba0b13d7e29b0c1ac976ea1ca5 + Freddy Brasileiro + Freddy Brasileiro + + + + + + + + + + + Property Paths + Query containment is one of the building block of query optimization techniques. In the relational world, query containment is a well-studied problem. At the same time it is well-understood that relational queries are not enough to cope with graph-structured data, where one is interested in expressing queries that capture navigation in the graph. This paper contributes a study on the problem of query containment for an expressive class of navigational queries called Extended Property Paths (EPPs). 
EPPs are more expressive than previous navigational extensions of SPARQL like property paths and nested regular expressions, for which containment has already been studied. We attack the problem of EPPs (and SPARQL with EPPs) containment and provide complexity bounds. + + + Graph Navigational Languages + + Graph Navigational Languages + Property Paths + Containment of Expressive SPARQL Navigational Queries + + Query Containment + Containment of Expressive SPARQL Navigational Queries + Query Containment + + + + Containment of Expressive SPARQL Navigational Queries + + + + + + + + + + Query containment is one of the building block of query optimization techniques. In the relational world, query containment is a well-studied problem. At the same time it is well-understood that relational queries are not enough to cope with graph-structured data, where one is interested in expressing queries that capture navigation in the graph. This paper contributes a study on the problem of query containment for an expressive class of navigational queries called Extended Property Paths (EPPs). EPPs are more expressive than previous navigational extensions of SPARQL like property paths and nested regular expressions, for which containment has already been studied. We attack the problem of EPPs (and SPARQL with EPPs) containment and provide complexity bounds. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T13:50:00 + 2016-10-20T13:50:00 + Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies + + 2016-10-20T13:30:00 + 2016-10-20T13:50:00 + 2016-10-20T13:30:00 + 2016-10-20T13:50:00 + Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies + Chetana Gavankar, Yuan-Fang Li and Ganesh Ramakrishnan + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Alessandra Mileo + Alessandra Mileo + + Alessandra Mileo + + 1447da5207e74c3cb4416cf4b23d7a722621c110 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 08214cd2b712c0f904f1e0e4188c7f9214abd64a + Menna Bakry + + + + + + + Menna Bakry + + + + Menna Bakry + + + + + + + + + + + + + + + + + + + + + + + + + Vidar Klungre + + + Vidar Klungre + + + Vidar Klungre + 9d90d4c92adf2083b473ca5109203c60d0291206 + + + + + + + ABB Corporate Research + ABB Corporate Research + + + + ABB Corporate Research + + + + + + + + + + c9da11def5690717ea77dfaae7d654e32f27aa6b + + Syed Muhammad Ali Hasnain + Syed Muhammad Ali Hasnain + + + + Syed Muhammad Ali Hasnain + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + e6af9124752769b83d7517d127d8e8c6b6faa440 + Andrea Mauri + + + Andrea Mauri + + Andrea Mauri + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Machine Learning + + Scalability + Refinement Operators + + + Refinement Operators + + + Planning + + + Linked Data + Link Discovery + Scalable Link Discovery for Modern Data-Driven Applications + + Linked Data + Partial Recall + Machine Learning + + Modern data-driven applications often have to integrate and process large volumes of high-velocity data. 
To this end, they require fast and accurate Link Discovery solutions. Most Link Discovery frameworks rely on complex link specifications to determine candidates for links. Hence, the main focus of this work lies in the conception, development, implementation and evaluation of time-efficient and scalable Link Discovery approaches based on the link specification paradigm. We address the aforementioned challenges by presenting approaches for (1) time-constrained linking and (2) for the efficient computation and (3) scalable execution of link specifications with applications to periodically updated knowledge bases. The overall result of this thesis will be an open-source framework for link discovery on large volumes of RDF data streams. + Partial Recall + + + Planning + Modern data-driven applications often have to integrate and process large volumes of high-velocity data. To this end, they require fast and accurate Link Discovery solutions. Most Link Discovery frameworks rely on complex link specifications to determine candidates for links. Hence, the main focus of this work lies in the conception, development, implementation and evaluation of time-efficient and scalable Link Discovery approaches based on the link specification paradigm. We address the aforementioned challenges by presenting approaches for (1) time-constrained linking and (2) for the efficient computation and (3) scalable execution of link specifications with applications to periodically updated knowledge bases. The overall result of this thesis will be an open-source framework for link discovery on large volumes of RDF data streams. + + Scalability + Scalable Link Discovery for Modern Data-Driven Applications + Link Discovery + Scalable Link Discovery for Modern Data-Driven Applications + + + + + + + + + + + + + Motoyuki Takaai + + + + + + + + e79c193c77e024e98494681723e3a26df4b3e66a + + + + Motoyuki Takaai + Motoyuki Takaai + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Djellel Eddine Difallah + + + + + Djellel Eddine Difallah + + dad4e489a8bbf598b5133e3f6f129aeee75b8a23 + + + + + Djellel Eddine Difallah + + + + + + + + + + + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + + + AUFX-O: Novel Methods for the Representation of Audio Processing Workflows + AUFX-O: Novel Methods for the Representation of Audio Processing Workflows + 2016-10-21T14:30:00 + Thomas Wilmering, György Fazekas and Mark B. Sandler + + 2016-10-21T14:30:00 + + + + + + + + + + + + + + + György Fazekas + + György Fazekas + + 280f9450d37d0a714fe82df4809b7207d8a5daf2 + + + + György Fazekas + + + + + + + + + + + + + + + Paul Buitelaar + + + + + + + + Paul Buitelaar + Paul Buitelaar + + + + + 26abe4bbe6cae6339b5814a89a4cd6aa6786e0e4 + + + + + Natasha Noy + + + + + + + Natasha Noy + + + + Natasha Noy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Flash storage + Flash storage + RDF data processing + + + Linked Data processing for Embedded Devices + + Linked Data processing for Embedded Devices + + Our PhD work aims to a comprehensive, scalable and resourced-awareness +software framework to process RDF data for embedded devices. 
+In this proposal, we introduce a system architecture supporting RDF storage, +SPARQL query, RDF reasoning and continuous query for RDF stream. The ar- +chitecture is designed to be applicable to embedded systems. For the efficient +performance and scalability, we propose data management techniques adapt- +ing to hardware characteristics of embedded devices. Since computing resources +on embedded devices are constraint, their usage should be context dependent. +Therefore, we work on a resource adaptation model that supports trading off +system performance and device resources depending on their availability. The +adaptation model is based on the resource cost model of the data management +techniques. + + + + Linked Data processing for Embedded Devices + + + Embedded devices + + + RDF data processing + Our PhD work aims to a comprehensive, scalable and resourced-awareness +software framework to process RDF data for embedded devices. +In this proposal, we introduce a system architecture supporting RDF storage, +SPARQL query, RDF reasoning and continuous query for RDF stream. The ar- +chitecture is designed to be applicable to embedded systems. For the efficient +performance and scalability, we propose data management techniques adapt- +ing to hardware characteristics of embedded devices. Since computing resources +on embedded devices are constraint, their usage should be context dependent. +Therefore, we work on a resource adaptation model that supports trading off +system performance and device resources depending on their availability. The +adaptation model is based on the resource cost model of the data management +techniques. + + + Embedded devices + + + + + + Raphaël Troncy + 76b8645ac23d412d99c23dd95e0fbbe092d3f730 + + + + + + + Raphaël Troncy + Raphaël Troncy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 4bdf3d0baced19863517c145b78ba6ebda5bc170 + + + + Florian Haag + + Florian Haag + Florian Haag + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + b68f577fe797be26e8b19e52415aebef09b080c8 + + + Minh Pham + Minh Pham + Minh Pham + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Sheffield Hallam University + + Sheffield Hallam University + + + + + + + Sheffield Hallam University + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Pasquale Lisena + ef5bf1abcd71032d60cf5daa137efd555cd1f73b + + + Pasquale Lisena + + + + + + + + Pasquale Lisena + + + + + + + + + + Steven de Rooij + 090edbd034d896cce7902e1897fa648a67bdc9fe + + + Steven de Rooij + + Steven de Rooij + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + aa792ac2251df0d04c734b9db50c92c72e96eb77 + + + + + Makoto Urakawa + + + + Makoto Urakawa + Makoto Urakawa + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Universidad de Talca + + + + + Universidad de Talca + Universidad de Talca + + + + + + Seiji Okajima + + Seiji Okajima + + + + Seiji Okajima + + + + 2e6a2d3efab08563a3213d847fb43eb67524798f + + + + + + + Hong Fang + Hong Fang + Hong Fang + + + + + + + + + + 2ecc4053698dcfe9ec434598ba358e8af5ba2341 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Quan Z. Sheng + + + + Quan Z. Sheng + + + + a5e8af6357bab05259ef6ea1f0f1700d19ccac9c + + Quan Z. 
Sheng + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Katalin Ternai + + + + + Katalin Ternai + Katalin Ternai + + + 76fc03dc8f50972d1cd8d8538e20ae5c0015838c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Vania Dimitrova + + + 44593204b3df5c26b8db090e41972254e32fbcf7 + + Vania Dimitrova + + + + Vania Dimitrova + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Tchap.me + + + + + Tchap.me + + + + + Tchap.me + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Bielefeld + University of Bielefeld + + + University of Bielefeld + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T11:00:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + 2016-10-19T12:20:00 + 2016-10-19T11:00:00 + 2016-10-19T12:20:00 + Knowledge Graph + Knowledge Graph + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + acf6ff6e1d431ba72252ebf6ddf752b69ab7d661 + + + + + + Zhenyu Song + Zhenyu Song + Zhenyu Song + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-18T15:45:00 + 2016-10-18T16:00:00 + 2016-10-18T16:00:00 + 2016-10-18T16:00:00 + Coffee Break + 2016-10-18T15:45:00 + Coffee Break + 2016-10-18T16:00:00 + + + + + + + + + + + + + + + + + + + + + + + Dataset about iswc2016-alignments. + Wed Oct 19 02:38:49 CEST 2016 + + + + + + + + + + + University of Zurich + + University of Zurich + + + + University of Zurich + + + + + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Gregoire Burel, Lara Piccolo and Harith Alani + 2016-10-19T21:00:00 + Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse + + Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + + + Sarah Komla-Ebri + Sarah Komla-Ebri + + + + + + + + + + Sarah Komla-Ebri + + + + + Derek Magee + + + + + + Derek Magee + Derek Magee + + + + + 89c90a68bb231a1353a1718170419d174898621e + + + + + Univiersitat Politecnica de Catalunya + + + + + + + + + + Univiersitat Politecnica de Catalunya + Univiersitat Politecnica de Catalunya + + + + + + + + + + + + + + + Data stream applications are becoming increasingly popular on the web. In these applications, one query pattern is especially prominent: a join between a continuous data stream and some background data (BGD). Oftentimes, the target BGD is large, maintained externally, changing slowly, and costly to query (both in terms of time and money). Hence, practical applications usually maintain a local (cached) view of the relevant BGD. Given that these caches are not updated as part of the transaction modifying the original BGD, they should be maintained under realistic budget constraints (in terms of latency, computation time, and possibly financial cost) to avoid stale data leading to wrong answers. + +This paper proposes to model the join between streams and the BGD as a bipartite graph. By exploiting the graph structure, we keep the quality of results good enough without refreshing the entire cache for each evaluation. 
We also introduce two extensions to this method: first, we consider both the sliding window (specifying the currently relevant section of the data stream) and the change rate of the BGD to focus on updates that have the longest effect. Second, by considering the future impact of a query to the BGD we propose to sometimes delay updates to provide more fresher answers in future. + +Using an implemented system we empirically show that we can improve result freshness by 93% over baseline algorithms such as Random Selection or Least Recently Updated. + + + + Join + + Data stream applications are becoming increasingly popular on the web. In these applications, one query pattern is especially prominent: a join between a continuous data stream and some background data (BGD). Oftentimes, the target BGD is large, maintained externally, changing slowly, and costly to query (both in terms of time and money). Hence, practical applications usually maintain a local (cached) view of the relevant BGD. Given that these caches are not updated as part of the transaction modifying the original BGD, they should be maintained under realistic budget constraints (in terms of latency, computation time, and possibly financial cost) to avoid stale data leading to wrong answers. + +This paper proposes to model the join between streams and the BGD as a bipartite graph. By exploiting the graph structure, we keep the quality of results good enough without refreshing the entire cache for each evaluation. We also introduce two extensions to this method: first, we consider both the sliding window (specifying the currently relevant section of the data stream) and the change rate of the BGD to focus on updates that have the longest effect. Second, by considering the future impact of a query to the BGD we propose to sometimes delay updates to provide more fresher answers in future. + +Using an implemented system we empirically show that we can improve result freshness by 93% over baseline algorithms such as Random Selection or Least Recently Updated. + + + Budget + + + Budget + Remote Data Access + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + Stream Processing + Remote Data Access + + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + Freshness + Join + Freshness + + Stream Processing + + + + + + Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints + + + + + + + + + Hassan Saif + + + cfd1b87509b48cac8cacda96266298dd3dee5e0b + + Hassan Saif + + + + + Hassan Saif + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + SPORTAL: Searching for Public SPARQL Endpoints + Syed Muhammad Ali Hasnain, Qaiser Mehmood, Syeda Sana E Zainab and Aidan Hogan + SPORTAL: Searching for Public SPARQL Endpoints + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + Télécom ParisTech University + Télécom ParisTech University + + + + Télécom ParisTech University + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + DCC, Pontificia Universidad Católica de Chile + DCC, Pontificia Universidad Católica de Chile + + + + + + + + + DCC, Pontificia Universidad Católica de Chile + + + 2016-10-19T21:00:00 + + 2016-10-19T21:00:00 + Incorporating API data into SPARQL query answers + 2016-10-19T21:00:00 + Matias Junemann, Juan L. 
Reutter, Adrian Soto and Domagoj Vrgoc + + 2016-10-19T18:00:00 + + 2016-10-19T18:00:00 + Incorporating API data into SPARQL query answers + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 019eb78d2972455d2a0f957cb9f883d357f9ceeb + + Besnik Fetahu + + + + + Besnik Fetahu + Besnik Fetahu + + + + + + + + + 2016-10-18T10:30:00 + 2016-10-18T11:00:00 + 2016-10-18T11:00:00 + 2016-10-18T11:00:00 + Coffee Break + 2016-10-18T11:00:00 + 2016-10-18T10:30:00 + Coffee Break + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Peter Woollard + + Peter Woollard + + Peter Woollard + + 4bc85d80d63d2d8272da0d51c92f6703d5c283a9 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Fujitsu R&D Center Co.,LTD + + + Fujitsu R&D Center Co.,LTD + Fujitsu R&D Center Co.,LTD + + + + + + + + + + + + + + + + + + + + + Robert Meusel + + + + 68ff194fb648073f974e8f6fcabf9dc0440db69b + + + + + Robert Meusel + + Robert Meusel + + + + + + + + + + + + + + + + + + + + + + + + + + question-answering + + natural language processing + + + In Sir Tim Berners-Lee’s seminal article that introduce his vision of the semantic web, one of the use-cases described was a health- related example where health consumers utilized intelligent hand-held devices that aggregated and exchanged health data from the semantic web. Presently, majority of health consumers and patients rely on personal technology and the web to find information and to make personal health decisions. This proposal aims to contribute towards that use-case, specifically in the “hot-bed” issue of human papillomavirus (HPV) vac- cine. The HPV vaccine targets young adults and teens to protect against life-threatening cancers, yet a segment of the public has reservations against the vaccine. I propose an interactive dialogue agent that harness patient-level vaccine information encoded in an ontology that can be “talked to” with a natural language interface using utterances. I aim to pilot this technology in a clinic to assess if patient knowledge about HPV and the vaccine is increased, and if their attitude toward the vaccine is modified as a result of using the interactive agent. + dialogue system + + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + + + question-answering + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + dialogue system + + ontology learning + + conversational agents + vaccine + + In Sir Tim Berners-Lee’s seminal article that introduce his vision of the semantic web, one of the use-cases described was a health- related example where health consumers utilized intelligent hand-held devices that aggregated and exchanged health data from the semantic web. Presently, majority of health consumers and patients rely on personal technology and the web to find information and to make personal health decisions. This proposal aims to contribute towards that use-case, specifically in the “hot-bed” issue of human papillomavirus (HPV) vac- cine. The HPV vaccine targets young adults and teens to protect against life-threatening cancers, yet a segment of the public has reservations against the vaccine. I propose an interactive dialogue agent that harness patient-level vaccine information encoded in an ontology that can be “talked to” with a natural language interface using utterances. 
I aim to pilot this technology in a clinic to assess if patient knowledge about HPV and the vaccine is increased, and if their attitude toward the vaccine is modified as a result of using the interactive agent. + + natural language processing + ontology + conversational agents + + + Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception + ontology learning + + ontology + vaccine + + + + + + + + + + + Veronique Volders + + Veronique Volders + + Veronique Volders + 8d314d0023648fdac8f36a93f324aba48ac1525c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Seiya Inagi + + + + + + 26088d83ed5cf55fdfd25e4034b1713d6a88e504 + Seiya Inagi + Seiya Inagi + + + + + + George Drosatos + + + George Drosatos + + + + + + + George Drosatos + + + + + + + Anthony Cohn + + + Anthony Cohn + + fc24c428e8ed4d044040e259830f74cf8cd92971 + + + + + Anthony Cohn + + + + + + Xiang Nan Ren + + + Xiang Nan Ren + + + + + + Xiang Nan Ren + 4e67340de2275d16373425555cb401a3006329ef + + + + + + National and Kapodistrian University of Athens + + + National and Kapodistrian University of Athens + + + + National and Kapodistrian University of Athens + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T14:20:00 + 2016-10-19T14:00:00 + 2016-10-19T14:00:00 + + 2016-10-19T14:20:00 + YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames + + YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames + + 2016-10-19T14:20:00 + 2016-10-19T14:20:00 + Thomas Rebele, Fabian M. Suchanek, Johannes Hoffart, Joanna Biega, Erdal Kuzey and Gerhard Weikum + + + + + + + + + + + + + + + + + + + + + + + + + + + Université jean Monnet + + + + Université jean Monnet + + Université jean Monnet + + + + + + 90363e0e39c4eef6511649eb6664e2452d6838e9 + + + Zakia Kazi-Aoul + + + + + + Zakia Kazi-Aoul + + + + + Zakia Kazi-Aoul + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs + 2016-10-19T21:00:00 + SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs + + 2016-10-19T18:00:00 + Bernardo Cuenca Grau, Evgeny Kharlamov, Sarunas Marciuska, Dmitriy Zheleznyakov and Marcelo Arenas + + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + 2016-10-20T15:30:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + 2016-10-20T16:50:00 + Ontologies (I) + 2016-10-20T15:30:00 + Ontologies (I) + 2016-10-20T16:50:00 + + + + + + + + + + + + + + + Yuzhong Qu + + + + + + + + 57682429d3d2a18d6a9c2c2b0559a2105ca034a1 + + Yuzhong Qu + + Yuzhong Qu + + + + + + + + + + + + + + + + + + + + + + + Daxin Liu + + 9d396783ee8b86ee384709f5fd61c670c62f2bc6 + + + + + Daxin Liu + + + + Daxin Liu + + + + + + + + + + + + + + + + + + + + + + + + Catherine Faron Zucker + + Catherine Faron Zucker + + + ee710ff7a176af85a72d485b18eebf2cd5d37bcb + Catherine Faron Zucker + + + Femke De Backere + Femke De Backere + + + + 429b07376b671ac75b7d20af309bb56042b5a212 + + + Femke De Backere + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Institutt for informatikk, Universitetet i Oslo + + Institutt for informatikk, Universitetet i Oslo + Institutt for informatikk, Universitetet i Oslo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Eugene Siow, Thanassis Tiropanis and Wendy Hall + 2016-10-20T14:10:00 + 2016-10-20T14:10:00 + 2016-10-20T14:10:00 + 
2016-10-20T13:50:00 + SPARQL-to-SQL on Internet of Things Databases and Streams + + + SPARQL-to-SQL on Internet of Things Databases and Streams + 2016-10-20T13:50:00 + 2016-10-20T14:10:00 + + + + + + + + + + + + + + + Ontolonomy, LLC. + + + Ontolonomy, LLC. + + + Ontolonomy, LLC. + + + + + + + + + + + + + + + + + + + + + Federal University of Espirito Santo + + Federal University of Espírito Santo + Federal University of Espirito Santo + Federal University of Espirito Santo + + + + + + Federal University of Espírito Santo + Federal University of Espírito Santo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Masahide Naemura + + + + + + e412957400cef06b258ce055f7357fc38c9c2ceb + Masahide Naemura + Masahide Naemura + + + + + + + + + + + + + + + + Ontology learning has been an important research area in the Semantic Web field in the last 20 years. Ontology learning systems generate domain models from data (typically text) using a combination of sophisticated methods. In this poster, we study the use of Google's word2vec to emulate a simple ontology learning system, and compare the results to an existing "traditional" ontology learning system. + + + + Using word2vec to Build a Simple Ontology Learning System + + Ontology learning has been an important research area in the Semantic Web field in the last 20 years. Ontology learning systems generate domain models from data (typically text) using a combination of sophisticated methods. In this poster, we study the use of Google's word2vec to emulate a simple ontology learning system, and compare the results to an existing "traditional" ontology learning system. + + + ontology learning + term extraction + ontology learning + Using word2vec to Build a Simple Ontology Learning System + + Using word2vec to Build a Simple Ontology Learning System + + term extraction + + + + word2vec + + + word2vec + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction + Dmitriy Zheleznyakov, Evgeny Kharlamov, Vidar Klungre, Martin G. 
Skjæveland, Dag Hovland, Martin Giese, Ian Horrocks and Arild Waaler + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 598e7b5cf0a728d043ae5f2549620571d9acf1d6 + + + Takahira Yamaguchi + + + + + + Takahira Yamaguchi + Takahira Yamaguchi + + + + + + + + + + + + + + + + 39a490ff48174beb1442f2023b5352ba604c0949 + + + + + + + Stijn De Pestel + + Stijn De Pestel + + Stijn De Pestel + + + + + + + Diego Calvanese + + + + + Diego Calvanese + + + + Diego Calvanese + 9f41f6f9767c215c484e883dfa80e694c03aef54 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2cdeff9ba575d5e67689bae4f0f86c844d69eca0 + Henning Agt-Rickauer + + + + + Henning Agt-Rickauer + + + + + Henning Agt-Rickauer + + + + + + + 2016-10-20T13:30:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 2016-10-20T14:50:00 + 2016-10-20T13:30:00 + Linked Data Measurement + 2016-10-20T14:50:00 + Linked Data Measurement + + + + + + + + + + + + + + + + + + + + + + + + + + + Giuseppe Loseto + + + Giuseppe Loseto + + 9cceef5443af3aa049422b477bb5b9207decc74f + + Giuseppe Loseto + + + + + + + + + + + + RDF + + RDF4J + Big data + Hadoop + Hadoop + Eclipse RDF4J (formerly known as Sesame) is an open source Java framework for processing RDF data. RDF4J framework is extensible through its Storage And Inference Layer (SAIL) to support various RDF stores and inference engines. Apache HBase is the Hadoop database, a distributed and scalable big data store. It is designed to scale up from single servers to thousands of machines. We have connected RDF4J and HBase to receive an extremely scalable RDF store. + Sesame + HBase + SPARQL + HBase + Scalability + Triplestore + Eclipse RDF4J (formerly known as Sesame) is an open source Java framework for processing RDF data. RDF4J framework is extensible through its Storage And Inference Layer (SAIL) to support various RDF stores and inference engines. Apache HBase is the Hadoop database, a distributed and scalable big data store. It is designed to scale up from single servers to thousands of machines. We have connected RDF4J and HBase to receive an extremely scalable RDF store. 
+ + + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + RDF + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + Sesame + + + + SPARQL + Big data + Triplestore + + How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database + + + + Scalability + + + RDF4J + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + Local Council Decisions as Linked Data: a proof of concept + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + Raf Buyle, Pieter Colpaert, Mathias Van Compernolle, Peter Mechant, Veronique Volders, Ruben Verborgh and Erik Mannens + Local Council Decisions as Linked Data: a proof of concept + 2016-10-19T21:00:00 + + + + 2016-10-18T09:00:00 + 2016-10-18T10:30:00 + 2016-10-18T10:30:00 + 2016-10-18T10:30:00 + 2016-10-18T09:00:00 + Opening + 2016-10-18T10:30:00 + Opening + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Mihael Arcan + be0fefb0beeda37059e2be8c52db9fe5736886b4 + + + + + + + + Mihael Arcan + + Mihael Arcan + + + + + RIKEN BioResource Center + + + + + RIKEN BioResource Center + + + RIKEN BioResource Center + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + In this paper, we propose the rst system, so-called Open Programming Knowledge Extraction (OPKE), to automatically extract knowledge from programming Question-Answering (QA) communities. OPKE is the rst step of building a programming-centric knowledge base. Data mining and Natural Language Processing techniques are leveraged to identify paraphrased questions and construct structured information. Preliminary evaluation shows the eectiveness of OPKE. + + Programming QA + Open IE + + NLP + Open IE + + NLP + + + + + Kowledge Extraction + + + + + + + Kowledge Extraction + + Towards Building Open Knowledge Base From Programming Question-Answering Communities + + In this paper, we propose the rst system, so-called Open Programming Knowledge Extraction (OPKE), to automatically extract knowledge from programming Question-Answering (QA) communities. OPKE is the rst step of building a programming-centric knowledge base. Data mining and Natural Language Processing techniques are leveraged to identify paraphrased questions and construct structured information. Preliminary evaluation shows the eectiveness of OPKE. 
+ + Towards Building Open Knowledge Base From Programming Question-Answering Communities + Programming QA + + Towards Building Open Knowledge Base From Programming Question-Answering Communities + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Jean-Paul Calbimonte + + + Jean-Paul Calbimonte + + + Jean-Paul Calbimonte + a0556fac631e917c6c1e1a697fe0e513e8e58f81 + + + + + + + + + + + + + + + 2016-10-19T15:00:00 + 2016-10-19T15:00:00 + + 2016-10-19T14:40:00 + 2016-10-19T15:00:00 + Optimizing Aggregate SPARQL Queries using Materialized RDF Views + + 2016-10-19T14:40:00 + Optimizing Aggregate SPARQL Queries using Materialized RDF Views + + Dilshod Ibragimov, Katja Hose, Torben Bach Pedersen and Esteban Zimanyi + 2016-10-19T15:00:00 + + + + + + SPARQL query processing + + + Federated querying + Big Data infrastructures + + Federated querying + + + + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + + + SPARQL query processing + + + The ability to cross-link large scale data with each other and with structured Semantic Web data, and the ability to uniformly process Semantic Web and other data adds value to both the Semantic Web and to the Big Data community. This paper presents work in progress towards integrating Big Data infrastructures with Semantic Web technologies, allowing for the cross-linking and uniform retrieval of data stored in both Big Data infrastructures and Semantic Web data. The technical challenges involved in achieving this, pertain to both data and system interoperability: we need a way to make the semantics of Big Data explicit so that they can interlink and we need a way to make it transparent for the client applications to query federations of such heterogeneous systems. The paper presents an extension of the Semagrow federated SPARQL query processor that is able to seamlessly federated SPARQL endpoints, Cassandra databases, and Solr databases, and discusses future directions of this line of work. + + The ability to cross-link large scale data with each other and with structured Semantic Web data, and the ability to uniformly process Semantic Web and other data adds value to both the Semantic Web and to the Big Data community. This paper presents work in progress towards integrating Big Data infrastructures with Semantic Web technologies, allowing for the cross-linking and uniform retrieval of data stored in both Big Data infrastructures and Semantic Web data. The technical challenges involved in achieving this, pertain to both data and system interoperability: we need a way to make the semantics of Big Data explicit so that they can interlink and we need a way to make it transparent for the client applications to query federations of such heterogeneous systems. The paper presents an extension of the Semagrow federated SPARQL query processor that is able to seamlessly federated SPARQL endpoints, Cassandra databases, and Solr databases, and discusses future directions of this line of work. 
+ + + + + Big Data infrastructures + + + + + + Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dbe710769d92959350797a5922c0d59a94314050 + + Miriam Fernandez + + + + + + + + + + + Miriam Fernandez + + + Miriam Fernandez + + + + + + + + + + + + + + + + + + + + 3fb810d543b80bd85978ba263cb3cdb8907cb006 + Erhard Rahm + + + + Erhard Rahm + + + Erhard Rahm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + life-science database + + + + + + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + life-science database + Semantic Web + + + + Resource Description Framework + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + + To promote data dissemination and integration of life science +datasets produced in a general research institute, RIKEN, we developed +an infrastructure database named as "RIKEN MetaDatabase", which en- +ables data publication and integration with Resource Description Frame- +work. We implemented simple data managing work +ow, relational data- +base like graphical interface represents data links across laboratories. As +a result, activities of inter-laboratories collaborations and coordination +began to accelerated. Combined with global standardisation activities, +we expect this database can contribute data integration across the world. + + To promote data dissemination and integration of life science +datasets produced in a general research institute, RIKEN, we developed +an infrastructure database named as "RIKEN MetaDatabase", which en- +ables data publication and integration with Resource Description Frame- +work. We implemented simple data managing work +ow, relational data- +base like graphical interface represents data links across laboratories. As +a result, activities of inter-laboratories collaborations and coordination +began to accelerated. Combined with global standardisation activities, +we expect this database can contribute data integration across the world. + Resource Description Framework + + database integration + + RIKEN MetaDatabase: a database publication platform for RIKENs life-science researchers that promotes research collaborations over different research area + + database integration + Semantic Web + + + + + + Daniel Hernandez + + + + Daniel Hernandez + + + + + + b0d6d207a9b309ae69e0a4195ae29aefcdc1eebd + + + + Daniel Hernandez + + + + + + + + + + + + + + + a933b3534829209cf3dbd337cd3c3e509d5c00c0 + + + Hiroshi Fujisawa + Hiroshi Fujisawa + + + + + + Hiroshi Fujisawa + + + + + + + + + + + + + + + + Soil Process + + An Ontology of Soil Properties and Processes + Soil Process + + + + + + An Ontology of Soil Properties and Processes + + + + + + + Assessing the Underworld (ATU) is a large interdisciplinary UK research project addressing urban infrastructure challenges, especially how to make streetworks more efficient and sustainable. One of the key challenges it addresses is integrated inter-asset maintenance. As the assets on the surface of the ground (e.g. pavements) and those buried under it (e.g. 
pipes and cables) are supported by the ground, the properties and processes of soil affect the performance of these assets to a significant degree. In order to make integrated decisions, it is necessary to combine the knowledge and expertise in multiple areas, such as roads, soil, buried assets, sensing, etc. This requires an underpinning knowledge model, in the form of an ontology. Within this context, we present a new ontology for describing soil properties (e.g. soil strength) and processes (e.g. soil compaction), as well as how they affect each other. This ontology can be used to express how the ground affects and is affected by assets buried under the ground or on the ground surface. The ontology is written in OWL 2 and openly available from the University of Leeds data repository: http://doi.org/10.5518/54. + + + An Ontology of Soil Properties and Processes + OWL Ontology + OWL Ontology + + Asset Maintenance + + + Asset Maintenance + + + + Soil Property + Soil Property + + + + Assessing the Underworld (ATU) is a large interdisciplinary UK research project addressing urban infrastructure challenges, especially how to make streetworks more efficient and sustainable. One of the key challenges it addresses is integrated inter-asset maintenance. As the assets on the surface of the ground (e.g. pavements) and those buried under it (e.g. pipes and cables) are supported by the ground, the properties and processes of soil affect the performance of these assets to a significant degree. In order to make integrated decisions, it is necessary to combine the knowledge and expertise in multiple areas, such as roads, soil, buried assets, sensing, etc. This requires an underpinning knowledge model, in the form of an ontology. Within this context, we present a new ontology for describing soil properties (e.g. soil strength) and processes (e.g. soil compaction), as well as how they affect each other. This ontology can be used to express how the ground affects and is affected by assets buried under the ground or on the ground surface. The ontology is written in OWL 2 and openly available from the University of Leeds data repository: http://doi.org/10.5518/54. + + + + + + + + + + + + + + + + Path queries + SPARQL + + + Context-free path queries + + + + Navigational queries + + + Context-free path queries + RDF + + Regular path queries + Navigational graph queries are an important class of queries that can extract implicit binary relations over the nodes of input graphs. Most of the navigational query languages used in the RDF community, e.g. property paths in W3C SPARQL 1.1 and nested regular expressions in nSPARQL, are based on the regular expressions. It is known that regular expressions have limited expressivity; for instance, some natural queries, like same generations-queries} are not expressible with regular expressions. To overcome this limitation, in this paper, we present cfSPARQL, an extension of SPARQL query language equipped with context-free grammars. The cfSPARQL language is strictly more expressive than property paths and nested expressions. The additional expressivity can be used for modelling graph similarities, graph summarization and ontology alignment. Despite the increasing expressivity, we show that cfSPARQL still enjoys a low computational complexity and can be evaluated efficiently. 
+ Context-Free Path Queries on RDF Graphs + + Context-Free Path Queries on RDF Graphs + + RDF + + + + Context-Free Path Queries on RDF Graphs + + + + Regular path queries + + Navigational graph queries are an important class of queries that can extract implicit binary relations over the nodes of input graphs. Most of the navigational query languages used in the RDF community, e.g. property paths in W3C SPARQL 1.1 and nested regular expressions in nSPARQL, are based on the regular expressions. It is known that regular expressions have limited expressivity; for instance, some natural queries, like same generations-queries} are not expressible with regular expressions. To overcome this limitation, in this paper, we present cfSPARQL, an extension of SPARQL query language equipped with context-free grammars. The cfSPARQL language is strictly more expressive than property paths and nested expressions. The additional expressivity can be used for modelling graph similarities, graph summarization and ontology alignment. Despite the increasing expressivity, we show that cfSPARQL still enjoys a low computational complexity and can be evaluated efficiently. + + + + Path queries + SPARQL + + Navigational queries + + + + + + + + + + + Tony Lee + + + + + + + + + + Tony Lee + + Tony Lee + 7feda9d7ed587af441ac90ae67d2c5b09418dddd + + + + + + + + This paper proposes Agriculture Activity Ontology(AAO) as a basis of the core vocabulary of agricultural activity. Since concepts of agriculture activities are formed by the various context such as purpose, means, crop, and field, we organize the agriculture activity ontology as a hierarchy of concepts discriminated by various properties such as purpose, means, crop and field. The vocabulary of agricultural activity is then defined as the subset of the ontology. Since the ontology is consistent, extendable, and capable of some inferences thanks to Description Logics, so the vocabulary inherits these features. The vocabulary is also linked to existing vocabularies such as AGROVOC. It is expected to use in the data format in the agricultural IT system. The vocabulary is adopted as the part of "the guideline for agriculture activity names for agriculture IT systems" issued by Ministry of Agriculture, Forestry and Fisheries (MAFF), Japan. + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + + + This paper proposes Agriculture Activity Ontology(AAO) as a basis of the core vocabulary of agricultural activity. Since concepts of agriculture activities are formed by the various context such as purpose, means, crop, and field, we organize the agriculture activity ontology as a hierarchy of concepts discriminated by various properties such as purpose, means, crop and field. The vocabulary of agricultural activity is then defined as the subset of the ontology. Since the ontology is consistent, extendable, and capable of some inferences thanks to Description Logics, so the vocabulary inherits these features. The vocabulary is also linked to existing vocabularies such as AGROVOC. It is expected to use in the data format in the agricultural IT system. The vocabulary is adopted as the part of "the guideline for agriculture activity names for agriculture IT systems" issued by Ministry of Agriculture, Forestry and Fisheries (MAFF), Japan. 
+ agriculture + ontology + + + agriculture + + agronomic sciences + + + agronomic sciences + + + + ontology + + + + knowledge representation + + + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + + core vocabulary + core vocabulary + + vocabulary management + + vocabulary management + knowledge representation + Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity + + + + + + + + + + + + + + + + + + + + cf5050096ed30e26541ecb2ea068ab01e915aca8 + Roberto Garcia + + + + + Roberto Garcia + + Roberto Garcia + + + + + + + + 2016-10-20T17:00:00 + 2016-10-20T18:00:00 + 2016-10-20T18:00:00 + 2016-10-20T18:00:00 + Town Hall + 2016-10-20T18:00:00 + Town Hall + 2016-10-20T17:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ian Horrocks + + + + 3361a8a2f71036d7ca03076a41f4d8ae08c71e97 + + + + + + Ian Horrocks + + + Ian Horrocks + + + + + + + + + + + + + + + + + + + + + + + + + + Frank Den Hartog + + + + + ec0d87cac503768feb5676d8910729d845b9881f + Frank Den Hartog + + + + + + Frank Den Hartog + + + + + + + + + + + + + + + + + + + + + + + + + + + + Natural Language Generation + Aggregation + Natural Language Generation + Enriching Answers in Question Answering Systems using Linked Data + Enriched Answer + Linked Data + + Lexicalization + + + + + + Question Answering + + + Enriching Answers in Question Answering Systems using Linked Data + Question Answering + Aggregation + + + Linked Data has emerged as the most widely used and the most powerful knowledge source for Question Answering (QA). Although Question Answering using Linked Data (QALD) fills in many gaps in the traditional QA models, the answers are still presented as factoids. This research introduces an answer presentation model for QALD by employing Natural Language Generation (NLG) to generate natural language descriptions to present an informative answer. The proposed approach employs lexicalization, aggregation, and referring expression generation to build a human-like enriched answer utilizing the triples extracted from the entities mentioned in the question as well as the entities contained in the answer. + + Referring Expression Generation + Referring Expression Generation + Lexicalization + + Enriched Answer + Linked Data has emerged as the most widely used and the most powerful knowledge source for Question Answering (QA). Although Question Answering using Linked Data (QALD) fills in many gaps in the traditional QA models, the answers are still presented as factoids. This research introduces an answer presentation model for QALD by employing Natural Language Generation (NLG) to generate natural language descriptions to present an informative answer. The proposed approach employs lexicalization, aggregation, and referring expression generation to build a human-like enriched answer utilizing the triples extracted from the entities mentioned in the question as well as the entities contained in the answer. 
+ + Enriching Answers in Question Answering Systems using Linked Data + Linked Data + + + + + 2016-10-19T14:40:00 + 2016-10-19T15:00:00 + + 2016-10-19T15:00:00 + Zhishi.lemon:On Publishing Zhishi.me as Linguistic Linked Open Data + Haofen Wang, Zhijia Fang, Jorge Gracia, Julia Bosque-Gil and Tong Ruan + Zhishi.lemon:On Publishing Zhishi.me as Linguistic Linked Open Data + + + 2016-10-19T14:40:00 + 2016-10-19T15:00:00 + 2016-10-19T15:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Leipzig + + + University of Leipzig + + + University of Leipzig + + + + + + + + Distributed Queries + Multi-Agent Systems + + + + This study presents a framework to allow human and machine agents to reason and coordinate actions without direct communication mechanisms by sharing distributed Linked Data resources. This framework addresses the problems of querying frequently-updating distributed datasets and guaranteeing transactional consistency. The motivation for this framework comes from the use-case of opportunistic automation of humans-generated procedures. This use-case is based on existing real-world Linked Data representations of human instructions and their integration with machine functionalities. + + + + Multi-Agent Systems + Human-Machine Collaboration over Linked Data + Human-Machine Collaboration over Linked Data + + Distributed Queries + + Human Computation + + Linked Data + + Linked Data + Human-Machine Collaboration + + This study presents a framework to allow human and machine agents to reason and coordinate actions without direct communication mechanisms by sharing distributed Linked Data resources. This framework addresses the problems of querying frequently-updating distributed datasets and guaranteeing transactional consistency. The motivation for this framework comes from the use-case of opportunistic automation of humans-generated procedures. This use-case is based on existing real-world Linked Data representations of human instructions and their integration with machine functionalities. + Human Computation + Human-Machine Collaboration over Linked Data + Human-Machine Collaboration + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 9e46519c300f7b6321d6d2ab61bdfdb5624fa296 + + + + + + + + Ilaria Tiddi + + + Ilaria Tiddi + + Ilaria Tiddi + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-21T13:30:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T14:50:00 + 2016-10-21T13:30:00 + Medical Applications + Medical Applications + 2016-10-21T14:50:00 + + + + + + + Mauro Dragoni + + + Mauro Dragoni + + + cb0fc458c5bd360c5fa92b296c68974ccd1112c5 + + + + Mauro Dragoni + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + DBpedia + + + Lexicalization + + + Linked Data + DBpedia encodes massive amounts of open domain knowledge and is growing by accumulating more triples at the same rate as Wikipedia. However, in order to be able to present the knowledge processed using DBpedia, the applications need to present this knowledge often require natural language formulations of these triples. The RealText-lex2 framework offers a scalable platform to transform these triples to natural language sentences using lexicalization patterns. The framework has evolved from its previous version (RealText-lex) and is comprised of four lexicalization pattern mining modules which derive patterns from a training triple collection. 
These patterns can be then applied on the new triples given that they satisfy a defined set of constraints. + Natural Language Generation + + Linked Data + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + Natural Language Generation + + + + Lexicalization + + DBpedia encodes massive amounts of open domain knowledge and is growing by accumulating more triples at the same rate as Wikipedia. However, in order to be able to present the knowledge processed using DBpedia, the applications need to present this knowledge often require natural language formulations of these triples. The RealText-lex2 framework offers a scalable platform to transform these triples to natural language sentences using lexicalization patterns. The framework has evolved from its previous version (RealText-lex) and is comprised of four lexicalization pattern mining modules which derive patterns from a training triple collection. These patterns can be then applied on the new triples given that they satisfy a defined set of constraints. + + + + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + DBpedia + Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Scalable Link Discovery for Modern Data-Driven Applications + 2016-10-18T15:00:00 + 2016-10-18T15:15:00 + + Scalable Link Discovery for Modern Data-Driven Applications + 2016-10-18T15:15:00 + 2016-10-18T15:15:00 + 2016-10-18T15:15:00 + 2016-10-18T15:00:00 + Kleanthi Georgala + + + + + + + + + + + + + + + + + + Philipp Cimiano + + + + + + Philipp Cimiano + Philipp Cimiano + + + 4d4ded20b46ca7280eb79b3bffdec9c8722e8335 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Leeds + + University of Leeds + University of Leeds + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Amelie Gyrard + b28fc7fb3ee28ba063dec452b9d2a876878d6728 + + Amelie Gyrard + + + + + + + + Amelie Gyrard + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 3cf94eccca55ab0ebd77a50c180bdc41fbfb30cc + Giuseppe De Giacomo + + + + + Giuseppe De Giacomo + Giuseppe De Giacomo + + + + + + + + + + + + + + German University in Cairo + + + + + + + German University in Cairo + + + + German University in Cairo + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Efstratios Sygkounas + + + + + Efstratios Sygkounas + 82bc09d096246a18d6b76011c4fb2388edc45289 + + Efstratios Sygkounas + + + + + + 05aad93467b2184dff5726c93aea878d6c10736a + Gerhard Weikum + Gerhard Weikum + + + + + + + + + Gerhard Weikum + + + + + + Joscha Jäger + + + + + + Joscha Jäger + + b238a1a77f31130d9dff2a6e9931eb50c92ccb05 + Joscha Jäger + + + + + + + + + + + + + + + + + + + + Julia Bosque-Gil + + + + + + + Julia Bosque-Gil + c77a31610a92e47a391e24fcf6cbd73d0c3e1faa + + + + Julia Bosque-Gil + + + + 2016-10-19T15:20:00 + + 2016-10-19T15:00:00 + Xiaowang Zhang, Zhiyong Feng, Xin Wang, Guozheng Rao and 
Paper: Ontological representation of audio features
Keywords: semantic audio analysis; Semantic Web technologies; linked open data; music information retrieval
Abstract: Feature extraction algorithms in Music Informatics aim at deriving statistical and semantic information directly from audio signals. These may range from energies in several frequency bands to musical information such as key, chords or rhythm. There is an increasing diversity and complexity of features and algorithms in this domain, and applications call for a common structured representation to facilitate interoperability, reproducibility and machine interpretability. We propose a solution relying on Semantic Web technologies that is designed to serve a dual purpose: (1) to represent computational workflows of audio features and (2) to provide a common structure for feature data, enabling the use of Linked Open Data principles and technologies in Music Informatics. The Audio Feature Ontology is based on the analysis of existing tools and music informatics literature, which was instrumental in guiding the ontology engineering process. The ontology provides a descriptive framework for expressing different conceptualisations of the audio feature extraction domain and enables designing linked data formats for representing feature data. In this paper, we discuss important modelling decisions and introduce a harmonised ontology library consisting of modular interlinked ontologies that describe the different entities and activities involved in music creation, production and publishing.
People: Pierpaolo Tommasi; Jongmin Lee

Paper: Abstract Meaning Representations as Linked Data
Keywords: Linked Linguistic Data; biological pathways; Sembank; Abstract Meaning Representation; AMR
Abstract: Significant advances in Natural Language Processing (NLP) research are fostered when high-quality annotated corpora are provided for general use. In an effort to develop a sembank (i.e., an annotated corpus dedicated to capturing the semantic meaning of a large set of annotated sentences), NLP researchers have developed the Abstract Meaning Representation (AMR) formulation. Each AMR is a rooted, labeled graph that represents the semantics of a single sentence. Nodes in the core AMR graph represent concepts/entities (such as nouns, PropBank frames, etc.) and edges represent relations between concepts (such as frame-specific arguments, roles, etc.). AMRs have been used to annotate corpora of classic books, news text and the biomedical research literature. Research is progressing on creating automatic parsers to generate AMRs directly from textual input. In the work described here, we map the AMR representation to a linked data format (AMR-LD), adopting the ontological formulation of the underlying AMR faithfully. We describe the process of generating AMR-LD data from standard AMRs derived from biomedical research articles, including mapping named entities to well-known linked-data resources, such as UniProt and PubChem, as well as open-source software to convert AMR data to RDF. We describe the benefits of AMR-LD, including convenient analysis using SPARQL queries and ontology inferences, and embedding into the web of Linked Data. Finally, we discuss the possible impact of semantic web representations that are directly derived from natural language.
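The following is a minimal sketch, not the authors' AMR-LD converter, of turning a toy AMR graph into RDF with rdflib. The amr: namespace and node identifiers are invented for illustration; the real pipeline additionally links entities to resources such as UniProt and PubChem.

```python
# Toy AMR-to-RDF conversion sketch; the amr: vocabulary below is hypothetical.
from rdflib import Graph, Namespace, Literal

AMR = Namespace("http://example.org/amr/")  # invented namespace

# Toy AMR for "The boy wants to go":
# (w / want-01 :ARG0 (b / boy) :ARG1 (g / go-01 :ARG0 b))
nodes = {"w": "want-01", "b": "boy", "g": "go-01"}
edges = [("w", "ARG0", "b"), ("w", "ARG1", "g"), ("g", "ARG0", "b")]

g = Graph()
g.bind("amr", AMR)
for var, concept in nodes.items():
    g.add((AMR[var], AMR["concept"], Literal(concept)))
for src, role, tgt in edges:
    g.add((AMR[src], AMR[role], AMR[tgt]))

print(g.serialize(format="turtle"))  # returns a str in rdflib >= 6
```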
People: Sören Auer; Guohui Xiao
Organisation: British Geological Survey

Paper: An Ontology based Map Converter for Intelligent Vehicles
Keywords: ADAS Ontology; Map Converter; Intelligent Vehicles
Abstract: A sophisticated digital map is an essential resource for intelligent vehicles to localize themselves and retrieve environment information. However, open map sources do not contain enough information for decision making during autonomous driving. Although comprehensive commercial map data can provide precise map knowledge, that data is not in a machine-readable format. Therefore, we retrieve useful knowledge from a high-precision commercial map and convert it into ontology-based data that helps intelligent vehicles perceive the driving environment and make decisions in various traffic scenarios. Besides supporting the development of decision-making systems, the converted map data can be used as a gold standard for evaluating traffic sign detection, road mark detection, and automatic map construction.
Talk: Leveraging Linked Data to Discover Semantic Relations within Data Sources
Authors: Mohsen Taheriyan, Craig Knoblock, Pedro Szekely and José Luis Ambite
Scheduled: 2016-10-21T15:50:00 to 2016-10-21T16:10:00

Person: Wei Emma Zhang

Paper: SOLS: A Semantically Enriched Learning System Using LOD Based Automatic Question Generation
Keywords: History Learning; Linked Open Data; Semantic Open Learning Space; Question Generation
Abstract: The purpose of this research is to use Linked Open Data (LOD) to support history learning on the Internet. The main issue in creating meaningful content-dependent advice for learners is that the system requires an understanding of the learning domain. Learners use the Semantic Open Learning Space (SOLS) to create a machine-understandable concept map that represents their knowledge. SOLS is able to dynamically generate questions depending on each learner's concept map. The system uses history domain ontologies to generate questions that aim to help learners develop deep historical considerations. An evaluation showed that learners using the question generation function could express deeper historical considerations after learning.

Person: Martin Koch

Paper: Apache Spark and Apache Kafka at the rescue of distributed RDF Stream Processing engines
Keywords: Distributed Computing; RDF; Semantic Web; Stream Processing; RSP
Abstract: Due to the growing need to process, in a timely manner, the data produced in the Semantic Web and to derive valuable information and knowledge from it, RDF stream processing (RSP) has emerged as an important research domain. In this paper, we describe the design of an RSP engine that is built upon state-of-the-art Big Data frameworks, namely Apache Kafka and Apache Spark. Together, they support the implementation of a production-ready RSP engine that guarantees scalability, fault tolerance, high availability, low latency and high throughput. Moreover, we highlight that the Spark framework considerably eases the implementation of complex applications requiring libraries as diverse as machine learning, graph processing, query processing and stream processing.
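As a hedged illustration of the Kafka-plus-Spark pairing (not the paper's engine), the sketch below reads a stream of N-Triples lines from a Kafka topic with Spark Structured Streaming and splits them into subject/predicate/object columns. The topic name and broker address are placeholders, and Spark 3+ with the spark-sql-kafka connector on the classpath is assumed.

```python
# Sketch only: N-Triples over Kafka into Spark Structured Streaming.
# Assumes Spark 3+ and the spark-sql-kafka-0-10 package is available.
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, split

spark = SparkSession.builder.appName("rsp-sketch").getOrCreate()

raw = (spark.readStream
       .format("kafka")
       .option("kafka.bootstrap.servers", "localhost:9092")  # placeholder
       .option("subscribe", "rdf-stream")                    # placeholder topic
       .load())

# Each Kafka value is assumed to hold one N-Triples statement ending in " ."
triples = (raw.selectExpr("CAST(value AS STRING) AS nt")
           .select(split(col("nt"), " ", 4).alias("parts"))
           .select(col("parts")[0].alias("s"),
                   col("parts")[1].alias("p"),
                   col("parts")[2].alias("o")))

query = (triples.writeStream
         .outputMode("append")
         .format("console")
         .start())
query.awaitTermination()
```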
Person: Xiongnan Jin

Paper: Optimizing Aggregate SPARQL Queries using Materialized RDF Views
Keywords: RDF aggregate view; analytical query processing; SPARQL 1.1
Abstract: During the past couple of years, more and more data has been published as native RDF datasets. In this setup, both the size of the datasets and the need to process aggregate queries represent challenges for standard SPARQL query processing techniques. To overcome these limitations, materialized views can be created and used as a source of precomputed partial results during query processing. However, materialized view techniques, as proposed in relational databases, do not support RDF specifics, such as incompleteness and the need to support implicit (derived) information. To overcome these challenges, this paper proposes MARVEL, an approach consisting of a view selection algorithm based on an RDF-specific cost model, a view definition syntax, and an algorithm for rewriting SPARQL queries using materialized RDF views. The experimental evaluation shows that the approach can improve query response time by more than an order of magnitude and is able to efficiently handle RDF specifics.
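A toy sketch of the materialized-view idea (not MARVEL itself): precompute an aggregate over a base graph, store the result as triples in a "view" graph, and answer the aggregate query from the view instead of the base data. The ex: vocabulary is invented for this example.

```python
# Materialized aggregate view sketch with rdflib (supports SPARQL 1.1 aggregates).
from rdflib import Graph, Namespace, Literal
from rdflib.namespace import XSD

EX = Namespace("http://example.org/")

base = Graph()
for i, city in enumerate(["Bonn", "Bonn", "Bled"]):
    base.add((EX[f"person{i}"], EX.livesIn, EX[city]))

# Materialize: COUNT of inhabitants per city, stored as ex:inhabitants triples.
view = Graph()
q = ("SELECT ?city (COUNT(?p) AS ?n) "
     "WHERE { ?p <http://example.org/livesIn> ?city } GROUP BY ?city")
for city, n in base.query(q):
    view.add((city, EX.inhabitants, Literal(int(n), datatype=XSD.integer)))

# A query that would aggregate over the base data becomes a cheap view lookup.
for row in view.query(
        "SELECT ?city ?n WHERE { ?city <http://example.org/inhabitants> ?n }"):
    print(row.city, row.n)
```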
Organisation: Universität Bremen
People: M. Tamer Ozsu; Heiko Paulheim

Paper: Materializing the editing history of Wikipedia as linked Data in DBpedia
Keywords: DBpedia; Wikipedia; Editing history
Abstract: We describe a DBpedia extractor that materializes the editing history of Wikipedia pages as linked data, to support historical queries and indicators.

People: Daniel Faria; Dmitriy Zheleznyakov

Talk: Distributed RDF Query Answering with Dynamic Data Exchange
Authors: Anthony Potter, Boris Motik, Yavor Nenov and Ian Horrocks
Scheduled: 2016-10-21T13:30:00 to 2016-10-21T13:50:00

Paper: Can you imagine... a language for combinatorial creativity?
Keywords: computational creativity; description logics; ontologies
Abstract: Combinatorial creativity combines existing concepts in a novel way in order to produce a new concept. For example, we can imagine jewelry that measures blood pressure. For this, we would combine the concept of jewelry with the capabilities of medical devices. Combinatorial creativity can be used to develop new business ideas, to find plots for books or movies, or simply to disrupt conventional thinking. In this paper, we propose a formal language for combinatorial creativity, based on description logics. We show that our language can be used to model existing inventions and (to a limited degree) to generate new concepts.
Organisation: University of Electro-Communications
Person: Dilshod Ibragimov

Paper: Proactive Replication of Dynamic Linked Data for Scalable RDF Stream Processing
Keywords: Materialization; SERVICE; SPARQL Endpoint; RDF stream processing; Replication
Abstract: In this paper, we propose proactive replication of Linked Data for RDF Stream Processing. Our solution achieves fast query processing by replicating subsets of remote RDF datasets before query evaluation. To construct the replication process effectively, we present an update estimation model that handles changes in updates over time. With the update estimation model, we re-compose instances of the replication process in response to problems such as outdated data. Finally, we conduct exhaustive tests with a real-world dataset to verify our solution.
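The abstract does not spell out the update estimation model, so the sketch below only illustrates one plausible ingredient: exponentially smoothed update intervals used to decide when a replicated dataset is likely stale and should be refreshed. The smoothing factor and timestamps are assumptions for the example.

```python
# Hedged illustration of update estimation for proactive replication.
class ReplicaFreshness:
    def __init__(self, alpha: float = 0.3):
        self.alpha = alpha        # smoothing factor (assumed value)
        self.avg_interval = None  # smoothed seconds between remote updates
        self.last_update = None

    def observe_update(self, ts: float) -> None:
        """Record a detected update of the remote dataset at timestamp ts."""
        if self.last_update is not None:
            interval = ts - self.last_update
            if self.avg_interval is None:
                self.avg_interval = interval
            else:
                self.avg_interval = (self.alpha * interval
                                     + (1 - self.alpha) * self.avg_interval)
        self.last_update = ts

    def probably_stale(self, now: float) -> bool:
        """Re-replicate when more than one expected update interval elapsed."""
        if self.avg_interval is None or self.last_update is None:
            return True  # no history yet: replicate proactively
        return (now - self.last_update) > self.avg_interval

f = ReplicaFreshness()
for ts in (0.0, 60.0, 130.0, 185.0):  # synthetic update timestamps (seconds)
    f.observe_update(ts)
print(f.probably_stale(now=300.0))  # -> True: schedule a refresh
```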
Organisations: National University of Ireland, Galway; University Politehnica of Bucharest; Université Paris-Est LIGM
People: Johannes Hoffart; Hideaki Takeda; Valentina Ivanova

Session: Querying (2016-10-19T14:00:00 to 2016-10-19T15:20:00)

Talk: Ontologies Guidelines for Best Practice and a Process to Evaluate Existing Ontologies Mapping Tools and Algorithms
Authors: Ian Harrow, Martin Romacker, Andrea Splendiani, Stefan Negru, Peter Woollard, Scott Markel, Yasmin Alam-Faruque, Martin Koch, Erfan Younesi, James Malone and Ernesto Jimenez-Ruiz
Scheduled: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

People: Lingkun Xue; Martin Serrano; James Malone
Organisation: Université Côte d'Azur, CNRS, INRIA, I3S

Paper: Integrating medical scientific knowledge with the semantically Quantified Self
Keywords: Quantified Self; Data integration; eHealth
Abstract: The assessment of risk in medicine is a crucial task. It depends on scientific knowledge, derived from rigorous clinical studies, about the (quantified) factors affecting biological changes, as well as on particular knowledge about the current status of an individual patient. Existing non-semantic risk prediction tools are typically based on hard-coded scientific knowledge and only cover a very limited range of patient states. This makes them rapidly out of date and limited in application, particularly for patients with co-morbidities (multiple co-occurring conditions). Semantic Web and Quantified Self technologies make it possible to address this task in a much more principled way: to maximise knowledge and data reuse and minimise maintenance requirements, while enabling new and sophisticated applications involving widely available biometric sensors. We present a framework for calculating clinical risk predictions for patients based on automatically gathered biometric data. The framework relies on generic, reusable ontologies for representing clinical risk and sensor readings, and on reasoning to support the integration of data represented according to these ontologies. This integration makes novel use of Semantic Web technologies and supports straightforward extension and maintenance by medical professionals. The framework is evaluated in terms of its predictions, extensibility and ease of use for domain experts.
People: Mariano Rodriguez-Muro; Ioana Manolescu; Yannis Kotidis; Norman Paton; Filip Minic; Eva Fernandez; Michele Ruta; Mohsen Taheriyan
Organisation: Kyushu Institute of Technology

Paper: IRSMG: Accelerating Inexact RDF Subgraph Matching on the GPU
Keywords: RDF; Parallel computing; Type-isomorphism; Subgraph matching; GPU
Abstract: Many existing approaches solve the subgraph matching problem based on a filter-and-refine strategy. The efficiency of those serial approaches relies on the computational capabilities of the CPU. In this paper, we propose an RDF subgraph matching algorithm based on type-isomorphism that uses the GPU, since GPUs offer higher computational performance, more scalability, and a lower price than CPUs. Firstly, we present a concurrent matching model for type-isomorphism so that subgraph matching can be tackled in a parallel way. Secondly, we develop a parallel algorithm implementing the proposed concurrent matching model in a prototype called IRSMG. Finally, we evaluate IRSMG on the LUBM benchmark datasets. The experiments show that IRSMG significantly outperforms the state-of-the-art algorithms on the CPU.
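The abstract does not describe IRSMG's GPU kernels, so the following only illustrates the filter step of a generic filter-and-refine pipeline: pruning candidate data nodes by type before any expensive structural matching. NumPy's vectorized comparison stands in here for the data-parallel work a GPU would perform, one thread per data node.

```python
# Filter stage of filter-and-refine subgraph matching, CPU stand-in for a GPU.
import numpy as np

# Data graph nodes encoded as integer type ids (e.g., 0=Student, 1=Professor, ...).
node_types = np.array([0, 1, 0, 2, 1, 0, 2, 1])

def filter_candidates(query_type: int) -> np.ndarray:
    """Return indices of data nodes whose type matches a query node's type.

    On a GPU this comparison would run as one thread per data node; here the
    vectorized mask plays the same role on the CPU.
    """
    mask = node_types == query_type
    return np.nonzero(mask)[0]

# Candidates for a query node of type 1; refinement (checking edges against the
# query pattern) would then run only on this reduced set.
print(filter_candidates(1))  # -> [1 4 7]
```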
Person: Boris Motik
Organisations: Norwegian University of Science and Technology; University of Waterloo
Person: Damian Bursztyn
Organisation: Springer Nature
Person: Wenrui Wu
Organisation: Fuji Xerox Co., Ltd
People: Michel Héon; Erik Mannens; Norio Kobayashi; Khalil Drira; Alo Allik

Paper: Predicting Energy Consumption of Ontology Reasoning over Mobile Devices
Keywords: Prediction; Ontology Reasoning; Energy; Semantic Web; Random Forests; Mobile Computing
Abstract: The unprecedented growth in mobile devices, combined with advances in Semantic Web technologies, has given birth to opportunities for more intelligent systems on the go. The limited resources of mobile devices, especially energy, demand approaches that make mobile reasoning more applicable. While mobile-cloud integration is a promising method for harnessing the power of semantic technologies in the mobile infrastructure, deciding when to reason with ontologies on mobile devices is an open question. In this paper, we introduce an energy consumption prediction mechanism for ontology reasoning on mobile devices, which allows the feasibility of ontology reasoning on mobile devices to be analysed in terms of energy consumption. The prediction model contributes to mobile-cloud integration and helps improve the further development of ontology and semantic solutions in general.
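Since the paper's feature set and training data are not given in the abstract, the sketch below simply shows the random-forest regression setup the keywords point to, trained on made-up (class count, axiom count) features to predict reasoning energy. All numbers are synthetic.

```python
# Illustration only: random-forest energy prediction on synthetic features.
import numpy as np
from sklearn.ensemble import RandomForestRegressor

rng = np.random.default_rng(0)
# Synthetic training set: [number of classes, number of axioms] per ontology.
X = rng.integers(10, 5000, size=(200, 2)).astype(float)
# Invented ground truth: energy (joules) grows with both features, plus noise.
y = 0.002 * X[:, 0] + 0.001 * X[:, 1] + rng.normal(0, 0.5, size=200)

model = RandomForestRegressor(n_estimators=100, random_state=0)
model.fit(X, y)

# Predict energy for a new ontology with 1200 classes and 3000 axioms; a
# mobile-cloud scheduler could compare this against the cost of offloading.
print(model.predict([[1200.0, 3000.0]]))
```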
People: Carlos Rojas; Matias Junemann; Olaf Hartig; Qingliang Miao

Session: Interaction (2016-10-19T11:00:00 to 2016-10-19T12:20:00)

Talk: Affective Graphs: The Visual Appeal of Linked Data
Authors: Suvodeep Mazumdar, Daniela Petrelli, Khadija Elbedweihy, Vitaveska Lanfranchi and Fabio Ciravegna
Scheduled: 2016-10-19T12:00:00 to 2016-10-19T12:20:00

Paper: Ontologies for Knowledge Graphs: Breaking the Rules
Keywords: tuple-generating dependencies; Datalog; finite expansion set; functional dependency; bounded treewidth set; finite unification set; existential rules
Abstract: Large-scale knowledge graphs (KGs) abound in industry and academia. They provide a unified format for integrating information sources, aided by standards such as the W3C RDB to RDF Mapping Language. Meaningful semantic integration, however, is much harder than syntactic alignment. Ontologies could be an interoperable and declarative solution to this task. At a closer look, however, we find that popular ontology languages, such as OWL and Datalog, cannot express even the most basic relationships on the normalised data format of KGs. Existential rules are more powerful, but may make reasoning undecidable, and normalising them to suit KGs can destroy syntactic restrictions that ensure decidability and low complexity. We study this issue for several classes of existential rules and derive more general syntactic criteria to recognise well-behaved rule-based ontologies over knowledge graphs.
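For readers unfamiliar with the rule language the abstract refers to, a generic existential rule (tuple-generating dependency) has the shape below. The predicates are illustrative only, not taken from the paper; the point is the existentially quantified head variable, which plain Datalog cannot introduce.

```latex
% Generic existential rule (TGD); predicate names are illustrative only.
% The existential variable z in the head is what plain Datalog cannot create.
\forall x\,\forall y\;\bigl(\mathit{spouse}(x,y)\rightarrow
  \exists z\;\mathit{marriage}(z)\wedge\mathit{partner}(z,x)\wedge\mathit{partner}(z,y)\bigr)
```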
Person: Emanuele Della Valle

Talk: Visualizing Semantic Table Annotations with TableMiner+
Authors: Suvodeep Mazumdar and Ziqi Zhang
Scheduled: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Talk: RDF2Vec: RDF Graph Embeddings for Data Mining
Authors: Petar Ristoski and Heiko Paulheim
Scheduled: 2016-10-21T10:50:00 to 2016-10-21T11:10:00

Organisations: Institute of Information Systems, University of Lübeck; Open Knowledge Belgium
Person: Fabio Ciravegna

Paper: Operator-aware approach for boosting performance in RDF stream processing
Keywords: Linked Stream Data; Continuous queries; Linked Data; Semantic Web; stream processing
Abstract: To enable efficiency in stream processing, the evaluation of a query is usually performed over bounded parts of (potentially) unbounded streams, i.e., processing windows "slide" over the streams. To avoid inefficient re-evaluations of already evaluated parts of a stream with respect to a query, incremental evaluation strategies are applied, i.e., the query results are obtained incrementally from the result set of the preceding processing state without having to re-evaluate all input buffers. This method is highly efficient, but it comes at the cost of having to maintain processing state, which is not trivial and may defeat the performance advantages of the incremental evaluation strategy. In the context of RDF streams, the problem is further aggravated by the hard-to-predict evolution of the structure of RDF graphs over time and by the application of sub-optimal implementation approaches, e.g., using relational technologies for storing data and processing states, which incur significant performance drawbacks for graph-based query patterns. To address these performance problems, this paper proposes a set of novel operator-aware data structures coupled with incremental evaluation algorithms which outperform the counterparts of relational stream processing systems. This claim is demonstrated through extensive experimental results on both simulated and real datasets.
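A minimal sketch of the incremental-evaluation idea described above (not the paper's data structures): a running aggregate over a sliding time window is maintained by applying only the deltas, newly arrived items and expired ones, instead of re-scanning the whole window on each slide. The predicate and triples are invented for the example.

```python
# Incremental sliding-window aggregate: apply only arrival/expiry deltas.
from collections import deque

class SlidingCount:
    """Incrementally counts triples matching a predicate within a time window."""

    def __init__(self, window_seconds: float, predicate: str):
        self.window = window_seconds
        self.predicate = predicate
        self.buffer = deque()  # (timestamp, triple) pairs inside the window
        self.count = 0         # maintained incrementally, never recomputed

    def insert(self, ts: float, triple: tuple) -> None:
        self.buffer.append((ts, triple))
        if triple[1] == self.predicate:
            self.count += 1    # delta for the arrival
        self._expire(ts)

    def _expire(self, now: float) -> None:
        while self.buffer and self.buffer[0][0] <= now - self.window:
            _, old = self.buffer.popleft()
            if old[1] == self.predicate:
                self.count -= 1  # delta for the expiry

w = SlidingCount(window_seconds=10.0, predicate=":observes")
w.insert(1.0, (":sensor1", ":observes", ":pm10"))
w.insert(5.0, (":sensor2", ":observes", ":pm25"))
w.insert(14.0, (":sensor1", ":observes", ":pm10"))  # expires the first triple
print(w.count)  # -> 2
```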
Person: Harald Sack
Organisation: Monash University
People: Fernando Florenzano; Terue Takatsuki
Organisation: Max Planck Institute for Informatics

Session: Streams (2016-10-20T13:30:00 to 2016-10-20T14:50:00)
Session: Knowledge Representation (2016-10-19T11:00:00 to 2016-10-19T12:20:00)

Talk: An interactive visualisation for RDF data
Authors: Fernando Florenzano, Denis Parra, Juan L. Reutter and Freddie Venegas
Scheduled: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

People: Marta Sabou; Sebastian Hellmann; Roman Kontchakov

Paper: MusicWeb: music discovery with open linked semantic metadata
Keywords: Linked Open Data; Semantic Web; music metadata; semantic audio analysis; music information retrieval
Abstract: This demo presents MusicWeb, a novel platform for linking music artists within a web-based application for discovering associations between them. MusicWeb provides a browsing experience using connections that are either extra-musical or tangential to music, such as the artists' political affiliation or social influence, or intra-musical, such as the artists' main instrument or most favoured musical key. The platform integrates open linked semantic metadata from various Semantic Web, music recommendation and social media data sources. The connections are further supplemented by thematic analysis of journal articles, blog posts and content-based similarity measures focussing on high-level musical categories.
Talk: OWLAx: A Protege Plugin to Support Ontology Axiomatization through Diagramming
Authors: Md. Kamruzzaman Sarker, Adila A. Krisnadhi and Pascal Hitzler
Scheduled: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Person: Atsuko Yamaguchi

Paper: A Protege Plugin with Swift Linked Data Miner
Keywords: protege plugin; linked data mining; swift linked data miner
Abstract: We present a Protege plugin implementing Swift Linked Data Miner, an anytime algorithm for extending an ontology with new subsumptions. The algorithm mines an RDF graph accessible via a SPARQL endpoint and proposes new SubClassOf axioms to the user.
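As a hedged sketch, not Swift Linked Data Miner itself: one naive way to surface SubClassOf candidates from a SPARQL endpoint is to look for pairs of classes whose sampled instance sets fully overlap. The endpoint and LIMIT are placeholders, and this query would be expensive on a large public endpoint.

```python
# Naive SubClassOf candidate mining over a SPARQL endpoint (illustration only).
from SPARQLWrapper import SPARQLWrapper, JSON

endpoint = SPARQLWrapper("https://dbpedia.org/sparql")  # example endpoint
endpoint.setQuery("""
    SELECT DISTINCT ?sub ?super WHERE {
      ?x a ?sub , ?super .
      FILTER(?sub != ?super)
      # candidate check: no instance of ?sub lacks the type ?super
      FILTER NOT EXISTS { ?y a ?sub . FILTER NOT EXISTS { ?y a ?super } }
    } LIMIT 10
""")
endpoint.setReturnFormat(JSON)

results = endpoint.query().convert()
for row in results["results"]["bindings"]:
    print(f"candidate: {row['sub']['value']} rdfs:subClassOf {row['super']['value']}")
```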
People: Anna Lisa Gentile; Olivier Curé; Chiara Ghidini
Organisation: Duke University

Talk: Containment of Expressive SPARQL Navigational Queries
Authors: Melisachew Wudage Chekol and Giuseppe Pirrò
Scheduled: 2016-10-19T14:20:00 to 2016-10-19T14:40:00

People: Fabio Vitali; David Martin; Andrea Maurino

Talk: Type Prediction for Entities in DBpedia by Aggregating Multilingual Resources
Authors: Thi-Nhu Nguyen, Hideaki Takeda, Khai Nguyen, Ryutaro Ichise and Tuan-Dung Cao
Scheduled: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Person: Melisachew Wudage Chekol
Organisation: Australian Bureau of Statistics / Australian National University

Talk: Abstract Meaning Representations as Linked Data
Authors: Gully Burns, Ulf Hermjakob and José Luis Ambite
Scheduled: 2016-10-21T10:50:00 to 2016-10-21T11:10:00

People: Takeshi Morita; Saverio Ieva

Talk: Structuring Linked Data Search Results Using Probabilistic Soft Logic
Authors: Duhai Alshukaili, Alvaro A. A. Fernandes and Norman Paton
Scheduled: 2016-10-20T13:50:00 to 2016-10-20T14:10:00
Person: Christof Mahieu
Organisation: Ritsumeikan University
Person: Thomas Vanhove

Paper: Enabling combined software and data engineering at Web-scale: The ALIGNED suite of ontologies
Keywords: Software Engineering; Linked data; Data Engineering; Unified Governance; Ontologies
Abstract: Effective, collaborative integration of software services and big data to develop insightful analytics for Web-scale systems is now a crucial techno-economic challenge. This requires new combined data and software engineering processes and tools. Semantic metadata standards such as RDFS and OWL, and linked data principles, provide a technical grounding for such integrated systems, given an appropriate model of the domain. In this paper we introduce the ALIGNED suite of ontologies, or vocabularies, specifically designed to model the information exchange needs of combined software and data engineering processes. The models have been deployed to enable: tool-chain integration, such as the exchange of data quality reports; cross-domain communication, such as interlinked data and software unit testing; and mediation of the system design process through the capture of design intents and as a source of context for model-driven software engineering processes. These ontologies are deployed in trial live web-scale, data-intensive system development environments in both the commercial and academic domains. We exemplify the usage of the suite on a complex collaborative software and data engineering scenario from the legal information system domain.
Organisation: Osaka Prefecture University

Talk: Generating Conference Linked Open Data in One Click
Authors: Andrea Giovanni Nuzzolese, Anna Lisa Gentile, Valentina Presutti and Aldo Gangemi
Scheduled: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Person: Danh Le-Phuoc

Paper: Affective Graphs: The Visual Appeal of Linked Data
Keywords: Aesthetics; Visual Analytics; Information Visualisation; Semantic Web; Linked Data
Abstract: The essence and value of Linked Data lie in the ability of humans and machines to query, access and reason upon highly structured and formalised data. Ontology structures provide an unambiguous description of the structure and content of data. While a multitude of software applications and visualization systems have been developed over the past years for Linked Data, there is still a significant gap between applications that consume Linked Data and interfaces designed with a significant focus on aesthetics. Although the importance of aesthetics in affecting the usability, effectiveness and acceptability of user interfaces has long been recognised, little or no explicit attention has been paid to the aesthetics of Linked Data applications. In this paper, we introduce a formalised approach to developing aesthetically pleasing semantic web interfaces by following aesthetic principles and guidelines identified in the literature. We apply these principles to design and develop a generic approach of using visualizations to support exploration of Linked Data, in an interface that is pleasing to users. This provides users with means to browse ontology structures, enriched with statistics of the underlying data, facilitating exploratory activities and enabling visual queries for highly precise information needs. We evaluated our approach in three ways: an initial objective evaluation comparing our approach with other well-known interfaces for the semantic web, and two user evaluations with semantic web researchers.
Paper: "Affective Graphs: The Visual Appeal of Linked Data"
Keywords: Aesthetics; Visual Analytics; Information Visualisation; Semantic Web; Linked Data
Abstract: The essence and value of Linked Data lies in the ability of humans and machines to query, access and reason upon highly structured and formalised data. Ontology structures provide an unambiguous description of the structure and content of data. While a multitude of software applications and visualization systems have been developed over the past years for Linked Data, there is still a significant gap between applications that consume Linked Data and interfaces that have been designed with significant focus on aesthetics. Though the importance of aesthetics in affecting the usability, effectiveness and acceptability of user interfaces has long been recognised, little or no explicit attention has been paid to the aesthetics of Linked Data applications. In this paper, we introduce a formalised approach to developing aesthetically pleasing semantic web interfaces by following aesthetic principles and guidelines identified in the literature. We apply such principles to design and develop a generic approach of using visualizations to support exploration of Linked Data, in an interface that is pleasing to users. This provides users with means to browse ontology structures, enriched with statistics of the underlying data, facilitating exploratory activities and enabling visual query for highly precise information needs. We evaluated our approach in three ways: an initial objective evaluation comparing our approach with other well-known interfaces for the semantic web, and two user evaluations with semantic web researchers.

Person: Anastasia Dimou (id 6ae5a96a9885e213c64efd5848f4dd5b6dd1c16a)
Person: Catia Pesquita (id ddae780bd6b450041ec6eafd76849aebeecbf987)

Paper: "Aemoo: Linked Data exploration based on Knowledge Patterns"
Keywords: Visual Exploration; Exploratory search; Knowledge exploration; Knowledge Patterns; Analysis of Linked Data
Abstract: This paper presents a novel approach to Linked Data exploration that uses Encyclopedic Knowledge Patterns (EKPs) as relevance criteria for selecting, organising, and visualising knowledge. EKPs are discovered by mining the linking structure of Wikipedia and evaluated by means of a user-based study, which shows that they are cognitively sound as models for building entity summarisations. We implemented a tool named Aemoo that supports EKP-driven knowledge exploration and integrates data coming from heterogeneous resources, namely static and dynamic knowledge as well as text and Linked Data. Aemoo is evaluated by means of controlled, task-driven user experiments in order to assess its usability and its ability to provide relevant and serendipitous information as compared to two existing tools: Google and RelFinder.

Session: "Rule-Based Reasoning using State Space Search", by Dieter De Paepe, Ruben Verborgh, Erik Mannens and Rik Van de Walle (2016-10-19, 18:00-21:00)

Organisation: National Technical University of Athens
Organisation: VISTA GmbH
Organisation: Poznan University of Technology
Person: Youngkyoung Ham (id bbd09cfc7eff31c89fc07d652f64a0670beec621)

Paper: "Ontop: Answering SPARQL queries over relational databases"
Keywords: OBDA; Databases; SPARQL; Ontologies; OWL; RDF; R2RML; Ontop
Abstract: We present Ontop, an open-source Ontology-Based Data Access (OBDA) system that allows for querying relational data sources through a conceptual representation of the domain of interest, provided in terms of an ontology, to which the data sources are mapped. Key features of Ontop are its solid theoretical foundations; a virtual approach to OBDA, which avoids materializing triples and is implemented through the query rewriting technique; extensive optimizations exploiting all elements of the OBDA architecture; its compliance with all relevant W3C recommendations (including SPARQL queries, R2RML mappings, and OWL 2 QL and RDFS ontologies); and its support for all major relational databases.
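The virtual OBDA approach means a client only ever sees a SPARQL endpoint; the rewriting into SQL happens behind it. A minimal Apache Jena client against such an endpoint might look as follows; the endpoint URL and the query are placeholders for illustration, not part of the Ontop distribution.

```java
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.ResultSet;

public class ObdaQueryClient {
    public static void main(String[] args) {
        // Hypothetical endpoint exposing a relational database as a virtual RDF graph.
        String endpoint = "http://localhost:8080/sparql";
        String query =
            "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
            "SELECT ?s ?label WHERE { ?s rdfs:label ?label } LIMIT 10";
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(endpoint, query)) {
            ResultSet results = qe.execSelect(); // rewritten to SQL server-side; the client is unaware
            while (results.hasNext()) {
                System.out.println(results.next());
            }
        }
    }
}
```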
Person: Hiroshi Masuya (id a241f0cbbef38075d6656af5d98a712b95b4aa00)
Person: Charlotte Jewell (id 8be120b54dd3dac2bdce06124b1e30353c832c26)

Paper: "FarolApp: Live Linked Data on Light Pollution"
Keywords: light pollution; linked data; crowdsourcing; evolution
Abstract: FarolApp is a mobile web application that aims to increase awareness of light pollution by generating illustrative maps for cities and by encouraging citizens and public administrations to provide street light information in a ubiquitous and interactive way using online street views. In addition to the maps, FarolApp builds on existing sources to generate and provide up-to-date data via crowdsourced user annotations. Generated data is available as dereferenceable Linked Data resources in several RDF formats and via a queryable SPARQL endpoint. The demo presented in this paper illustrates how FarolApp maintains continuously evolving Linked Data that reflects the current status of city street light infrastructures, and uses that data to generate light pollution maps.

Organisation: Université Paris 13 & STLab (CNR-ISTC)

Session: "Enhancing Rule-based OWL Reasoning on Spark", by Zhihui Liu, Xiaowang Zhang and Zhiyong Feng (2016-10-19, 18:00-21:00)

Person: Zhiyong Feng (id de8b98e10e9b1cf660322f919ca0f3e2f75ae736)
Person: Yavor Nenov (id b4e08b0ac920df0d9f2a84703c4cd08f802fb42a)

Paper: "Linked Open Vocabularies (LOV): a gateway to reusable semantic vocabularies on the Web"
Keywords: Linked Open Vocabularies; LOV; Vocabulary catalogue; Ontology search; Linked Data
Abstract: One of the major barriers to the deployment of Linked Data is the difficulty that data publishers have in determining which vocabularies to use to describe the semantics of data. This system report describes Linked Open Vocabularies (LOV), a high quality catalogue of reusable vocabularies for the description of data on the Web. The LOV initiative gathers and makes visible indicators that have not been previously harvested, such as the interconnections between vocabularies and version history along with past and current referent (individual or organization). The report details the various components of the system along with some innovations, such as the introduction of a property-level boost in the vocabulary search scoring which takes into account the property's type (e.g. rdfs:label, dc:comment) associated with a matching literal value. By providing an extensive range of data access methods (full-text search, SPARQL endpoint, API, data dump or UI), the project aims at facilitating the reuse of well-documented vocabularies in the Linked Data ecosystem. The adoption of LOV by many applications and methods shows the importance of such a set of vocabularies and related features for the ontology design activity and the publication of data on the Web.

Person: Dieter De Paepe (id e2216c32b8b5be98e0e4e6a5632c9a869e684107)
Person: Shen Gao (id 3b33725c7633555310bb4084e06053bd6d2be33e)
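Of the access methods the LOV report lists, the SPARQL endpoint is the easiest to script against. A minimal sketch with Apache Jena follows; the endpoint URL and the VOAF vocabulary class are assumptions to be checked against the current LOV documentation.

```java
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;

public class LovVocabularyList {
    public static void main(String[] args) {
        // Assumed endpoint; see the LOV site for the currently published one.
        String endpoint = "https://lov.linkeddata.es/dataset/lov/sparql";
        String query =
            "PREFIX voaf: <http://purl.org/vocommons/voaf#>\n" +
            "PREFIX dcterms: <http://purl.org/dc/terms/>\n" +
            "SELECT ?vocab ?title WHERE { ?vocab a voaf:Vocabulary ; dcterms:title ?title } LIMIT 20";
        try (QueryExecution qe = QueryExecutionFactory.sparqlService(endpoint, query)) {
            // List the first few catalogued vocabularies with their titles.
            qe.execSelect().forEachRemaining(row ->
                System.out.println(row.get("vocab") + "  " + row.get("title")));
        }
    }
}
```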
Paper: "A Fine-Grained Evaluation of SPARQL Endpoint Federation Systems"
Keywords: RDF; SPARQL federation; Web of Data
Abstract: The Web of Data has grown enormously over the last years. Currently, it comprises a large compendium of interlinked and distributed datasets from multiple domains. Running complex queries on this compendium often requires accessing data from different endpoints within one query. The abundance of datasets and the need to run complex queries has thus motivated a considerable body of work on SPARQL query federation systems, the dedicated means to access data distributed over the Web of Data. However, the granularity of previous evaluations of such systems has not allowed insights to be derived concerning their behaviour in the different steps involved in federated query processing. In this work, we perform extensive experiments to compare state-of-the-art SPARQL endpoint federation systems using the comprehensive performance evaluation framework FedBench. In addition to considering the traditional query runtime as an evaluation criterion, we extend the scope of our performance evaluation by considering criteria which have not been paid much attention in previous studies. In particular, we consider the number of sources selected, the total number of SPARQL ASK requests used, the completeness of answers, and the source selection time, and we show that they have a significant impact on the overall query runtime of existing systems. Moreover, we extend FedBench to mirror a highly distributed data environment and assess the behaviour of existing systems using the same performance criteria. As a result, we provide a detailed analysis of the experimental outcomes that reveals novel insights for improving current and future SPARQL federation systems.

Person: German Rigau
Person: Radityo Eko Prasojo (id 5a968589cad6977d19faf62ea7d325861d9aaeec)
Person: Fabien Gandon (id 583b2ab35d1cef69e21b25a7f36ec5a36e11d31d)

Session: "USE-RB: Benchmarking how reasoners work in harmony with modern hardware", by Christophe Gravier and Julien Subercaze (2016-10-19, 18:00-21:00)

Person: Fanghuai Hu (id e556260231af1a25b322d9486bd2fbb20c702ca6)
Person: Pieter Bonte (id a500e551ca8091a80cab79c2204408231496684d)
Person: Parma Nand (id 8915a3d2eb4ef06ef9a8120814babc2cb65f9eac)
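The source-selection step that the federation evaluation above singles out is easy to picture in code: before executing a join, a federation engine probes each endpoint with a SPARQL ASK per triple pattern, and every probe adds network latency. A minimal sketch, assuming Apache Jena and placeholder endpoint URLs:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;

public class AskSourceSelector {

    /** Returns the endpoints that claim at least one triple matching the pattern. */
    static List<String> selectSources(List<String> endpoints, String triplePattern) {
        List<String> selected = new ArrayList<>();
        String ask = "ASK { " + triplePattern + " }";
        for (String endpoint : endpoints) {
            try (QueryExecution qe = QueryExecutionFactory.sparqlService(endpoint, ask)) {
                if (qe.execAsk()) {
                    selected.add(endpoint);
                }
            } catch (Exception e) {
                // An unreachable endpoint simply contributes no sources.
            }
        }
        return selected;
    }

    public static void main(String[] args) {
        List<String> endpoints = Arrays.asList(          // placeholder endpoints
            "http://example.org/sparql-a", "http://example.org/sparql-b");
        System.out.println(selectSources(endpoints, "?s <http://example.org/p> ?o"));
    }
}
```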
Paper: "Quality Assessment for Linked Data: A Survey"
Keywords: Survey; data quality; assessment; Linked Data
Abstract: The development and standardization of semantic web technologies has resulted in an unprecedented volume of data being published on the Web as Linked Data (LD). However, we observe widely varying data quality ranging from extensively curated datasets to crowdsourced and extracted data of relatively low quality. In this article, we present the results of a systematic review of approaches for assessing the quality of LD. We gather existing approaches and analyze them qualitatively. In particular, we unify and formalize commonly used terminologies across papers related to data quality and provide a comprehensive list of 18 quality dimensions and 69 metrics. Additionally, we qualitatively analyze the 30 core approaches and 12 tools using a set of attributes. The aim of this article is to provide researchers and data curators a comprehensive understanding of existing work, thereby encouraging further experimentation and development of new approaches focused towards data quality, specifically for LD.

Person: Lara Piccolo (id d121bf16f374de868ec9203902995d9f1a903b8b)

Session: "Parallel sort-merge-join reasoning", by Julien Subercaze and Christophe Gravier (2016-10-19, 18:00-21:00)

Organisation: CNR-ISTC

Paper: "Linked Data (in resourceless) Platforms: a mapping for Constrained Application Protocol"
Keywords: Linked Data Platform; CoAP; Semantic Web of Things
Abstract: This paper proposes a mapping of the Linked Data Platform (LDP) specification for the Constrained Application Protocol (CoAP). The main motivation stems from the fact that the LDP W3C Recommendation presents resource management primitives for HTTP only. Hence, use cases related to Web of Things scenarios, where HTTP-based communication and infrastructures are unfeasible, are partially neglected. A general translation of LDP-HTTP requests and responses is provided, as well as a fully comprehensive framework for HTTP-to-CoAP proxying. The theoretical work is corroborated by an experimental campaign using the W3C Test Suite for LDP.
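Most of the metrics catalogued in the quality survey above reduce to counting statements that satisfy some property. As an illustration (a completeness-style score of my own choosing, not a metric quoted from the survey), this Apache Jena snippet computes the share of subjects carrying an rdfs:label; the input file name is a placeholder.

```java
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.vocabulary.RDFS;

public class LabelCompleteness {
    public static void main(String[] args) {
        Model model = RDFDataMgr.loadModel("dataset.ttl"); // any local RDF file
        long total = 0, labelled = 0;
        for (Resource subject : model.listSubjects().toList()) {
            total++;
            if (subject.hasProperty(RDFS.label)) labelled++; // subject is human-readable
        }
        double score = total == 0 ? 0.0 : 100.0 * labelled / total;
        System.out.printf("Label completeness: %.1f%%%n", score);
    }
}
```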
Paper: "Building event-centric knowledge graphs from news"
Keywords: Natural language processing; Information integration; Event extraction; Event-centric knowledge; Real world data; Big data
Abstract: Knowledge graphs have gained increasing popularity in the past couple of years, thanks to their adoption in everyday search engines. Typically, they consist of fairly static and encyclopedic facts about persons and organizations (e.g. a celebrity's birth date, occupation and family members) obtained from large repositories such as Freebase or Wikipedia. In this paper, we present a method and tools to automatically build knowledge graphs from news articles. As news articles describe changes in the world through the events they report, we present an approach to create Event-Centric Knowledge Graphs (ECKGs) using state-of-the-art natural language processing and semantic web techniques. Such ECKGs capture long-term developments and histories on hundreds of thousands of entities and are complementary to the static encyclopedic information in traditional knowledge graphs. We describe our event-centric representation schema, the challenges in extracting event information from news, our open source pipeline, and the knowledge graphs we have extracted from four different news corpora: general news (Wikinews), the FIFA world cup, the Global Automotive Industry, and Airbus A380 airplanes. Furthermore, we present an assessment of the accuracy of the pipeline in extracting the triples of the knowledge graphs. Moreover, through an event-centered browser and visualization tool we show how approaching information from news in an event-centric manner can increase the user's understanding of the domain, facilitate the reconstruction of news story lines, and enable exploratory investigation of facts hidden in the news.

Person: Zhihui Liu (id 360997bfdc91a53cacee965dca5f86d68117543c)
Person: Guozhu Dong (id 4379fc3d8337cff050550e8527912744d5b7a4ec)
Person: Lorraine McNerney (id 646f2520c086e0052617e0ac2a8d2bb57df27c2b)
Organisation: EOXPLORE UG
Person: Ulf Hermjakob (id 7d2b13bdb66ac486307bb2c77f6aaed8235516cd)
Organisation: Stanford University
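The event-centric schema described in the knowledge-graph entry above boils down to reifying each event as a resource with links to its participants, time and type. A minimal Jena sketch of that shape follows; the ex: namespace and property names are invented for illustration and are not the paper's actual schema.

```java
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;

public class EventTriples {
    public static void main(String[] args) {
        String ex = "http://example.org/";                  // placeholder namespace
        Model m = ModelFactory.createDefaultModel();
        Property actor = m.createProperty(ex, "actor");
        Property eventType = m.createProperty(ex, "eventType");
        Property atTime = m.createProperty(ex, "atTime");
        // The event itself is a first-class resource, not a property of an entity.
        m.createResource(ex + "event/42")
            .addProperty(actor, m.createResource(ex + "entity/AirbusA380"))
            .addProperty(eventType, "delivery")
            .addProperty(atTime, "2016-10-19");
        m.write(System.out, "TURTLE");
    }
}
```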
Paper: "Domain Adaptation for Ontology Localization"
Keywords: ontology localization; statistical machine translation; domain adaptation
Abstract: Ontology localization is the task of adapting an ontology to a different cultural context, and has been identified as an important task in the context of the Multilingual Semantic Web vision. The key task in ontology localization is translating the lexical layer of an ontology, i.e., its labels, into some foreign language. For this task, we hypothesize that the translation quality can be improved by adapting a machine translation system to the domain of the ontology. To this end, we build on the success of existing statistical machine translation (SMT) approaches, and investigate the impact of different domain adaptation techniques on the task. In particular, we investigate three techniques: (i) enriching a phrase table by domain-specific translation candidates acquired from existing Web resources, (ii) relying on Explicit Semantic Analysis as an additional technique for scoring a certain translation of a given source phrase, as well as (iii) adaptation of the language model by means of weighting n-grams with scores obtained from topic modelling. We present in detail the impact of each of these three techniques on the task of translating ontology labels. We show that these techniques have a generally positive effect on the quality of translation of the ontology and that, in combination, they provide a significant improvement in quality.

Organisation: Vienna University of Economics and Business - WU Wien

Paper: "SPARQLGX: Efficient Distributed Evaluation of SPARQL with Apache Spark"
Keywords: SPARQL Evaluator; Distributed RDF Store; Experimental Validation
Abstract: SPARQL is the W3C standard query language for querying data expressed in the Resource Description Framework (RDF). The increasing amounts of RDF data available raise a major need and research interest in building efficient and scalable distributed SPARQL query evaluators. In this context, we propose and share SPARQLGX: our implementation of a distributed RDF datastore based on Apache Spark. SPARQLGX is designed to leverage existing Hadoop infrastructures for evaluating SPARQL queries. SPARQLGX relies on a translation of SPARQL queries into executable Spark code that adopts evaluation strategies according to (1) the storage method used and (2) statistics on data. We show that SPARQLGX makes it possible to evaluate SPARQL queries on billions of triples distributed across multiple nodes, while providing attractive performance figures. We report on experiments which show how SPARQLGX compares to related state-of-the-art implementations. Using a simple design, SPARQLGX already represents an interesting alternative in several scenarios. We share it as a resource for the further construction of efficient SPARQL evaluators.

Person: Tomokazu Yoshida (id b06220bfcb6dff56ff364afd456f951eefaafeaa)
Organisation: Information Sciences Institute, University of Southern California
Person: Ricardo Usbeck (id cf1c5ea391961440c9c6f7b8348d0f920a3cea77)
Person: Aram Galstyan (id 1984133103dbdcc226687d631320a8bc7ca117ac)
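The core idea behind the SPARQLGX entry above, compiling triple patterns to distributed operations and joining on shared variables, can be sketched in a few lines of Spark's Java API. This is an illustration of the general translation idea under simplified assumptions (whitespace-separated s/p/o lines, local master, placeholder IRIs), not SPARQLGX's actual generated code.

```java
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

public class TwoPatternJoin {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("TwoPatternJoin").setMaster("local[*]");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            // Triples as whitespace-separated "s p o" lines (a simplification of N-Triples).
            JavaRDD<String[]> triples = sc.textFile("triples.txt").map(l -> l.split("\\s+"));
            JavaPairRDD<String, String> p1 = bySubject(triples, "<http://example.org/p1>");
            JavaPairRDD<String, String> p2 = bySubject(triples, "<http://example.org/p2>");
            // Join on the shared subject variable, as in: ?s p1 ?o1 . ?s p2 ?o2 .
            p1.join(p2).take(10).forEach(r ->
                System.out.println(r._1() + " " + r._2()._1() + " " + r._2()._2()));
        }
    }

    /** One triple pattern becomes: filter by predicate, key by subject. */
    static JavaPairRDD<String, String> bySubject(JavaRDD<String[]> t, String pred) {
        return t.filter(a -> a[1].equals(pred)).mapToPair(a -> new Tuple2<>(a[0], a[2]));
    }
}
```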
Paper: "Ontologies Guidelines for Best Practice and a Process to Evaluate Existing Ontologies Mapping Tools and Algorithms"
Keywords: Ontologies; Mapping; Tools; Algorithms; Guidelines; Evaluation
Abstract: The Pistoia Alliance Ontologies Mapping project (http://www.pistoiaalliance.org/projects/ontologies-mapping) was set up to find or create better tools or services for mapping between ontologies in the same domain and to establish best practices for ontology management in the Life Sciences. It was proposed through the Pistoia Alliance Ideas Portfolio Platform (IP3: https://www.qmarkets.org/live/pistoia/home) and was selected by the Pistoia Alliance Operations Team for development of a formal business case. The project has delivered a set of guidelines for best practice which build on existing standards. We show how these guidelines can be used as a "checklist" to support the application and mapping of source ontologies in the disease and phenotype domain. Another important output of this project was to specify the requirements for an Ontologies Mapping Tool. These requirements were used in a preliminary survey that established that such tools already exist which substantially meet them. Therefore, we have developed a formal process to define and submit a request for information (RFI) to existing ontologies mapping tool providers to enable their evaluation. This RFI process is described, and we summarise our findings from the evaluation of seven ontologies mapping tools from academic and commercial providers. The guidelines and RFI materials are accessible on a public wiki: https://pistoiaalliance.atlassian.net/wiki/display/PUB/Ontologies+Mapping+Resources. A critical component of any ontologies mapping tool is the embedded ontology matching algorithm. Therefore, the Pistoia Alliance Ontologies Mapping project is supporting the development and evaluation of ontology matching algorithms through sponsorship and organisation of the new Disease and Phenotype track for OAEI 2016, which is also summarised in this poster. This new track has been organised because, currently, mappings between ontologies in a given data domain are mostly curated by bioinformatics and disease experts in academia or industry, who would benefit from automation of their procedures. This could be accomplished through implementation of ontology matching algorithms into their existing workflow environment or investment in an ontologies mapping tool for management of the ontologies mapping life cycle. Work is in progress in the Ontologies Mapping project to develop user requirements for an ontologies mapping service. We will conduct a survey of Pistoia Alliance members to understand the need for such a service and whether it should be implemented in future.

Session: "Can you imagine... a language for combinatorial creativity?", by Fabian M. Suchanek, Colette Menard, Meghyn Bienvenu and Cyril Chapellier (2016-10-19, 11:40-12:00)

Organisation: Fujitsu Ireland

Paper: "Interdisciplinary Classification of Audio Effects in the Audio Effect Ontology"
Keywords: audio effects; music production; ontology; semantic web
Abstract: This paper discusses an extension to the Audio Effect Ontology (AUFX-O) for the interdisciplinary classification of audio effect types. The ontology extension implements a unified classification system that draws on knowledge from different music-related disciplines and is designed to facilitate the retrieval of audio effect information based on low-level and semantic aspects. It extends AUFX-O, enabling communication between agents from different disciplines within the field of music creation and production. After briefly discussing the ontology, we show how it can be used to efficiently classify and retrieve effect types.
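Retrieval over a classification like the AUFX-O extension is typically a transitive subclass query. A sketch with Jena over a local copy of the ontology; the file name and the effect-class IRI are placeholders, since the ontology's real terms are not reproduced here.

```java
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;

public class EffectTypeQuery {
    public static void main(String[] args) {
        Model ontology = RDFDataMgr.loadModel("aufx-o.ttl"); // local copy of the ontology
        String query =
            "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
            // Property path: the class itself plus all of its direct and indirect subclasses.
            "SELECT ?type WHERE { ?type rdfs:subClassOf* <http://example.org/aufx/DelayEffect> }";
        try (QueryExecution qe = QueryExecutionFactory.create(query, ontology)) {
            qe.execSelect().forEachRemaining(row -> System.out.println(row.get("type")));
        }
    }
}
```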
Paper: "Rule-Based Reasoning using State Space Search"
Keywords: state space search; rule-based reasoning; reasoning
Abstract: Semantic Web reasoners are powerful tools that allow the extraction of implicit information from RDF data. This information is reachable through the definition of ontologies and/or rules provided to the reasoner. To achieve this, various algorithms are used by different reasoners. In this paper, we explain how state space search can be applied to perform backward-chaining rule-based reasoning. State space search is an approach used in the Artificial Intelligence domain that solves problems by modeling them as a graph and searching (using diverse algorithms) for solutions within this graph. State space search offers inherent proof generation and the ability to plug in different search algorithms to determine the characteristics of the reasoner, such as speed, memory usage, or guaranteed shortest proofs.

Paper: "Knowledge Representation on the Web revisited: the Case for Prototypes"
Keywords: Knowledge Representation; Prototypes; Linked Data
Abstract: In recent years RDF and OWL have become the most common knowledge representation languages in use on the Web, propelled by the recommendation of the W3C. In this paper we examine an alternative way to represent knowledge based on Prototypes. This Prototype-based representation has different properties, which we argue to be more suitable for data sharing and reuse on the Web. Prototypes avoid the distinction between classes and instances and provide means for object-based data sharing and reuse. In this paper we discuss the requirements and design principles for Knowledge Representation based on Prototypes on the Web, after which we propose a formal syntax and semantics. We show how to embed knowledge representation based on Prototypes in the current Semantic Web standard stack. An implementation and practical evaluation of the system is presented in a separate resource paper.

Organisation: University of Texas Health Science Center
Person: Axel-Cyrille Ngonga Ngomo (id 3e873fc82e7405de39cb8dc6f2d2c2e445f8c043)
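To make the state-space framing in the reasoning entry above concrete: backward chaining can be run as a breadth-first search where each state is the list of goals still to prove, a known fact discharges a goal, and a rule replaces a goal by its body. The propositional sketch below (plain Java, with invented rule and fact names) finds a proof if one exists, and BFS order yields a shortest one.

```java
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class GoalSearch {

    /** A rule whose head may be rewritten into its body when searching backwards. */
    static class Rule {
        final String head;
        final List<String> body;
        Rule(String head, String... body) {
            this.head = head;
            this.body = Arrays.asList(body);
        }
    }

    /** BFS over goal lists: the empty list is a solved state, i.e. a proof. */
    static boolean prove(String goal, Set<String> facts, List<Rule> rules) {
        Deque<List<String>> frontier = new ArrayDeque<>();
        Set<List<String>> visited = new HashSet<>();
        frontier.add(Arrays.asList(goal));
        while (!frontier.isEmpty()) {
            List<String> goals = frontier.poll();
            if (goals.isEmpty()) return true;       // every goal discharged
            if (!visited.add(goals)) continue;      // state already explored
            String g = goals.get(0);
            List<String> rest = goals.subList(1, goals.size());
            if (facts.contains(g)) {
                frontier.add(rest);                 // a known fact closes the first goal
            }
            for (Rule r : rules) {
                if (r.head.equals(g)) {             // rewrite the goal into the rule body
                    List<String> next = new ArrayList<>(r.body);
                    next.addAll(rest);
                    frontier.add(next);
                }
            }
        }
        return false;                               // search space exhausted, no proof
    }

    public static void main(String[] args) {
        List<Rule> rules = Arrays.asList(new Rule("flies", "bird", "notPenguin"));
        Set<String> facts = new HashSet<>(Arrays.asList("bird", "notPenguin"));
        System.out.println(prove("flies", facts, rules)); // true
    }
}
```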
Paper: "Generating Conference Linked Open Data in One Click"
Keywords: scholarlydata; semantic web dog food; linked open data; semantic publishing
Abstract: In this paper we describe cLODg2 (conference Linked Open Data generator, version 2), a tool to collect, refine and produce Linked Data about scientific conferences with their associated publications, participants and events. Conference metadata collected from different unstructured and semi-structured resources must be expressed with appropriate vocabularies to be exposed as Linked Data. cLODg2 facilitates this task by providing a one-click workflow to generate data which is ready to be integrated in the ScholarlyData.org dataset. cLODg2 is an open source project whose aim is to foster the publication of scholarly Linked Open Data and encourage collaborative efforts in this direction between researchers and publishers.

Person: Haoxuan Li (id 6f4d40a62c93d31e98c21f7d17f4caa489dc37cb)
Person: Adrian Soto (id 24fd857befa33b9941e2165d18316451b90c9a03)
Organisation: Free University of Bozen-Bolzano
Paper: "Enhancing Rule-based OWL Reasoning on Spark"
Keywords: Semantic Web; OWL; Rule-based reasoning; Spark
Abstract: Rule-based OWL reasoning computes the deductive closure of an ontology by applying RDF/RDFS and OWL entailment rules. In this paper, we present an approach to enhancing the performance of rule-based OWL reasoning on Spark, based on a locally optimal executable strategy. Firstly, we divide all rules (27 in total) into four main classes, namely SPO rules (5 rules), type rules (7 rules), sameAs rules (7 rules), and schema rules (8 rules), since, as we investigated, the triples corresponding to the first three classes of rules are overwhelming in practice (e.g., over 99% in the LUBM dataset). Secondly, based on the interdependence among the entailment rules in each class, we pick out an optimal executable order for each class and then combine them into a new execution order over all rules. Finally, we implement the new rule execution order on Spark. The experimental results show that the running time of our approach is improved by about 30% as compared to Kim & Park's algorithm (2015).

Session: "LODStats: The Data Web Census Dataset", by Ivan Ermilov, Jens Lehmann, Michael Martin and Sören Auer (2016-10-20, 14:10-14:30)

Person: Christian Neuenstadt (id 385bc1cfaff8363dcc699a5101b7a4730cc5adfb)
Organisation: Siemens AG, Corporate Technology

Session: "Proactive Replication of Dynamic Linked Data for Scalable RDF Stream Processing", by Sejin Chun, Jooik Jung, Xiongnan Jin, Seungjun Yoon and Kyong-Ho Lee (2016-10-19, 18:00-21:00)

Person: Rik Van de Walle (id 209af6a5da064a4a0f0cb89a336bfeb0ebcc196d)

Session: "Faceted search over RDF-based knowledge graphs", by Marcelo Arenas, Bernardo Cuenca Grau, Evgeny Kharlamov, Šarūnas Marciuška and Dmitriy Zheleznyakov (2016-10-20, 16:30-16:50)

Person: Yolanda Gil

Session: "Scalable Semantic Access to Siemens Static and Streaming Distributed Data", by Evgeny Kharlamov, Sebastian Brandt, Martin Giese, Ernesto Jimenez-Ruiz, Yannis Kotidis, Steffen Lamparter, Theofilos Mailis, Christian Neuenstadt, Özgür Lütfü Özcep, Christoph Pinkel, Ahmet Soylu, Christoforos Svingos, Dmitriy Zheleznyakov, Ian Horrocks, Yannis Ioannidis, Ralf Möller and Arild Waaler (2016-10-19, 18:00-21:00)

Person: Bernardo Cuenca Grau (id c0879a5783f8750335b2d2830dd7dbb99dc8f94b)
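The locally optimal rule order in the Spark reasoning entry above matters because each pass over the data is expensive: a bad order forces extra fixpoint iterations. Stripped of Spark, the control flow is a fixpoint loop that applies the rule classes in a fixed sequence. This plain-Java sketch (with an invented toy rule class) shows where that order enters; it is an illustration of the scheme, not the paper's implementation.

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

public class OrderedRuleClasses {

    /** A rule class maps the current triple set to the triples it derives. */
    static Set<String> closure(Set<String> triples,
                               List<Function<Set<String>, Set<String>>> ruleClasses) {
        Set<String> all = new HashSet<>(triples);
        boolean changed = true;
        while (changed) {                               // iterate to the fixpoint
            changed = false;
            for (Function<Set<String>, Set<String>> rc : ruleClasses) {
                changed |= all.addAll(rc.apply(all));   // apply classes in the chosen order
            }
        }
        return all;
    }

    public static void main(String[] args) {
        // Toy rule class: "a" derives "b", and "b" derives "c".
        Function<Set<String>, Set<String>> toy = ts -> {
            Set<String> derived = new HashSet<>();
            if (ts.contains("a")) derived.add("b");
            if (ts.contains("b")) derived.add("c");
            return derived;
        };
        System.out.println(closure(new HashSet<>(Arrays.asList("a")), Arrays.asList(toy)));
    }
}
```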
Session: "Ontop of Geospatial Databases", by Konstantina Bereta and Manolis Koubarakis (2016-10-21, 14:30-14:50)

Person: Mohamed H. Gad-Elrab (id 9a4c2982ecb311ef12921f43c6bd3be430914cee)
Organisation: KRDB Research Centre, Free University of Bozen-Bolzano
Organisation: LAAS-CNRS / IRIT
Person: Guozheng Rao (id 7a4a3f9540990bfe00519e0906bd4802070cde5e)

Session: "Smart Trip Alternatives for the Curious", by Damien Graux, Pierre Geneves and Nabil Layaida (2016-10-19, 18:00-21:00)

Person: Mathias Van Compernolle (id 1c03751ebd8cd99eeb57e8fcb853771646e3134d)
Organisation: VU University Amsterdam
Person: Christophe Debruyne (id ed3783609bbe08e801b87ecc375c11b6e5ae765b)
Person: Katja Hose (id 443d701dde45605cf194b2de164b01e17b72f473)

Paper: "Semantic labeling: A domain-independent approach"
Keywords: semantic labeling; data integration; semantic web
Abstract: Semantic labeling is the process of mapping attributes in data sources to classes in an ontology and is a necessary step in heterogeneous data integration. Variations in data formats, attribute names and even ranges of values of data make this a very challenging task. In this paper, we present a novel domain-independent approach to automatic semantic labeling that uses machine learning techniques. Previous approaches use machine learning to learn a model that extracts features related to the data of a domain, which requires the model to be re-trained for every new domain. Our solution uses similarity metrics as features to compare against labeled domain data and learns a matching function to infer the correct semantic labels for data. Since our approach depends on the learned similarity metrics but not the data itself, it is domain-independent and only needs to be trained once to work effectively across multiple domains. In our evaluation, our approach achieves higher accuracy than other approaches, even when the learned models are trained on domains other than the test domain.

Person: Peter Boncz (id da89467b6c6397e5e8ebac0f8c307fabc54664b4)
Person: Torben Bach Pedersen (id b59ca8493302f1644ba3fcb198a64536c6530d86)
Organisation: Fujitsu Laboratories Limited
Organisation: The University of the Basque Country

Session: "SOLS: A Semantically Enriched Learning System Using LOD Based Automatic Question Generation", by Corentin Jouault, Kazuhisa Seta and Yuki Hayashi (2016-10-19, 18:00-21:00)

Person: Freddy Lecue (id f1a1055588342958afbd758ff970e26533fd3bdb)
Person: Matthew Horridge

Session: "Ranking Feature for Classifier-based Instance Matching", by Khai Nguyen and Ryutaro Ichise (2016-10-19, 18:00-21:00)

Session: "Conference Linked Data: the ScholarlyData project", by Andrea Giovanni Nuzzolese, Anna Lisa Gentile, Valentina Presutti and Aldo Gangemi (2016-10-20, 10:30-10:50)

Person: Maria Poveda

Session: "refer: a Linked Data based Text Annotation and Recommender System for Wordpress", by Tabea Tietz, Jörg Waitelonis, Joscha Jäger and Harald Sack (2016-10-19, 18:00-21:00)

Person: Jens Lehmann (id 01fee219e665ecea3905f361517b2bd4a344975d)
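The domain-independence trick in the semantic-labeling entry above is that the classifier sees similarity scores, never raw values. One such feature could be plain Jaccard overlap between value sets, as in this self-contained sketch; the feature choice is illustrative, not the paper's exact feature set.

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SimilarityFeatures {

    /** Jaccard overlap between the values of an unlabelled column and a labelled one. */
    static double jaccard(Set<String> candidate, Set<String> labelled) {
        Set<String> intersection = new HashSet<>(candidate);
        intersection.retainAll(labelled);
        Set<String> union = new HashSet<>(candidate);
        union.addAll(labelled);
        return union.isEmpty() ? 0.0 : (double) intersection.size() / union.size();
    }

    public static void main(String[] args) {
        Set<String> newColumn = new HashSet<>(Arrays.asList("red", "green", "blue"));
        Set<String> knownColourColumn = new HashSet<>(Arrays.asList("red", "blue", "yellow"));
        // The matcher is trained on feature values like this one, never on the raw data,
        // which is why the learned model transfers across domains.
        System.out.println(jaccard(newColumn, knownColourColumn)); // 0.5
    }
}
```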
Sessions:
2016-10-21, 10:30–11:50: Lightning Talks
2016-10-20, 11:30–11:50: Clinga: Bringing Chinese Physical and Human Geography in Linked Open Data — Wei Hu, Haoxuan Li, Zequn Sun, Xinqi Qian, Lingkun Xue, Ermei Cao and Yuzhong Qu
2016-10-19, 18:00–21:00: Building Evidence Graph for Clinical Decision Support — Jing Mei
2016-10-21, 13:30–13:50: Enabling combined software and data engineering at Web-scale: The ALIGNED suite of ontologies — Monika Solanki, Bojan Božić, Markus Freudenberg, Rob Brennan and Dimitris Kontokostas

People: Bernard Vatant, Majid Ghasemi-Gol, Ghislain Auguste Atemezing, Luigi Asprino, Abraham Bernstein, Ildikó Szabó, Martin J. Kollingbaum
Organizations: Yonsei University, yovisto GmbH
Sessions:
2016-10-20, 10:50–11:10: FOOD: FOod in Open Data — Silvio Peroni, Giorgia Lodi, Luigi Asprino, Aldo Gangemi and Valentina Presutti

People: Declan O'Sullivan, Šarūnas Marciuška, Enzo Zerega, Michael Cochez, Mohamed Gaha, Kyong-Ho Lee
Organizations: University of Sheffield, University of Manchester, University of Athens

Title: USE-RB: Benchmarking how reasoners work in harmony with modern hardware
Keywords: reasoning, benchmark, performance, memory, caches
Abstract: As our computers embed more cores, efficient reasoners are designed with parallelization but also CPU and memory friendliness in mind. The latter helps make reasoners tractable in practice despite the computational complexity of logical fragments. However, creating benchmarks to monitor this CPU-friendliness for many reasoners, datasets and logical fragments is a tedious task. In this paper, we present the Université Saint-Etienne Reasoners Benchmark (USE-RB), which automates the setup and execution of reasoner benchmarks with particular attention to monitoring how reasoners work in harmony with the CPU.
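The USE-RB abstract above describes automating timing runs across many reasoner/dataset pairs. A minimal sketch of such a harness in Java, assuming a hypothetical Reasoner interface (USE-RB's real API is not given here); genuine cache and memory-bandwidth monitoring needs external hardware counters (e.g. Linux perf), which plain Java cannot read:

    public class MiniBench {
        // Hypothetical reasoner abstraction; USE-RB's real interfaces are not shown in the abstract.
        interface Reasoner { void materialize(String datasetPath); }

        // Wall-clock timing of materialization, with warm-up runs to let the JIT settle.
        // CPU/cache friendliness would be observed with external profilers alongside this loop.
        static long benchmark(Reasoner r, String dataset, int warmups, int runs) {
            for (int i = 0; i < warmups; i++) r.materialize(dataset);
            long total = 0;
            for (int i = 0; i < runs; i++) {
                long start = System.nanoTime();
                r.materialize(dataset);
                total += System.nanoTime() - start;
            }
            return total / runs; // mean nanoseconds per run
        }

        public static void main(String[] args) {
            Reasoner noop = path -> { }; // stand-in for a real engine
            System.out.println(benchmark(noop, "lubm.nt", 3, 10) + " ns");
        }
    }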
Sessions:
2016-10-19, 18:00–21:00: Expanding Wikidata's Parenthood Information by 178%, or How To Mine Relation Cardinality Information — Paramita Mirza, Simon Razniewski and Werner Nutt
2016-10-19, 18:00–21:00: User-Friendly and Scalable Platform for the Design of Intelligent IoT Services: a Smart Office Use Case — Femke Ongenae, Pieter Bonte, Jelle Nelis, Thomas Vanhove and Filip De Turck

People: Jaroslaw Bak, Gully Burns, Yannis Ioannidis, Piek Vossen
Organizations: University of Stuttgart

Title: Parallel sort-merge-join reasoning (Inferray)
Keywords: in-memory reasoner, Jena, RDFSPlus, open-source, high-performance
Abstract: We present an in-memory, cross-platform, parallel reasoner for RDFS and RDFSPlus. Inferray uses carefully optimized hash-based join and sorting algorithms to perform parallel materialization. Designed to take advantage of the architecture of modern CPUs, Inferray exhibits very good use of cache and memory bandwidth. It offers state-of-the-art performance on RDFS materialization, outperforms its counterparts on RDFSPlus, and can be connected with Jena. Reasons to see the poster: i) presentation of the system and how to use it; ii) discussion of the implementation and a source code walkthrough.
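To illustrate the join-based materialization the Inferray abstract describes, here is a generic, sequential sort-merge join over dictionary-encoded triples; this is a textbook sketch, not Inferray's actual (hash-based, parallel) implementation. Joining patterns like (?x p ?y) and (?y q ?z) on the shared variable yields the (x, y, z) bindings a rule body needs:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class MergeJoin {
        // left rows are [joinKey, x], right rows are [joinKey, z]; IRIs are assumed
        // to have been dictionary-encoded to ints upstream, as in-memory reasoners do.
        static List<int[]> join(int[][] left, int[][] right) {
            Arrays.sort(left, (a, b) -> Integer.compare(a[0], b[0]));  // sort both sides on key
            Arrays.sort(right, (a, b) -> Integer.compare(a[0], b[0]));
            List<int[]> out = new ArrayList<>();
            int i = 0, j = 0;
            while (i < left.length && j < right.length) {
                if (left[i][0] < right[j][0]) i++;
                else if (left[i][0] > right[j][0]) j++;
                else {
                    // emit the cross product of the two groups sharing this key
                    int key = left[i][0], j0 = j;
                    for (; i < left.length && left[i][0] == key; i++)
                        for (j = j0; j < right.length && right[j][0] == key; j++)
                            out.add(new int[]{left[i][1], key, right[j][1]}); // (x, y, z)
                }
            }
            return out;
        }
    }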
Sessions:
2016-10-19, 18:00–21:00: Towards an Interface for User-Friendly Linked Data Generation Administration — Anastasia Dimou, Pieter Heyvaert, Wouter Maroy, Laurens De Graeve, Ruben Verborgh, Erik Mannens and Rik Van de Walle
2016-10-21, 14:50–15:30: Coffee Break
2016-10-21, 16:30–16:50: OntoBench: Generating Custom OWL 2 Benchmark Ontologies — Vincent Link, Steffen Lohmann and Florian Haag

People: John P. McCrae, Ernesto Jiménez-Ruiz, Yuki Hayashi
Organizations: Télécom ParisTech, Athens University of Economics and Business, IBM, Faculty of Computer Science and Media Technology, Norwegian University of Science and Technology

Title: LinkGen: Multipurpose Linked Data Generator
Keywords: RDF, linked data, data generator, power-law distribution
Abstract: The paper presents a synthetic linked data generator that can generate large amounts of RDF data following given statistical distributions. Data generation is platform independent, supports a streaming mode, and produces output in N-Triples and N-Quads format. Different sets of output can be generated using various configuration parameters, and the outputs are reproducible. Unlike existing generators, our generator accepts any vocabulary and can supplement the output with noisy and inconsistent data. The generator has an option to interlink generated instances with real ones, provided that the user supplies entities from real datasets.
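A toy version of what LinkGen's power-law mode produces: N-Triples whose subjects are drawn from a Zipf distribution via inverse-CDF sampling. The namespace, predicate, and exponent below are invented for illustration, not LinkGen's actual defaults:

    import java.util.Random;

    public class ZipfTriples {
        public static void main(String[] args) {
            int subjects = 1000, triples = 10_000;
            double s = 1.2;                        // Zipf exponent (assumed parameter)
            // precompute the cumulative distribution over subject ranks 1..n
            double[] cdf = new double[subjects];
            double norm = 0;
            for (int k = 1; k <= subjects; k++) norm += 1.0 / Math.pow(k, s);
            double acc = 0;
            for (int k = 1; k <= subjects; k++) {
                acc += 1.0 / Math.pow(k, s) / norm;
                cdf[k - 1] = acc;
            }
            Random rnd = new Random(42);           // fixed seed keeps the output reproducible
            for (int t = 0; t < triples; t++) {
                double u = rnd.nextDouble();
                int k = 0;
                while (k < cdf.length - 1 && cdf[k] < u) k++;  // inverse-CDF sampling
                System.out.printf("<http://example.org/s%d> <http://example.org/p> \"v%d\" .%n", k, t);
            }
        }
    }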
Sessions:
2016-10-20, 16:10–16:30: Linked Open Vocabularies (LOV): a gateway to reusable semantic vocabularies on the Web — Pierre-Yves Vandenbussche, Ghislain A. Atemezing, Maria Poveda and Bernard Vatant
2016-10-19, 18:00–21:00: DBpedia Mappings Quality Assessment — Anastasia Dimou, Dimitris Kontokostas, Markus Freudenberg, Ruben Verborgh, Jens Lehmann, Erik Mannens, Sebastian Hellmann and Rik Van de Walle
2016-10-20, 15:30–15:50: CubeQA—Question Answering on RDF Data Cubes — Konrad Höffner, Jens Lehmann and Ricardo Usbeck
2016-10-21, 10:00–10:30: Coffee Break
2016-10-19, 18:00–21:00: Harnessing Crowds and Experts for Semantic Annotation of the Qur'an — Amna Basharat, Khaled Rasheed and I. Budak Arpinar
2016-10-20, 16:10–16:30: Walking without a Map: Ranking-Based Traversal for Querying Linked Data — Olaf Hartig and M. Tamer Ozsu
2016-10-20, 14:30–14:50: Quality Assessment for Linked Data: A Survey — Amrapali Zaveri, Anisa Rula, Andrea Maurino, Ricardo Pietrobon, Jens Lehmann and Sören Auer

People: David Shotton, Makoto Iwayama, I. Budak Arpinar
Organizations: MSD IT Global Innovation Center
Sessions:
2016-10-19, 18:00–21:00: Data Acquisition by Traversing Class-Class Relationships over the Linked Open Data — Atsuko Yamaguchi, Kouji Kozaki, Kai Lenz, Yasunori Yamamoto, Hiroshi Masuya and Norio Kobayashi
2016-10-19, 18:00–21:00: Constructing Semantic Networks of Development Activities from Weekly Reports — Motoyuki Takaai and Yohei Yamane
2016-10-18, 11:45–12:00: Linked Data processing for Embedded Devices — Le Tuan Anh

People: Domagoj Vrgoc, Seiji Koide, Martin Romacker, Eugenio Di Sciascio, Erdal Kuzey, Kalliopi Pafilis, Peter Patel-Schneider, Rafael S. Gonçalves
Organizations: IIT Bombay, University of Georgia

Title: Using Semantic Web Technologies for Explaining and Predicting Abnormal Expense
Keywords: expenses, anomaly detection, prediction, explanation, reasoning, spend optimization
Abstract: Travel expenses represent up to 7% of an organization's overall budget. Existing expense systems are designed for reporting expense types and amounts, but not for understanding how to save and spend. We present a system, using semantic web technologies, which aims at identifying, explaining and predicting abnormal expense claims by employees of large organizations in 500+ cities.
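The abstract does not say how abnormality is scored; one common baseline, shown here as a hedged stand-in for the paper's semantic-web pipeline, is a robust z-score over comparable claims (e.g. same city and expense type):

    import java.util.Arrays;

    public class ExpenseOutliers {
        // Baseline abnormality score: how many median absolute deviations a claim
        // sits from the median of its peer group. Purely illustrative; the paper's
        // system adds reasoning and explanations on top of any such flagging.
        static double robustZ(double claim, double[] peers) {
            double[] sorted = peers.clone();
            Arrays.sort(sorted);
            double median = sorted[sorted.length / 2];
            double[] dev = new double[peers.length];
            for (int i = 0; i < peers.length; i++) dev[i] = Math.abs(peers[i] - median);
            Arrays.sort(dev);
            double mad = dev[dev.length / 2];
            return mad == 0 ? 0 : 0.6745 * (claim - median) / mad;
        }

        public static void main(String[] args) {
            double[] hotelNights = {180, 195, 210, 175, 205, 820}; // made-up figures
            System.out.println(robustZ(820, hotelNights));         // clearly abnormal
        }
    }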
Sessions:
2016-10-21, 15:50–16:10: Updating DL-Lite Ontologies through First-Order Queries — Giuseppe De Giacomo, Xavier Oriol, Riccardo Rosati and Domenico Fabio Savo
2016-10-19, 18:00–21:00: Development of an Ontology for an Integrated Image Analysis Platform to enable Global Sharing of Microscopy Imaging Data — Satoshi Kume, Hiroshi Masuya, Yosky Kataoka and Norio Kobayashi
2016-10-19, 11:00–11:20: Knowledge Representation on the Web revisited: the Case for Prototypes — Michael Cochez, Stefan Decker and Eric Prud'Hommeaux

People: Achim Rettinger, Christian Hennig
Organizations: Nuance Communications, KCL London, Database Center for Life Science, University of Oslo
Title: A Web Application to Search a Large Repository of Taxonomic Relations from the Web
Keywords: Natural Language Processing techniques for the Semantic Web, information extraction, databases
Abstract: Taxonomic relations (also known as "isa" relations or hypernymy relations) represent a fundamental atomic piece of structured information for many text understanding applications. Such structured information is part of the basic topology of knowledge bases and foundational ontologies. Despite the availability of shared knowledge bases, some NLP applications (e.g. ontology learning) require automatic isa-relation harvesting techniques to cope with the coverage of domain-specific and long-tail terms. We present a web application to directly query our repository of isa relations extracted from the Common Crawl (the largest publicly available crawl of the Web). Our resource can also be downloaded for research purposes and accessed programmatically (we also release a Java application programming interface).

Sessions:
2016-10-19, 18:00–21:00: Modeling OWL with Rules: The ROWL Protege Plugin — Md. Kamruzzaman Sarker, David Carral, Adila A. Krisnadhi and Pascal Hitzler
2016-10-21, 16:30–16:50: Extracting Semantic Information for e-Commerce — Bruno Charron, Yu Hirate, David Purcell and Martin Rezk
2016-10-19, 18:00–21:00: Representing RDF Stream Processing Queries in RSP-SPIN — Robin Keskisärkkä
2016-10-19, 18:00–21:00: An Ontology based Map Converter for Intelligent Vehicles — Lihua Zhao, Naoya Arakawa, Hiroaki Wagatsuma and Ryutaro Ichise

People: Syeda Sana E Zainab, Ryota Nishimura
Organizations: University of Bradford, University of Oxford, Wright State University
Sessions:
2016-10-21, 13:50–14:10: Exploiting Emergent Schemas to make RDF systems more efficient — Minh-Duc Pham and Peter Boncz
2016-10-21, 09:00–10:00: Keynote: Hiroaki Kitano
2016-10-18, 12:30–14:00: Lunch
2016-10-19, 18:00–21:00: Cross-Language Record Linkage using Word Embedding driven Metadata Similarity Measurement — Yuting Song, Taisuke Kimura, Biligsaikhan Batjargal and Akira Maeda
2016-10-19, 18:00–21:00: Apache Spark and Apache Kafka at the rescue of distributed RDF Stream Processing engines — Xiang Nan Ren, Olivier Curé, Houda Khrouf, Zakia Kazi-Aoul and Yousra Chabchoub
2016-10-19, 14:00–14:20: The multiset semantics of SPARQL patterns — Renzo Angles and Claudio Gutierrez
2016-10-21, 15:30–16:50: Enriching Data Sources

People: Amna Basharat, Zequn Sun, Pieter Simoens

Title: Discovering and Using Functions via Content Negotiation
Keywords: function, content negotiation, Linked Data
Abstract: Data has been made reusable and machine-interpretable by publishing it as Linked Data. However, automatic processing of Linked Data is not fully achieved yet, as manual effort is still needed to integrate existing tools and libraries within a given technology stack. To enable automatic processing, we propose exposing functions and methods as Linked Data, publishing them in different programming languages, using content negotiation to cater to different technology stacks, and making use of common, technology-independent identifiers to make them discoverable. As such, we can enable automatic processing of Linked Data across formats and technology stacks. By using discovery endpoints, similar to those used to discover vocabularies and ontologies, the publication of these functions can remain decentralized whilst still being easily discoverable.
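The proposal above rests on ordinary HTTP content negotiation. A small Java 11+ sketch of the client side, with an invented function IRI and invented media types (the paper's identifiers are not reproduced here): the Accept header states which implementation formats this stack can consume, and the response's Content-Type says what the server chose:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class FunctionNegotiation {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            HttpRequest req = HttpRequest.newBuilder()
                    .uri(URI.create("http://example.org/functions/slugify")) // hypothetical identifier
                    .header("Accept", "application/java-archive, text/turtle;q=0.5")
                    .build();
            HttpResponse<String> res = client.send(req, HttpResponse.BodyHandlers.ofString());
            // the negotiated Content-Type tells us whether we got an implementation or an RDF description
            System.out.println(res.headers().firstValue("Content-Type").orElse("unknown"));
        }
    }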
People: Hiroaki Wagatsuma, Giulio Curioni, Pedro Szekely, Aidan Hogan
Organizations: World Wide Web Consortium (W3C)

Title: An Extensible Linear Approach For Holistic Ontology Matching
Keywords: holistic ontology matching, combinatorial optimisation, linear programming
Abstract: Resolving semantic heterogeneity in the semantic web requires finding correspondences between the ontologies describing resources. In particular, with the explosive growth of datasets in the Linked Open Data cloud, linking multiple vocabularies and ontologies simultaneously, known as the holistic matching problem, becomes necessary. Currently, most state-of-the-art matching approaches are limited to pairwise matching. In this paper, we propose an approach to holistic ontology matching that is modeled as a linear program extending the maximum-weight graph matching problem with linear constraints (cardinality, structural, and coherence constraints). Our approach guarantees the optimal solution with mostly coherent alignments. To evaluate our proposal, we discuss the results of experiments performed on the Conference track of the OAEI 2015, under both holistic and pairwise matching settings.
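One plausible reading of the linear program sketched in this abstract (the paper's exact constraint set is richer): binary variables select candidate correspondences across all input ontologies, weighted by a similarity score,

    \max \sum_{(e_i, e_j) \in C} w_{ij}\, x_{ij}
    \quad \text{subject to} \quad
    \sum_{j : (e_i, e_j) \in C} x_{ij} \le 1 \;\; \forall i
    \;\; (\text{one-to-one cardinality}),
    \qquad x_{ij} \in \{0, 1\},

where structural and coherence requirements add further linear inequalities, e.g. $x_{ij} + x_{kl} \le 1$ for pairs of correspondences that would make the merged alignment incoherent.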
Title: An On-Line Learning to Query System
Keywords: learning to query, active learning, SPARQL 1.1
Abstract: We present an on-line system which learns a SPARQL query from a set of wanted and a set of unwanted results of the query. The sets are extended during a dialog with the user. The system leverages SPARQL 1.1 and does not depend on any particular RDF graph.

Sessions:
2016-10-19, 18:00–21:00: Materializing the editing history of Wikipedia as linked Data in DBpedia — Fabien Gandon
2016-10-19, 18:00–21:00: IRSMG: Accelerating Inexact RDF Subgraph Matching on the GPU — Junzhao Zhang, Xiaowang Zhang and Zhiyong Feng
2016-10-18, 11:15–11:30: Ontology-based dialogue systems for improved patient HPV vaccine knowledge and perception — Muhammad Amith
2016-10-19, 12:00–12:20: Expressive Multi-Level Modeling for the Semantic Web — Freddy Brasileiro, Joao Paulo Almeida, Victorio Albani Carvalho and Giancarlo Guizzardi

People: Cristian Riveros, Gisela Klette, Martin G. Skjæveland
Title: refer: a Linked Data based Text Annotation and Recommender System for Wordpress
Keywords: NEL, DBpedia, annotation, visualization
Abstract: When searching for an arbitrary subject in weblogs or archives, users often do not get the information they are really looking for. Often they are overwhelmed by an overflow of information, while sometimes the presented information is too scarce to make any use of it. Without further knowledge about the context or background of the intended subject, users are easily frustrated because they either cannot handle the amount of information or give up because they cannot make sense of the topic at all. Furthermore, authors of online platforms often face the issue of providing useful recommendations of other articles and motivating readers to stay on the platform to explore more of the available but mostly hidden content of their blog or archive. In the demo presentation, we present refer, a semantic annotation and visualization system integrated into the Wordpress platform. With refer, content creators can (semi-)automatically annotate their texts with DBpedia resources as part of the original writing process and visualize them automatically. Users are encouraged to take an active part in discovering a platform's information content interactively and intuitively, rather than having to read the entire textual information provided by the author. They can discover background information as well as relationships among persons, places, events, and anything related to the subject in current focus, and are inspired to navigate the previously hidden information on a platform.
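A deliberately naive caricature of the annotation step refer automates, only to show the shape of its output (a text span linked to a DBpedia resource); real named-entity linking adds candidate ranking and disambiguation, and the gazetteer here is invented:

    import java.util.Map;

    public class NaiveAnnotator {
        // Toy gazetteer: surface form -> DBpedia IRI. refer's actual NEL pipeline
        // is far richer; this just illustrates span-to-resource annotations.
        static final Map<String, String> GAZETTEER = Map.of(
                "Tokyo", "http://dbpedia.org/resource/Tokyo",
                "Semantic Web", "http://dbpedia.org/resource/Semantic_Web");

        public static void main(String[] args) {
            String text = "The Semantic Web conference was held in Tokyo.";
            for (var e : GAZETTEER.entrySet()) {
                int at = text.indexOf(e.getKey());
                if (at >= 0)
                    System.out.println(e.getKey() + " @" + at + " -> " + e.getValue());
            }
        }
    }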
Title: OntoCASE4G-OWL: Towards a modeling software tool for G-OWL a visual syntax for RDF/RDFS/OWL2
Keywords: visual knowledge representation, visual modeling, graphical ontological syntax, graphical language, graphical ontology, ontology, OWL2
Abstract: The ontological syntaxes standardized by the W3C offer the expressiveness needed to formulate complex concepts. However, the codification of an ontology is a process of formalization of thought that sometimes requires extensive knowledge and is often inaccessible to the layperson. The G-OWL (Graphical OWL) language has been designed to provide a tool that facilitates the expression of knowledge in a manner compatible with OWL2 ontologies. This paper presents the OntoCASE4G-OWL prototype, a visual modeling software for the editing of formal ontologies in G-OWL and their translation into Turtle. The executable version of OntoCASE for Windows and MacOsX is available at http://www.cotechnoe.com/iswc2016

Sessions:
2016-10-21, 17:00–17:30: Closing Ceremony

People: Lu Fang, Rob Brennan, Charalampos Nikolaou, Shirley Elprama, Lihua Zhao, Esteban Zimanyi, Robin Keskisärkkä, Valentina Presutti, Nandana Mihindukulasooriya, Anthony Potter, Michael Rossman
Organizations: The Institute of Scientific and Industrial Research (ISIR), Osaka University; The University of Manchester

Title: Local Council Decisions as Linked Data: a proof of concept
Keywords: Linked Data, public administration, local affairs, digital publishing
Abstract: Base registries are trusted authentic information sources controlled by a public administration or an organization appointed by the government. Maintaining a base registry comes with extra maintenance costs to create the dataset and keep it up to date. In this paper, we study the possibility of embedding the maintenance of base registries in existing administrative processes, reducing the cost of maintaining a new data source. We demonstrate a method to manage Local Council Decisions as Linked Data, which creates a new base registry for mandates. We found that no extra effort was needed in the process by local administrations, and we show that an end-to-end approach for Local Council Decisions as Linked Data is feasible. Furthermore, using this proof of concept, we built momentum to roll out these ideas for the region of Flanders in Belgium.
People: Agnieszka Ławrynowicz, Helen Reeves, Mathieu d'Aquin, Pierre-Yves Vandenbussche, Ricardo Pietrobon, Steffen Lamparter, Juan Manuel Gimeno, Peter Hendler, Shusaku Egami

Title: Linked Corporations Data in Japan
Keywords: Linked Open Data, dataset, corporations, LOD4ALL
Abstract: Following the G8 Open Data Charter, governments are publishing corporation register data as Open Data. In Japan, the government recently published a dataset covering approximately 4.4 million corporations, but the dataset is rated as only 3 star in the 5-star rating system. Our policy, which we believe is shared by the LOD community, is that low-star datasets should be converted to 5 star as early as possible to strengthen the power of LOD. Based on this policy, we designed a schema for corporation data, converted the Japanese dataset to 5 star using this schema, and published the result under a Creative Commons Attribution 4.0 License on 9th December 2015, only eight days after the publication date of the original dataset. As far as we know, eight datasets currently refer to ours, which strengthens its 5-star status. For business purposes, we internally added links between our dataset and other data such as DBpedia, and applied this enriched data to a visualization system for browsing a corporation from various perspectives.
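The core of such a 3-star-to-5-star conversion is mechanical: mint an IRI per corporation, attach typed triples, and link out to other datasets. A hedged sketch with an invented IRI scheme and vocabulary (the paper's published schema is not reproduced here):

    public class CorpToRdf {
        // Turns one row of a 3-star CSV corporation register into 5-star N-Triples.
        // All IRIs below are illustrative placeholders, including the DBpedia link,
        // which in reality requires an actual matching step.
        static String toTriples(String corpNumber, String name) {
            String s = "<http://example.org/corp/" + corpNumber + ">";
            return s + " <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://example.org/ns#Corporation> .\n"
                 + s + " <http://www.w3.org/2000/01/rdf-schema#label> \"" + name + "\"@ja .\n"
                 + s + " <http://www.w3.org/2002/07/owl#sameAs> <http://dbpedia.org/resource/" + name + "> .\n";
        }

        public static void main(String[] args) {
            System.out.print(toTriples("1234567890123", "Example_KK"));
        }
    }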
Title: The ESSOT System Goes Wild: an Easy Way For Translating Ontologies
Keywords: ontology translation, multilingual ontology management, context-based concept translation, collaborative ontology management platform
Abstract: To enable knowledge access across languages, ontologies, which are often represented only in English, need to be translated into different languages. Since manual multilingual enhancement of domain-specific ontologies is very time consuming and expensive, smart solutions are required to facilitate the translation task for language and domain experts. For this reason, we present ESSOT, an Expert Supporting System for Ontology Translation, which supports experts in the multilingual ontology management task. Unlike classic document translation, ontology label translation faces highly specific vocabulary and a lack of contextual information. Therefore, ESSOT takes advantage of the semantic information in the ontology to improve the translation of ontology labels.

Sessions:
2016-10-19, 18:00–21:00: SOMM: Industry Oriented Ontology Management Tool — Evgeny Kharlamov, Bernardo Cuenca Grau, Ernesto Jimenez-Ruiz, Steffen Lamparter, Gulnar Mehdi, Martin Ringsquandl, Yavor Nenov, Stephan Grimm, Mikhail Roshchin and Ian Horrocks

People: Jinhyun Ahn, Aliaksandr Birukou, Rivindu Perera, Stefan Dietze
Organizations: Corvinus University of Budapest, University of Edinburgh, University of Aberdeen
Sessions:
2016-10-19, 18:00–21:00: Smart Topic Miner: Supporting Springer Nature Editors with Semantic Web Technologies — Francesco Osborne, Angelo Antonio Salatino, Aliaksandr Birukou and Enrico Motta
2016-10-19, 11:00–11:20: VoldemortKG: Mapping Schema.org Entities to Linked Open Data — Alberto Tonon, Djellel Eddine Difallah, Victor Felder and Philippe Cudré-Mauroux

People: Masaru Miyazaki, Kerry Taylor, Giorgia Lodi
Organizations: Saltlux, Inc.; Karlsruhe Institute of Technology (KIT)

Title: The multiset semantics of SPARQL patterns
Keywords: SPARQL, bag semantics, relational algebra, Datalog
Abstract: The paper determines the algebraic and logical structure produced by the multiset semantics of the core patterns of SPARQL. We prove that the fragment formed by AND, UNION, OPTIONAL, FILTER, MINUS and SELECT corresponds precisely both to the intuitive multiset relational algebra (projection, selection, natural join, arithmetic union and except) and to multiset classical non-recursive Datalog with safe negation.
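A worked micro-example of the bag semantics this paper axiomatizes, in the usual notation (details may differ from the paper's exact formalization): UNION is arithmetic union on multisets of solution mappings,

    \llbracket P_1 \,\mathrm{UNION}\, P_2 \rrbracket_G
      \;=\; \llbracket P_1 \rrbracket_G \uplus \llbracket P_2 \rrbracket_G,
    \qquad
    \operatorname{card}(\mu,\, R \uplus S)
      \;=\; \operatorname{card}(\mu, R) + \operatorname{card}(\mu, S),

so evaluating P UNION P doubles the multiplicity of every solution mapping: the duplicates that set semantics would silently collapse are exactly what the multiset relational algebra's arithmetic union preserves.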
+ 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Flemish Agency for Domestic Governance + Flemish Agency for Domestic Governance + + + Flemish Agency for Domestic Governance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + fd3e838e2512e44a55c1ff356fb5fe97502b2aa4 + + + + + + Yu Hirate + Yu Hirate + Yu Hirate + + + + + + + + + + + + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + Damian Bursztyn, Francois Goasdoue and Ioana Manolescu + 2016-10-19T21:00:00 + Optimizing FOL reducible query answering: understanding performance challenges + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Optimizing FOL reducible query answering: understanding performance challenges + + + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + f38cdca307f19938c201d780003037ae67c1dd4c + Akane Takezaki + + Akane Takezaki + + + Akane Takezaki + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Jacopo Urbani, Ceriel Jacobs and Markus Krötzsch + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + 2016-10-19T18:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + + + + + + Ben De Meester + + Ben De Meester + Ben De Meester + + + + + + + + + ad8b2ae36dc69d174133fd2fd63d13c583f646c3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + University of Southern California + + University of Southern California + + + + + + University of Southern California + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 8b3a38ec6a3728520f921739067cef070b173a9c + + + Patrick Lambrix + Patrick Lambrix + + + Patrick Lambrix + + + + + + Yasunori Yamamoto + 2ab8956fa93aef8534b3bc3d9d540a3f9395c96f + + + Yasunori Yamamoto + + + + Yasunori Yamamoto + + + + + + + + 2016-10-19T21:00:00 + Exploring Linked Classical Music Catalogs with OVERTURE + + Exploring Linked Classical Music Catalogs with OVERTURE + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Pasquale Lisena, Manel Achichi, Eva Fernandez, Konstantin Todorov and Raphaël Troncy + + 2016-10-19T18:00:00 + + + 2016-10-21T10:50:00 + 2016-10-21T10:50:00 + + + Efficient Algorithms for Association Finding and Frequent Association Pattern Mining + 2016-10-21T10:50:00 + + 2016-10-21T10:50:00 + Efficient Algorithms for Association Finding and Frequent Association Pattern Mining + 2016-10-21T10:30:00 + 2016-10-21T10:30:00 + Gong Cheng, Daxin Liu and Yuzhong Qu + + + + + + + + + + + + + + + + Konstantin Todorov + 5c4bd7cfa4123af996e157c664660c68c0237209 + + + + Konstantin Todorov + + + + + Konstantin Todorov + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + + Yuzu: Publishing Any Data as Linked Data + 2016-10-19T18:00:00 + John P. 
Mccrae + Yuzu: Publishing Any Data as Linked Data + 2016-10-19T21:00:00 + + + + + + + + + + + + + 9da436ca83fe1f6c31b15af200be0fee8e29faca + + + + Miel Vander Sande + + Miel Vander Sande + + + + Miel Vander Sande + + + + + + + + + + + + + + + + + Universidad Técnica Federico Santa María + Universidad Técnica Federico Santa María + Universidad Técnica Federico Santa María + + + + + + + + + + + + 2016-10-20T10:00:00 + 2016-10-20T10:30:00 + 2016-10-20T10:30:00 + 2016-10-20T10:30:00 + 2016-10-20T10:30:00 + Coffee Break + 2016-10-20T10:00:00 + Coffee Break + + + + + + + + + + + + 2016-10-19T21:00:00 + + + SWISH: An Integrated Semantic Web Notebook + 2016-10-19T21:00:00 + + 2016-10-19T18:00:00 + SWISH: An Integrated Semantic Web Notebook + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Wouter Beek and Jan Wielemaker + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + Tabea Tietz + + + Tabea Tietz + + + Tabea Tietz + + + e0e07bbfbb86dcd6b64b8a0b2d502884c5770bb3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Nanjing University + Nanjing University + + + + + Nanjing University + + + + 2016-10-20T14:50:00 + 2016-10-20T15:30:00 + 2016-10-20T15:30:00 + 2016-10-20T15:30:00 + 2016-10-20T14:50:00 + Coffee Break + 2016-10-20T15:30:00 + Coffee Break + + + An Ontology of Soil Properties and Processes + 2016-10-21T14:10:00 + An Ontology of Soil Properties and Processes + + 2016-10-21T13:50:00 + 2016-10-21T14:10:00 + 2016-10-21T13:50:00 + 2016-10-21T14:10:00 + 2016-10-21T14:10:00 + + Heshan Du, Vania Dimitrova, Derek Magee, Anthony Cohn, Ross Stirling, Giulio Curioni, Barry Clarke and Helen Reeves + + + + + + + + + + + + + + 2016-10-21T11:30:00 + Efstratios Sygkounas, Giuseppe Rizzo and Raphaël Troncy + + 2016-10-21T11:10:00 + 2016-10-21T11:10:00 + A Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis + 2016-10-21T11:30:00 + + + 2016-10-21T11:30:00 + A Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis + 2016-10-21T11:30:00 + + + 2016-10-19T21:00:00 + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + Konstantina Bereta, Guohui Xiao, Manolis Koubarakis, Martina Hodrius and Conrad Bielski + Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation + Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation + + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-19T21:00:00 + 2016-10-19T18:00:00 + PIOTRe: Personal Internet of Things Repository + 2016-10-19T21:00:00 + 2016-10-19T21:00:00 + Eugene Siow, Thanassis Tiropanis and Wendy Hall + + + PIOTRe: Personal Internet of Things Repository + 2016-10-19T18:00:00 + + 2016-10-19T21:00:00 + + + + + + + + + + + + + + + + + + + + + + + + + + Angelos Charalambidis + 211297be068db0fb4d1d58f1d8168d20ef0a06e2 + Angelos Charalambidis + + + + + + + + + Angelos Charalambidis + + + + ff749127c207b8bc7d463999eea7531e0d7d9aba + + + + + Arild Waaler + Arild Waaler + Arild Waaler + + + + + + + + + + + + + + + + + Cassia Trojahn + + + + acb31a208c24f68142fdeba467938e4621e6d836 + Cassia Trojahn + + + + + + + + Cassia Trojahn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + + relation prediction + + information extraction + + + 
commonsense knowledge + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + + information extraction + + + While massive volumes of text are now more easily available for knowledge harvesting, many important facts about our everyday world are not expressed in a particularly explicit way. To address this, we present WebBrain, a new approach for harvesting commonsense knowledge that relies on joint learning from Web-scale data to fill gaps in the knowledge acquisition. We train a neural network model that not only learns word2vec-style vector representations of words but also commonsense knowledge about them. This joint model allows general semantic information to aid in generalizing beyond the extracted commonsense relationships. Experiments show that we can obtain word embeddings that reflect word meanings, yet also allow us to capture conceptual relationships and commonsense knowledge about them. + + + + + commonsense knowledge + WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge + relation prediction + While massive volumes of text are now more easily available for knowledge harvesting, many important facts about our everyday world are not expressed in a particularly explicit way. To address this, we present WebBrain, a new approach for harvesting commonsense knowledge that relies on joint learning from Web-scale data to fill gaps in the knowledge acquisition. We train a neural network model that not only learns word2vec-style vector representations of words but also commonsense knowledge about them. This joint model allows general semantic information to aid in generalizing beyond the extracted commonsense relationships. Experiments show that we can obtain word embeddings that reflect word meanings, yet also allow us to capture conceptual relationships and commonsense knowledge about them. + + + + + + + + + + + + + + + + + + 2016-10-21T15:30:00 + 2016-10-21T15:30:00 + A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies + 2016-10-21T15:50:00 + A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies + + 2016-10-21T15:50:00 + 2016-10-21T15:50:00 + 2016-10-21T15:50:00 + David Carral, Cristina Feier and Pascal Hitzler + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + 2016-10-20T16:10:00 + Aemoo: Linked Data exploration based on Knowledge Patterns + + 2016-10-20T15:50:00 + Andrea Giovanni Nuzzolese, Valentina Presutti, Aldo Gangemi, Silvio Peroni and Paolo Ciancarini + 2016-10-20T16:10:00 + Aemoo: Linked Data exploration based on Knowledge Patterns + 2016-10-20T15:50:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Karl Aberer + + + + Karl Aberer + + a9877790616eb28af52fd602e67b0dbeb50f5399 + + + Karl Aberer + + + + + + + + + STIM + STIM + + + + + + + STIM + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + semantic web + + + semantic label + + + + linked data + Mapping data to a shared domain ontology is a key step in publishing semantic content on the Web. Most of the work on automatically mapping structured and semi-structured sources to ontologies focuses on semantic labeling, i.e., annotating data fields with ontology classes and/or properties. However, a precise mapping that fully recovers the intended meaning of the data needs to describe the semantic relations between the data fields too. 
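The pattern-mining step in the "Leveraging Linked Data" paper above can be pictured with a small sketch; this is not the authors' implementation, and the function and its inputs are hypothetical. The idea: given entity pairs whose relation we want to infer, count which properties, or length-two property paths, connect them in a background Linked Open Data graph, and rank the patterns by frequency.

```python
from collections import Counter
from rdflib import Graph

def rank_linking_patterns(lod: Graph, pairs):
    """Count direct and length-2 property paths connecting each (s, o) pair."""
    counts = Counter()
    for s, o in pairs:
        for _, p, x in lod.triples((s, None, None)):
            if x == o:
                counts[(p,)] += 1                # direct pattern: s -p-> o
            else:
                for _, q, y in lod.triples((x, None, None)):
                    if y == o:
                        counts[(p, q)] += 1      # path pattern: s -p-> x -q-> o
    return counts.most_common()                  # most frequent patterns first
```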
+ Organisation: INRIA / LIX, Ecole Polytechnique
+ Break: Lunch (2016-10-21, 11:50–13:30)
+ Person: Riichiro Mizoguchi
+ Person: Yasmin Alam-Faruque (id 691961f7af28ec05eb58dfcbb5e0a849b7355a2f)
+ Person: Gofran Shukair (id 917192eb73c5e01779552c969e0fa17aa3b08d14)
+ Organisation: Institut Supérieur d'Electronique de Paris
+ Person: Guillermo Vega-Gorgojo
+ Person: Vangelis Karkaletsis (id 7057e10b42006fe77e51f3e67c0ee26c90f94814)
+ Talk: Semantic Web Technologies for improving remote visits of museums, using a mobile robot, by Michel Buffa, Catherine Faron Zucker, Thierry Bergeron and Hatim Aouzal (2016-10-19, 18:00–21:00)
+ Organisation: Department of Informatics, University of Oslo
+ Person: Vincent Link (id daab7413b065316d22855f8762dcc095c0fe41c2)
+ Person: Takahiro Kawamura (id 286baaa4c43daf1cc1c760192728f26ca8e529a2)
+ Organisation: National Institute of Advanced Industrial Science and Technology (AIST)
+ Paper: AUFX-O: Novel Methods for the Representation of Audio Processing Workflows
+ Keywords: ontology; semantic web; audio effects; music production
+ Abstract: This paper introduces the Audio Effects Ontology (AUFX-O), building on previous theoretical models describing audio processing units and workflows in the context of music production. We discuss important conceptualisations of different abstraction layers, their necessity to successfully model audio effects, and their application method. We present use cases concerning the application of effects in music production projects, and the creation of audio effect metadata facilitating a linked data service exposing information about effect implementations. By doing so, we show how our model benefits knowledge sharing, and enables reproducibility and analysis of audio production workflows.
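To make the AUFX-O idea concrete, here is a hedged sketch of describing one effect application as RDF with rdflib. The aufx namespace and term names below are placeholders for illustration, not the ontology's actual IRIs.

```python
from rdflib import Graph, Namespace, Literal, URIRef
from rdflib.namespace import RDF

# Hypothetical namespace and terms; AUFX-O's real vocabulary may differ.
AUFX = Namespace("http://example.org/aufx#")

g = Graph()
fx = URIRef("http://example.org/session1/reverb1")
g.add((fx, RDF.type, AUFX.Reverb))                              # the effect unit
g.add((fx, AUFX.parameter, Literal("decay=2.4s")))              # a parameter setting
g.add((fx, AUFX.appliedTo, URIRef("http://example.org/session1/vocals")))
print(g.serialize(format="turtle"))
```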
+ Person: Nobuyuki Igata (id 8e617ac525d55754ec759d00990dadc08dc38657)
+ Person: Jan Mendling (id 15ada2ef51f9122f07cadca9899ba9e07b35d11b)
+ Talk: Querying Dynamic Datasources with Continuously Mapped Sensor Data, by Ruben Taelman, Pieter Heyvaert, Ruben Verborgh, Erik Mannens and Rik Van de Walle (2016-10-19, 18:00–21:00)
+ Organisation: NTT Resonant Inc.
+ Person: Daniel Gerber
+ Person: Seungjun Yoon (id f12185c1fa207ce21260cd547bf7421c33a2194c)
+ Organisation: Birkbeck, University of London
+ Person: Juan L. Reutter (id 7fdaf70b3acd2223eb51d604ddffe21f5aab775e)
+ Talk: Ontological representation of audio features, by Alo Allik, György Fazekas and Mark Sandler (2016-10-21, 14:10–14:30)
+ Person: Enrico Motta (id 28a0f82609671f47d811e6bee865afb23abfb8db)
+ Talk: Visual query interfaces for semantic datasets: an evaluation study, by Guillermo Vega-Gorgojo, Laura Slaughter, Martin Giese, Simen Heggestøyl, Ahmet Soylu and Arild Waaler (2016-10-19, 11:40–12:00)
+ Talk: Linked Corporations Data in Japan, by Shuya Abe, Yutaka Mitsuishi, Shinichiro Tago, Nobuyuki Igata, Seiji Okajima, Hiroaki Morikawa and Fumihito Nishino (2016-10-19, 18:00–21:00)
+ Paper: The OWL Reasoner Evaluation (ORE) 2015 Resources
+ Keywords: OWL; reasoning; ontologies
+ Abstract: The OWL Reasoner Evaluation (ORE) Competition is an annual competition (with an associated workshop) which pits OWL 2 compliant reasoners against each other on various standard reasoning tasks over naturally occurring problems. The 2015 competition was the third of its kind and had 14 reasoners competing in six tracks comprising three tasks (consistency, classification, and realisation) over two profiles (OWL 2 DL and EL). In this paper, we outline the design of the competition and present the infrastructure used for its execution: the corpora of ontologies, the competition framework, and the submitted systems. All resources are publicly available on the Web, allowing users to easily re-run the 2015 competition, or reuse any of the ORE infrastructure for reasoner experiments or ontology analysis.
+ Person: Harith Alani (id ad0c7d68490b84d6c7f8b0cb8aa1e457559386ef)
+ Organisation: NCSR Demokritos
+ Talk: Linked Sensor Data Generation using Queryable RML Mappings, by Pieter Heyvaert, Ruben Taelman, Ruben Verborgh, Erik Mannens and Rik Van de Walle (2016-10-19, 18:00–21:00)
+ Organisation: University of Bonn & Fraunhofer IAIS
+ Session: Querying/SPARQL (I) (2016-10-20, 10:30–11:50)
+ Paper: SWoTSuite: A Toolkit for Prototyping Cross-domain Semantic Web of Things Applications
+ Keywords: Web of Things; Semantic Web of Things; Semantic Web; Software Engineering; Programming framework; Toolkit; Internet of Things
+ Abstract: Semantic Web of Things (SWoT) applications focus on providing wide-scale interoperability that allows the sharing of IoT devices across domains and the reuse of available knowledge on the web. However, application development is difficult because developers have to do various tasks such as designing an application, annotating IoT data, interpreting data, and combining application domains. To address the above challenges, this paper demonstrates SWoTSuite, a toolkit for prototyping SWoT applications. It hides the use of semantic web technologies as much as possible to avoid the burden of designing SWoT applications, which involves designing ontologies, annotating sensor data, and using reasoning mechanisms to enrich data. Taking inspiration from sharing and reuse approaches, SWoTSuite reuses data and vocabularies. It leverages existing technologies to build applications. We take a hello world naturopathy application as an example and demonstrate an application development process using SWoTSuite. The demo video is available at http://tinyurl.com/zs9flrt.
+ Organisation: Trinity College Dublin
+ Person: Naoya Arakawa (id 40648dec21e68f3aeb68f6bde134daeaf72cc9e0)
+ Person: Pieter Colpaert (id 5d1ef2cf8bfda01a622cac02ab7620013f11211b)
+ Talk: The ESSOT System Goes Wild: an Easy Way For Translating Ontologies, by Mihael Arcan, Mauro Dragoni and Paul Buitelaar (2016-10-19, 18:00–21:00)
+ Organisation: University of Lisbon
+ Person: Amrapali Zaveri
+ Paper: TripleWave: Spreading RDF Streams on the Web
+ Keywords: RDF stream; JSON-LD; software; open source; RDF stream processing; RDF stream publishing
+ Abstract: Processing data streams is increasingly gaining momentum, given the need to process these flows of information in real time and at Web scale. In this context, RDF Stream Processing (RSP) and Stream Reasoning (SR) have emerged as solutions to combine semantic technologies with stream and event processing techniques. Research in these areas has proposed an ecosystem of solutions to query, reason and perform real-time processing over heterogeneous and distributed data streams on the Web. However, so far one basic building block has been missing: a mechanism to disseminate and exchange RDF streams on the Web. In this work we close this gap, proposing TripleWave, a reusable and generic tool that enables the publication of RDF streams on the Web. The features of TripleWave have been derived from requirements of real use-cases, and consider a diverse set of scenarios, independent of any specific RSP implementation. TripleWave can be fed with existing Web streams (e.g. Twitter and Wikipedia streams) or time-annotated RDF datasets (e.g. the LinkedSensorData set), and it can be invoked through both pull- and push-based mechanisms, thus also enabling RSP engines to automatically register and receive data from TripleWave.
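A rough sketch of the kind of stream element TripleWave disseminates: a time-annotated RDF graph serialised as JSON-LD. Apart from prov:generatedAtTime, the field names and the generator below are illustrative assumptions, not TripleWave's actual wire format or API.

```python
import json

def stream_elements(events):
    """Wrap each event as a time-annotated JSON-LD graph (illustrative shape)."""
    for ev in events:
        yield {
            "@context": {"prov": "http://www.w3.org/ns/prov#"},
            "@id": ev["id"],
            "prov:generatedAtTime": ev["timestamp"],   # annotation of the whole graph
            "@graph": ev["triples"],                   # the RDF payload
        }

for item in stream_elements([{"id": "ex:e1",
                              "timestamp": "2016-10-19T18:00:00Z",
                              "triples": [{"@id": "ex:alice", "ex:posted": "hello"}]}]):
    print(json.dumps(item))   # in a pull-based setup, a consumer would fetch these
```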
+ Person: Ceriel Jacobs (id 38f4f77799ce0a5571dd6b38290a3ba890df635d)
+ Person: John Domingue
+ Person: Fernando Serena (id c11398fd7784387762f82da84dbc40348f621dfb)
+ Talk: An Extensible Linear Approach For Holistic Ontology Matching, by Imen Megdiche, Olivier Teste and Cassia Trojahn (2016-10-20, 10:50–11:10)
+ Paper: Gize: A Time Warp in the Web of Data
+ Keywords: SPARQL; LTL; Historical Data
+ Abstract: We introduce the Gize framework for querying historical RDF data. Gize builds upon two main pillars: a lightweight approach to keep historical data, and an extension of SPARQL called SPARQ–LTL, which incorporates temporal logic primitives to enable a rich class of queries. One striking point of Gize is that its features can be readily made available in existing query processors.
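The SPARQ–LTL idea in the Gize paper, temporal operators over a history of RDF snapshots, can be approximated in a few lines. This illustrates the semantics only, under the assumption that the history is an ordered list of snapshots; it is not Gize's implementation.

```python
from rdflib import Graph

def eventually(history, ask_query):   # LTL "F": holds in some snapshot
    return any(g.query(ask_query).askAnswer for g in history)

def always(history, ask_query):       # LTL "G": holds in every snapshot
    return all(g.query(ask_query).askAnswer for g in history)

g1, g2 = Graph(), Graph()
g2.parse(data="@prefix : <http://example.org/> . :a :p :b .", format="turtle")
ask = "PREFIX : <http://example.org/> ASK { :a :p :b }"
print(eventually([g1, g2], ask), always([g1, g2], ask))  # True False
```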
+ Person: Seiji Suzuki (id 4d5496dd9ba58c00736d43ab97d07d1840a3d1bc)
+ Person: Alexander Viehl (id b2039d2835c2c32540de3602bea26276fe98ef19)
+ Paper: Querying Wikidata: Comparing SPARQL, Relational and Graph Databases
+ Keywords: reification; graph databases; relational databases; property graphs; wikidata; sparql
+ Abstract: In this paper, we experimentally compare the efficiency of various database engines for the purposes of querying the Wikidata knowledge-base, which can be conceptualised as a directed edge-labelled graph whose edges can be annotated with meta-information called qualifiers. We select two popular SPARQL databases (Virtuoso, Blazegraph), a popular relational database (PostgreSQL), and a popular graph database (Neo4J) for comparison and discuss various options as to how Wikidata can be represented in the models of each engine. We design a set of experiments to test the relative query performance of these representations in the context of their respective engines. We first execute a large set of atomic lookups to establish a baseline performance for each test setting, and subsequently perform experiments on instances of more complex graph patterns based on real-world examples. We conclude with a summary of the strengths and limitations of the engines observed.
+ Organisation: Linköping University
+ Person: Amit Sheth (id c903202d3919813029e4dc56efbe0a2b2443074c)
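The qualifier problem discussed in the "Querying Wikidata" paper above, attaching meta-information to an edge, is typically handled in RDF with an intermediate statement node. A minimal sketch of that reification style with rdflib; the property names are invented for illustration, not Wikidata's actual vocabulary.

```python
from rdflib import Graph, Namespace, BNode, Literal

EX = Namespace("http://example.org/")
g = Graph()
st = BNode()                                        # node standing for the statement
g.add((EX.Q1, EX.p_position, st))                   # subject -> statement node
g.add((st, EX.value, EX.Q2))                        # statement node -> object
g.add((st, EX.q_startDate, Literal("2009-01-20")))  # qualifier attached to the edge
```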
+ Paper: Updating DL-Lite Ontologies through First-Order Queries
+ Keywords: Practical Algorithms for Ontology Update; Instance-level Ontology Update; OWL 2 QL; DL-Lite
+ Abstract: In this paper we study instance-level update in DL-Lite_A, the description logic underlying the OWL 2 QL standard. In particular we focus on formula-based approaches to ABox insertion and deletion. We show that DL-Lite_A, which is well known for enjoying first-order rewritability of query answering, enjoys a first-order rewritability property also for updates. That is, every update can be reformulated into a set of insertion and deletion instructions computable through a non-recursive Datalog program. Such a program is readily translatable into a first-order query over the ABox considered as a database, and hence into SQL. Exploiting this result we implement an update component for DL-Lite_A-based systems and perform some experiments showing that the approach works in practice.
+ Person: Olivier Teste (id 1c582f2ab56f6e966f4ef04c9b738e8c01964229)
+ Paper: SPARQLGX in Action: Efficient Distributed Evaluation of SPARQL with Apache Spark
+ Keywords: Apache Spark; RDF data; SPARQL evaluators; Distributed systems
+ Abstract: We demonstrate SPARQLGX: our implementation of a distributed SPARQL evaluator. We show that SPARQLGX makes it possible to evaluate SPARQL queries on billions of triples distributed across multiple nodes, while providing attractive performance figures.
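The SPARQLGX demo above evaluates SPARQL over Apache Spark. The translation idea can be sketched with PySpark: each triple pattern becomes a filtered relation, and shared variables become join keys. This is an illustrative sketch of the technique, not SPARQLGX's actual generated code.

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.master("local[*]").getOrCreate()
triples = spark.createDataFrame(
    [("alice", "knows", "bob"), ("bob", "age", "42")], ["s", "p", "o"])

# BGP { ?x knows ?y . ?y age ?a }: one filtered relation per pattern,
# joined on the shared variable ?y.
t1 = triples.filter(triples.p == "knows").selectExpr("s AS x", "o AS y")
t2 = triples.filter(triples.p == "age").selectExpr("s AS y", "o AS a")
t1.join(t2, "y").show()
spark.stop()
```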
+ Talk: Benchmarking End-User Structured Data Search and Exploration, by Roberto Garcia, Rosa Gil, Juan Manuel Gimeno, Eirik Bakke and David Karger (2016-10-19, 11:20–11:40)
+ Paper: Explicit Query Interpretation and Diversification for Context-driven Concept Search across Ontologies
+ Keywords: Ontology Concept Search; Ranking; Diversification; Indexing; Query Interpretation
+ Abstract: Finding relevant concepts from a corpus of ontologies is useful in many scenarios, including document classification, web page annotation, and automatic ontology population. Millions of concepts are contained in a large number of ontologies across diverse domains. SPARQL-based querying demands knowledge of the structure of ontologies and the query language, whereas more user-friendly, simple keyword-based approaches suffer from false positives as concept descriptions in ontologies may be ambiguous and overlapping. In this paper, we propose a keyword-based concept search framework that (1) exploits the structure and semantics in ontologies, by constructing contexts for each concept; (2) generates the interpretations of a query; and (3) balances relevance and diversity of search results. A comprehensive evaluation against both the domain-specific BioPortal and the general-purpose Falcons on widely-used performance metrics demonstrates that our system outperforms both.
+ Person: Dag Hovland (id 30627422b3fe20465030ad86165544107272257c)
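The relevance/diversity balancing in the concept-search paper above is in the spirit of greedy re-ranking. A hedged sketch of that general technique (the paper's actual scoring function may differ; the names and the lam trade-off parameter are illustrative):

```python
def diversify(candidates, relevance, similarity, k, lam=0.7):
    """Greedily pick k items, trading off relevance against similarity
    to the items already selected (maximal-marginal-relevance style)."""
    selected, pool = [], list(candidates)
    while pool and len(selected) < k:
        best = max(pool, key=lambda c: lam * relevance[c]
                   - (1 - lam) * max((similarity(c, s) for s in selected),
                                     default=0.0))
        selected.append(best)
        pool.remove(best)
    return selected
```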
+ Paper: Who-Does-What: A knowledge base of people's occupations and job activities
+ Keywords: Open Information Extraction; knowledge base induction from text; Knowledge acquisition
+ Abstract: System analysis and design is concerned with the creation of conceptual models. In this paper, we introduce a novel resource called "Who-Does-What" (WDW) that supports the creation and quality assurance of such models. WDW provides a knowledge base of activities for classes of people engaged in a wide range of different occupations. The resource is semi-automatically created by populating the manually-created Standard Occupational Classification (SOC) of the US Department of Labor with activities found on the Web.
+ Person: Bojan Božić (id 4fe69f25d9e2dfe3d9dc889a8ec901079b875e3b)
+ Person: David Karger
+ Person: Satoshi Kume (id f9fb89aaaa81fb8c66880b4929cb250c8857638b)
+ Person: Alasdair Gray
+ Talk: Towards a distributed, scalable and real-time RDF Stream Processing engine, by Xiangnan Ren (2016-10-18, 14:45–15:00)
+ Paper: Expanding Wikidata's Parenthood Information by 178%, or How To Mine Relation Cardinality Information
+ Keywords: Incomplete information; Completeness estimation; Knowledge bases; Information extraction
+ Abstract: While automated knowledge base construction so far has largely focused on fully qualified facts, e.g. <Obama, hasChild, Malia>, the Web contains also extensive amounts of cardinality information, such as that someone has two children without giving their names. In this paper we argue that the extraction of such information could substantially increase the scope of knowledge bases. For the sample of the hasChild relation in Wikidata, we show that simple regular-expression based extraction from Wikipedia can increase the size of the relation by 178%. We also show how such cardinality information can be used to estimate the recall of knowledge bases.
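The parenthood paper above relies on simple regular-expression extraction. A small sketch in that spirit (the pattern and number-word table are illustrative, not the paper's actual expressions):

```python
import re

WORDS = {"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}
PAT = re.compile(r"ha[sd]\s+(\d+|one|two|three|four|five)\s+(?:children|sons|daughters)")

def child_cardinality(sentence):
    """Return the stated number of children in a sentence, or None."""
    m = PAT.search(sentence.lower())
    if not m:
        return None
    tok = m.group(1)
    return int(tok) if tok.isdigit() else WORDS[tok]

print(child_cardinality("Obama has two children."))  # 2
```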
+ Paper: Seed, an End-user Text Composition Tool for the Semantic Web
+ Keywords: semantic content authoring; microdata; semantic web; lod
+ Abstract: Despite developments of Semantic Web-enabling technologies, the gap between non-expert end-users and the Semantic Web still exists. In the field of semantic content authoring, tools for interacting with semantic content remain directed at highly trained individuals. This adds to the challenges of bringing user-generated content into the Semantic Web. In this paper, we present Seed, short for Semantic Editor, an extensible knowledge-supported natural language text composition tool, which targets non-experienced end-users, enabling automatic as well as semi-automatic creation of standards-based, semantically annotated textual content. We point out the structure of Seed, compare it with related work and explain how it utilizes Linked Open Data and state-of-the-art Natural Language Processing to realize user-friendly generation of textual content for the Semantic Web. We also present experimental evaluation results involving a diverse group of more than 120 participants, which showed that Seed helped end-users easily create and interact with semantic content with nearly no prerequisite knowledge.
+ Talk: WebBrain: Joint Neural Learning of Large-Scale Commonsense Knowledge, by Gerard de Melo (2016-10-20, 15:30–15:50)
+ Paper: On the Role of Semantics for Detecting pro-ISIS Stances on Social Media
+ Keywords: Semantics; Twitter; Feature Engineering; Radicalisation Detection
+ Abstract: From its start, the so-called Islamic State of Iraq and the Levant (ISIL/ISIS) has been successfully exploiting social media networks, most notoriously Twitter, to promote its propaganda and recruit new members, resulting in thousands of social media users adopting a pro-ISIS stance every year. Automatic identification of pro-ISIS users on social media has, thus, become the centre of interest for various governmental and research organisations. In this paper we propose a semantic-based approach for radicalisation detection on Twitter. Unlike most previous works, which mainly rely on the lexical and contextual representation of the content published by Twitter users, our approach extracts and makes use of the underlying semantics of words exhibited by these users to identify their pro/anti-ISIS stances. Our results show that classifiers trained from words' semantics outperform those trained from lexical and network features by 2% on average F1-measure.
+ Paper: Semantic Web Technologies for improving remote visits of museums, using a mobile robot
+ Keywords: Robotics; Semantic Web; Museum Data
+ Abstract: The Azkar research project focuses on the remote control of a mobile robot using the emerging Web technology WebRTC for real-time communication. One of the use cases addressed is a remote visit of the French Museum of the Great War in Meaux. For this purpose, we designed an ontology for describing the main scenes in the museum, the objects that compose them, the different trails the robot can follow in a given time period, for a targeted audience, and the waypoints and observation points. This RDF dataset is exploited to assist the human guide in designing a trail, and possibly adapting it during the visit. In this paper we present the Azkar Museum Ontology, the RDF dataset describing some emblematic scenes of the museum, and an experiment that took place in June 2016 with a robot controlled by an operator located 800 km from the museum. We propose to demonstrate this work in real time during the conference by organizing a remote visit from the conference demo location.
+ Person: Isa Guclu (id 4a6bb7ff3169ba73ad0c602322122cbd3e769f13)
+ Paper: VLog: A Column-Oriented Datalog System for Large Knowledge Graphs
+ Keywords: Knowledge graphs; Database; Datalog; Rules; Reasoning
+ Abstract: We present VLog, a new system for answering arbitrary Datalog queries on top of a wide range of databases, including both relational and RDF databases. VLog is designed to efficiently perform intensive rule-based computation on large Knowledge Graphs (KGs). It adapts column-store technologies to attain high efficiency in terms of memory usage and speed, enabling us to process Datalog queries with thousands of rules over databases with hundreds of millions of tuples, in a live demonstration on a laptop. Our demonstration provides in-depth insights into the workings of VLog, and presents important new features such as support for arbitrary relational DBMS.
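The rule-based computation VLog performs can be illustrated with a tiny semi-naive Datalog evaluation of the transitive-closure rule reach(x,z) :- reach(x,y), edge(y,z). This sketch shows the general technique only, not VLog's column-oriented algorithm; the example relation is invented.

```python
# Semi-naive evaluation: only facts derived in the previous round (delta)
# are joined against the base relation, avoiding redundant re-derivations.
edges = {("a", "b"), ("b", "c"), ("c", "d")}
reach, delta = set(edges), set(edges)
while delta:
    new = {(x, z) for (x, y) in delta for (y2, z) in edges if y == y2} - reach
    reach |= new
    delta = new
print(sorted(reach))  # all reachable pairs, e.g. ('a', 'd')
```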
+ + + Knowledge graphs + + + Datalog + + + Reasoning + Database + + + + Reasoning + + + VLog: A Column-Oriented Datalog System for Large Knowledge Graphs + + + + + + + + + + + + + + + + + + Kouji Kozaki + + Kouji Kozaki + + + + + + + c493b1b07fa4dacfdda46edc55ebae341758972f + + Kouji Kozaki + + + + + + + + + + + + + + + + + + + + Zlatan Dragisic + + + Zlatan Dragisic + 3419a31dd2cda521ea35433b05de5cfdad69c558 + + Zlatan Dragisic + + + + + + + + 172da8328c6d7e6164b3b7e42bea3c565c88c41d + Wen Sun + Wen Sun + + + + + + + Wen Sun + + + + + + + + + + + + + + + Oslo University Hospital + + Oslo University Hospital + + + + Oslo University Hospital + + + + + + + + + + + + + + + + + + + + + + + + Fabian M. Suchanek + + Fabian M. Suchanek + + + + + + 8246b76bd2ce30dc6d8dbef5272f0597dbab58f2 + + + + Fabian M. Suchanek + + + + + + + + + + + Thomas Ploeger + + + + + + Thomas Ploeger + + + + Thomas Ploeger + + + + MARC + RDF conversion + Exploratory Search Engine + RDF interconnexion + Exploring Linked Classical Music Catalogs with OVERTURE + In this paper, we introduce OVERTURE - a web application allowing to explore the interlinked catalogs of major music libraries including the French National Library, Radio France and the Philharmonie de Paris. We have first developed the DOREMUS ontology which is an extension of the well-known FRBRoo model for describing works and expressions as well as the creation processus. We have implemented a so-called marc2rdf tool allowing for the conversion and linking of bibliographical entries about music works, interpretations and expressions from their original MARC-format to RDF following this DOREMUS ontology. We present an exploratory search engine prototype that enables to browse through the reconciled collection of bibliographical records of classical music and to highlight the various interpretations of a work, its derivative, its performance casting as well as other rich metadata. + FRBRoo + In this paper, we introduce OVERTURE - a web application allowing to explore the interlinked catalogs of major music libraries including the French National Library, Radio France and the Philharmonie de Paris. We have first developed the DOREMUS ontology which is an extension of the well-known FRBRoo model for describing works and expressions as well as the creation processus. We have implemented a so-called marc2rdf tool allowing for the conversion and linking of bibliographical entries about music works, interpretations and expressions from their original MARC-format to RDF following this DOREMUS ontology. We present an exploratory search engine prototype that enables to browse through the reconciled collection of bibliographical records of classical music and to highlight the various interpretations of a work, its derivative, its performance casting as well as other rich metadata. + + + RDF conversion + + + + OVERTURE + + + + FRBRoo + RDF interconnexion + + + Classical Music + Exploring Linked Classical Music Catalogs with OVERTURE + + + + Exploratory Search Engine + + Exploring Linked Classical Music Catalogs with OVERTURE + + + Classical Music + + + + OVERTURE + + + MARC + + + + + + + + Structure-guiding Modular Reasoning for Expressive Ontologies + classification + + We propose a technique that combine an OWL 2 EL reasoner with an OWL 2 reasoner to classify expressive ontologies. We exploit the information implied by the ontology structure to identify a small non-EL ontology that contains necessary axioms to ensure the completeness. 
+reasoner
+ontology
+modular structure
+Scott Markel
+Gong Cheng
+Access Logs Don't Lie: Towards Traffic Analytics for Linked Data Publishers
+Data publication
+Linked Data
+Access analytics
+Considerable investment in RDF publishing has recently led to the birth of the Web of Data. But is this investment worth it? Are publishers aware of what their linked datasets' traffic looks like? We propose an access analytics platform for linked datasets. The system mines traffic insights from the logs of registered RDF publishers and extracts Linked Data-specific metrics not available in traditional web analytics tools. We present a demo instance showing one month (December 2014) of real traffic to the British National Bibliography RDF dataset.
+SWISH: An Integrated Semantic Web Notebook
+Prolog
+Online editor
+Semantic Web
+Query editor
+Notebook
+SPARQL editors make it easier to write queries and inspect their results. Notebooks already support computer and data scientists in domains like statistics and machine learning. There is currently no integrated notebook solution for Semantic Web (SW) programming that combines the strengths of SPARQL editors with the benefits of notebooks. SWISH gives an integrated notebook experience for the Semantic Web programmer.
+Universidad de Chile
+Ontology Matching
+2016-10-20T10:30:00
+2016-10-20T11:50:00
+Linhong Zhu
+Building Evidence Graph for Clinical Decision Support
+Evidence-based Medicine
+Knowledge Graph
+Clinical Decision Support
+Evidence-based medicine intends to optimize clinical decision making by using evidence. Semantic query answering could help to find the most relevant evidence. However, at the point of care, there is rarely enough time for human reading of the evidence. In this poster, we propose to build an evidence graph for clinical decision support, in which an evidence ontology is defined with an extension of SWRL rules. On top of this graph, we perform evidence querying and evidence fusion to generate a ranking list of decision options. Our prototype implementation of the evidence graph demonstrates its assistance to decision making by combining a variety of knowledge-driven and data-driven decision services.
+Joanna Biega
+SQuaRE: A Visual Tool For Creating R2RML Mappings
+R2RML
+We present the recent progress of SQuaRE, the SPARQL Query and R2RML mappings Environment, which provides a graphical interface for creating R2RML mappings that can be immediately tested by executing SPARQL queries. SQuaRE is a web-based tool with an easy-to-use interface that can be applied in ontology-based data access applications. We describe SQuaRE's main features, its architecture, as well as its technical details.
+Mappings
+Visual web based interface
+OBDA
+Ontop-spatial for Geospatial Data Integration using GeoSPARQL-to-SQL Translation
+GeoSPARQL
+Open geospatial data
+Land management
+Geospatial databases
+OBDA
+We present Ontop-spatial, a geospatial extension of the well-known OBDA system Ontop, that leverages the technologies of geospatial databases and enables GeoSPARQL-to-SQL translation. We showcase the functionalities of the system in real-world use cases which require data integration of different geospatial sources.
+Christoph Lange
+Carlos Badenes
+QA4LOV: A Natural Language Interface to Linked Open Vocabulary
+Ghislain Auguste Atemezing and Pierre-Yves Vandenbussche
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Ranking Feature for Classifier-based Instance Matching
+ranking
+instance matching
+Instance matching is the problem of finding the instances that describe the same object. It can be viewed as a classification problem, where a pair of instances is predicted as a match or non-match. A common limitation of existing classifier-based matching systems is the absence of a ranking over instance pairs. We propose using a ranking feature to enhance the classifier in instance matching. Experiments on real datasets confirm a significant improvement when applying our method.
+classification
+Corine Deliot
+OWLAx: A Protege Plugin to Support Ontology Axiomatization through Diagramming
+modeling tool
+class diagram
+Protege plugin
+OWLAx
+axiomatization
+Protege
+Once the conceptual overview, in terms of a somewhat informal class diagram, has been designed in the course of engineering an ontology, the process of adding many of the appropriate logical axioms is mostly a routine task. We provide a Protege plugin which supports this task, together with a visual user interface, based on established methods for ontology design pattern modeling.
+Marieke van Erp
+Gerhard Wohlgenannt
+Birte Glimm
+Human-Machine Collaboration over Linked Data
+Paolo Pareti
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Petar Ristoski
+An interactive visualisation for RDF data
+RDF
+visualisation
+endpoint
+We demonstrate a visualisation aimed at helping SPARQL-fluent users to produce queries over a dataset they are not familiar with. This visualisation consists of a labelled graph whose nodes are the different types of entities in the RDF dataset, and where two types are related if entities of these types appear related in the RDF dataset. To avoid visual overload when the number of types in a dataset is too large, the graph groups together all types that are subclasses of a more general type, and users are given the option of navigating through this hierarchy of types, dividing type nodes into subtypes as they see fit. We illustrate our visualisation using the Linked Movie Database dataset, and also offer a visualisation of DBpedia.
+Multi-level semantic labelling of numerical values
+numerical data
+semantic labelling
+Open Data
+With the success of Open Data, a huge number of tabular data sources has become available that could potentially be mapped and linked into the Web of (Linked) Data. Most existing approaches to “semantically label” such tabular data rely on mappings of textual information to classes, properties, or instances in RDF knowledge bases in order to link – and eventually transform – tabular data into RDF. However, as we will illustrate, Open Data tables typically contain a large portion of numerical columns and/or non-textual headers; therefore, solutions that solely focus on textual “cues” are only partially applicable for mapping such data sources. We propose an approach to find and rank candidates of semantic labels and context descriptions for a given bag of numerical values. To this end, we apply hierarchical clustering over information taken from DBpedia to build a background knowledge graph of possible “semantic contexts” for bags of numerical values, over which we perform a nearest neighbour search to rank the most likely candidates. Our evaluation shows that our approach can assign fine-grained semantic labels when there is enough supporting evidence in the background knowledge graph. In other cases, our approach can nevertheless assign high-level contexts to the data, which could potentially be used in combination with other approaches to narrow down the search space of possible labels.
+Politecnico di Milano
+QA4LOV: A Natural Language Interface to Linked Open Vocabulary
+data usage
+user experience
+Vocabulary Catalogue
+Question Answering
+There is an increasing presence of structured data due to the adoption of Linked Data principles on the web. At the same time, web users have different skills and want to be able to interact with Linked datasets in various manners, such as asking questions in natural language. Over the last years, the QALD challenge series has become the reference for benchmarking question answering systems. However, QALD questions are targeted at datasets, not at vocabulary catalogues. This paper proposes a first implementation of a Question Answering (QA) system applied to the Linked Open Vocabularies (LOV) catalogue, mainly focused on metadata information retrieval. The goal is to provide end users with yet another means of access to the metadata information available in LOV, using natural language questions.
+Data Mining
+2016-10-21T10:30:00
+2016-10-21T11:50:00
+Filip De Turck
+A Practical Acyclicity Notion for Query Answering over Horn-SRIQ Ontologies
+Chase
+Reasoning
+Acyclicity notions
+Horn Description Logics
+Conjunctive query answering
+Conjunctive query answering over expressive Horn Description Logic ontologies is a relevant and challenging problem which, in some cases, can be addressed by application of the chase algorithm. In this paper, we define a novel acyclicity notion which provides a sufficient condition for termination of the restricted chase over Horn-SRIQ ontologies. We show that our notion generalizes most of the existing acyclicity conditions (both theoretically and empirically) and that its use results in a more efficient reasoning procedure. Furthermore, we implement a materialization-based reasoner for acyclic ontologies which vastly outperforms state-of-the-art reasoners.
+Suvodeep Mazumdar
+Translating Ontologies in a Real-World Setting with ESSOT
+Collaborative Ontology Management Platform
+Context-based Concept Translation
+Ontology Translation
+Multilingual Ontology Management
+To enable knowledge access across languages, ontologies, which are often represented only in English, need to be translated into different languages. The main challenge in translating ontologies is to find the right term with respect to the domain modeled by the ontology itself. Machine translation services may help in this task; however, a crucial requirement is to have translations validated by experts before the ontologies are deployed. Real-world applications must implement a support system addressing this task to relieve the experts' work of validating all translations. In this paper, we present ESSOT, an Expert Supporting System for Ontology Translation. The peculiarity of this system is that it exploits semantic information from the concept's context to improve the quality of label translations. The system has been tested both within the Organic.Lingua project, by translating the modeled ontology into three languages, and on other multilingual ontologies, in order to evaluate the effectiveness of the system in other contexts. The results have been compared with the translations provided by the Microsoft Translator API, and the improvements demonstrate the viability of the proposed approach.
+Christoforos Svingos
+Multi-level semantic labelling of numerical values
+Sebastian Neumaier, Jürgen Umbrich, Josiane Xavier Parreira and Axel Polleres
+2016-10-21T15:30:00
+2016-10-21T15:50:00
+TNO - Netherlands Organization for Applied Scientific Research
+Alexander Panchenko
+Nathalie Hernandez
+Type Prediction for Entities in DBpedia by Aggregating Multilingual Resources
+Conformity
+Mappings
+The entity type is considered very important in DBpedia. Since this information is described inconsistently across different languages, it is difficult to recognize the most suitable type for an entity. We propose a method to predict the entity type based on a novel conformity measure, combining the consideration of the specificity level of types with majority voting. The experimental results show that our method can suggest informative types and outperforms the baselines.
+Consistency
+Ontology
+DBpedia
+SPARQLGX: Efficient Distributed Evaluation of SPARQL with Apache Spark
+Damien Graux, Louis Jachiet, Pierre Geneves and Nabil Layaida
+2016-10-20T10:50:00
+2016-10-20T11:10:00
+Liang Zhao
+Thierry Bergeron
+LIXR: Quick, succinct conversion of XML to RDF
+John P. Mccrae and Philipp Cimiano
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Democritus University of Thrace
+Konstantina Bereta
+CubeQA—Question Answering on RDF Data Cubes
+Data Cube
+Question Answering
+Semantic Web
+Statistical data in the form of RDF Data Cubes is becoming increasingly valuable as it influences decisions in areas such as health care, policy and finance. While a growing amount is becoming freely available through the open data movement, this data is opaque to laypersons. Semantic Question Answering (SQA) technologies provide access via free-form natural language queries, but general SQA systems cannot process RDF Data Cubes. At the intersection of RDF Data Cubes and SQA, we create a new subfield of SQA, called RDCQA. We create an RDCQA benchmark as task 3 of the QALD-6 evaluation challenge, to stimulate further research and enable quantitative comparison between RDCQA systems. We design and evaluate the CubeQA algorithm, which is the first RDCQA system and achieves a global F1 score of 0.43 on the QALD6T3-test dataset, showing that RDCQA is feasible.
+Ganesh Ramakrishnan
+Seoul National University
+Using word2vec to Build a Simple Ontology Learning System
+Gerhard Wohlgenannt and Filip Minic
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Reducing the Network Load of Triple Pattern Fragments by Supporting Bind Joins
+Olaf Hartig and Carlos Buil Aranda
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+EXISTStential Aspects of SPARQL
+SPARQL
+The SPARQL 1.1 Query Language [SPARQL] permits patterns inside FILTER expressions using the EXISTS construct, specified by using substitution. Substitution destroys some of the aspects of SPARQL that make it suitable as a data access language. Substitution also causes problems in the SPARQL algebra and produces counterintuitive results. Fixing the problems with EXISTS is best done with a completely different definition that does not use substitution at all.
+Semantic Web
+RDF
+René Speck
+Marco Rospocher
+Towards Building Open Knowledge Base From Programming Question-Answering Communities
+Wei Emma Zhang, Ermyas Abebe, Quan Z. Sheng and Kerry Taylor
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Nick Portokallidis
+How to feed Apache HBase with petabytes of RDF data: An extremely scalable RDF store based on Eclipse RDF4J framework and Apache HBase database
+Adam Sotona
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Building Urban LOD for Solving Illegally Parked Bicycles in Tokyo
+Shusaku Egami, Takahiro Kawamura and Akihiko Ohsuga
+2016-10-19T14:20:00
+2016-10-19T14:40:00
+Craig Knoblock
+Marco Balduini
+Marcelo Arenas
+Heshan Du
+Planning Ahead: Stream-Driven Linked-Data Access under Update-Budget Constraints
+Shen Gao, Daniele Dell'Aglio, Soheila Dehghanzadeh, Abraham Bernstein, Emanuele Della Valle and Alessandra Mileo
+2016-10-20T13:30:00
+2016-10-20T13:50:00
+Stasinos Konstantopoulos
+RIKEN MetaDatabase: a database publication platform for RIKEN's life-science researchers that promotes research collaborations over different research areas
+Kai Lenz, Hiroshi Masuya and Norio Kobayashi
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Learning to Assess Linked Data Relationships Using Genetic Programming
+Ilaria Tiddi, Mathieu d'Aquin and Enrico Motta
+2016-10-20T13:50:00
+2016-10-20T14:10:00
+A Survey on Challenges in Web Markup Data for Entity Retrieval
+Entity Retrieval
+Web Markup
+Embedded markup based on Microdata, RDFa, and Microformats has become prevalent on the Web and constitutes an unprecedented data source. RDF statements from markup are highly redundant, co-references are very frequent yet explicit links are missing, and such statements contain numerous errors. We present a thorough analysis of the challenges associated with markup data in the context of entity retrieval. We analyze four main factors: (i) co-references, (ii) redundancy, (iii) inconsistencies, and (iv) accessibility of information in the case of URLs. We conclude with general guidelines on how to avoid such challenges when dealing with embedded markup data.
+Knowledge Base Augmentation
+Web Data Commons
+Semantic Web Technologies and Big Data Infrastructures: SPARQL Federated Querying of Heterogeneous Big Data Stores
+Stasinos Konstantopoulos, Angelos Charalambidis, Giannis Mouchakis, Antonis Troumpoukis, Jürgen Jakobitsch and Vangelis Karkaletsis
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Chris Biemann
+DeFacto - Temporal and multilingual Deep Fact Validation
+Daniel Gerber, Diego Esteves, Jens Lehmann, Lorenz Bühmann, Ricardo Usbeck, Axel-Cyrille Ngonga Ngomo and René Speck
+2016-10-19T11:40:00
+2016-10-19T12:00:00
+Enriching Answers in Question Answering Systems using Linked Data
+Rivindu Perera, Parma Nand and Gisela Klette
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Newcastle University
+University of Jyväskylä
+Paolo Pareti
+GovLOD: Towards a Linked Open Data Portal
+SPARQL
+RDF
+Nowadays, governments and public agencies publish open data at an exponentially growing rate on dedicated portals. These open data have a problem: they don't have a well-defined structure, because the focus is on publishing the data and not on how they are used. GovLOD is a platform that aims to transform the information found in these heterogeneous files into Linked Open Data using RDF triples.
+Linked Open Data
+OCR
+Theofilos Mailis
+Xinqi Qian
+Agriculture Activity Ontology : An ontology for core vocabulary of agriculture activity
+Joo Sungmin, Seiji Koide, Hideaki Takeda, Daisuke Horyu, Akane Takezaki and Tomokazu Yoshida
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Jasper Roes
+Kaiser Permanente / ITHSDO
+Lexicalizing DBpedia with Realization Enabled Ensemble Architecture: RealText-lex2 Approach
+Rivindu Perera, Parma Nand and Gisela Klette
+2016-10-19T18:00:00
+2016-10-19T21:00:00
+Freddie Venegas
+YAGO: a multilingual knowledge base from Wikipedia, Wordnet, and Geonames
+knowledge base
+Wikipedia
+YAGO is a large knowledge base that is built automatically from Wikipedia, WordNet and GeoNames. The project combines information from 10 Wikipedias in different languages, thus giving the knowledge a multilingual dimension. It also attaches spatial and temporal information to many facts, and thus allows the user to query the data over space and time. YAGO focuses on extraction quality and achieves a manually evaluated precision of 95%. In this paper, we explain from a general perspective how YAGO is built from its sources, how its quality is evaluated, how a user can access it, and how other projects utilize it.
+WordNet
+Geonames
+Dimitris Kontokostas
+Antonis Troumpoukis
+WU Vienna
+Universitat de Lleida
+Jelle Nelis
+The OWL Reasoner Evaluation (ORE) 2015 Resources
+Bijan Parsia, Nicolas Matentzoglu, Rafael S. Gonçalves, Birte Glimm and Andreas Steigmiller
+2016-10-21T16:10:00
+2016-10-21T16:30:00
+Martina Hodrius
+Julian Dolby
+Conrad Bielski
+Exploiting Emergent Schemas to make RDF systems more efficient
+emergent schema
+Query optimization
+We build on our earlier finding that more than 95% of the triples in actual RDF triple graphs have a remarkably tabular structure, whose schema does not necessarily follow from explicit metadata such as ontologies, but which an RDF store can automatically derive by looking at the data using so-called ``emergent schema'' detection techniques. In this paper we investigate how computers, and in particular RDF stores, can take advantage of this emergent schema to more compactly store RDF data and more efficiently optimize and execute SPARQL queries. To this end, we contribute techniques for efficient emergent-schema-aware RDF storage and new query operator algorithms for emergent-schema-aware scans and joins. In all, these techniques allow RDF processors to fully catch up with relational database techniques in terms of rich physical database design options and efficiency, without requiring a rigid upfront schema structure definition.
+RDF
+SPARQL
+Opening Ceremony
+2016-10-19T09:00:00
+2016-10-19T09:30:00
+Scott Kimberly
+Domenico Fabio Savo
+Akihiko Ohsuga
+Xavier Oriol
+Optimization Techniques for 2-hop Labeling of Dynamic Directed Acyclic Graphs
+Jinhyun Ahn
+2016-10-18T14:30:00
+2016-10-18T14:45:00
+Hasso-Plattner-Institute
+LAAS-CNRS
+INRIA / Univ. Grenoble
+Extending SPARQL for data analytic tasks
+Kavitha Srinivas, Julian Dolby, Achille Fokoue, Mariano Rodríguez Muro and Wen Sun
+2016-10-20T10:30:00
+2016-10-20T10:50:00
+Airbus Defence and Space
+ISWC 2016
+2016-10-17T09:00:00
+2016-10-21T18:00:00
+Semantic Sensitive Simultaneous Tensor Factorization
+Makoto Nakatsuji
+2016-10-20T16:10:00
+2016-10-20T16:30:00
+Eugene Siow
+Monika Solanki
+A Knowledge Base Approach to Cross-lingual Keyword Query Interpretation
+Query Interpretation
+Cross-lingual
+Semantic Search
+The number of entities in large knowledge bases available on the Web has been increasing rapidly, making it possible to propose new ways of intelligent information access. In addition, there is a pressing need for technologies that can enable cross-lingual information access. As a simple and intuitive way of specifying information needs, keyword queries enjoy widespread usage, but suffer from challenges including ambiguity, incompleteness and cross-linguality. In this paper, we present a knowledge base approach to cross-lingual keyword query interpretation that transforms keyword queries in different languages into a semantic representation, which can facilitate query disambiguation and expansion, and also bridge the language barriers between queries. The experimental results show that our approach achieves both high efficiency and effectiveness and considerably outperforms the baselines.
+Jacopo Urbani
+Semantic Web Company
+Wouter Maroy
+Stephan Grimm
+Kazunari Hashimoto
+Muhammad Saleem
+Yosky Kataoka
+A Collection of Benchmark Datasets for Systematic Evaluations of Machine Learning on the Semantic Web
+Petar Ristoski, Gerben Klaas Dirk de Vries and Heiko Paulheim
+2016-10-20T15:50:00
+2016-10-20T16:10:00
+Monitoring, Discussing and Publishing Energy Consumption Data using EnergyUse
+Climate Change
+Energy Monitors
+Semantic Collective Platforms
+Energy Consumption
+We introduce EnergyUse, a collaborative website designed for raising climate change awareness by offering users the ability to view and compare the actual energy consumption of various appliances, and to share and discuss energy conservation tips in an open and social environment. The platform collects data from smart plugs, and exports appliance consumption and community-generated energy tips as linked data. EnergyUse is supported by multiple automatic processes that semantically link related contributions, generate appliance descriptions, and publish consumption data using the EnergyUse ontology.
Talk: Studying Metadata for better client-server trade-offs in Linked Data publishing
  Author: Miel Vander Sande
  Slot: 2016-10-18T14:00:00 to 2016-10-18T14:15:00

Person: Joo Sungmin (sha1: 4fb9b67420f8f10a024c13f62f3119f2d77f3319)
Person: Renzo Angles (sha1: a27d42b6a587d5e289e2b7b901dc097b9c1a777e)

Paper: Learning to Assess Linked Data Relationships Using Genetic Programming
  Keywords: Genetic Programming; Linked Data; Entity Relatedness
  Abstract: The goal of this work is to learn a measure supporting the detection of strong relationships between Linked Data entities. Such relationships can be represented as paths of entities and properties, and can be obtained through a blind graph search process traversing Linked Data. The challenge here is therefore the design of a cost-function that is able to detect the strongest relationship between two given entities, by objectively assessing the value of a given path. To achieve this, we use a Genetic Programming approach in a supervised learning method to generate path evaluation functions that compare well with human evaluations. We show how such a cost-function can be generated only using basic topological features of the nodes of the paths as they are being traversed (i.e. without knowledge of the whole graph), and how it can be improved through introducing a very small amount of knowledge about the vocabularies of the properties that connect nodes in the graph.
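The cost-function in the Genetic Programming paper above is evolved automatically, but its ingredients are simple: per-node topological features collected while the path is traversed. The following Java sketch is a hand-written stand-in for such a function; the formula and the feature choice are invented for illustration, since the abstract does not give the evolved expressions.

```java
/** Illustrative stand-in for an evolved path-cost function (hypothetical formula). */
public final class PathCost {

	/**
	 * inDegree[i], outDegree[i] and distinctProps[i] are the basic topological
	 * features of the i-th node on the path, observed during traversal.
	 * Lower cost = stronger candidate relationship.
	 */
	public static double cost(int[] inDegree, int[] outDegree, int[] distinctProps) {
		double cost = 0.0;
		for (int i = 0; i < inDegree.length; i++) {
			// Penalise hub nodes: high-degree nodes connect almost everything,
			// so paths through them rarely indicate a *specific* relationship.
			cost += Math.log(1 + inDegree[i] + outDegree[i]) / (1.0 + distinctProps[i]);
		}
		return cost / inDegree.length; // normalise so paths of different lengths compare
	}
}
```

Under this toy formula, paths through low-degree nodes reached via diverse properties score as cheaper, i.e. stronger, relationships.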
Session: Poster 3, 2016-10-18T17:00:00 to 2016-10-18T17:30:00

Person: Ruben Verborgh (sha1: fb22bc1100f1f5b282380024f58bf4e906fd3e69)

Talk: Feature Generation using Ontologies during Induction of Decision Trees on Linked Data
  Author: Yordan Terziev
  Slot: 2016-10-18T14:15:00 to 2016-10-18T14:30:00

Person: Eiki Takayama (sha1: 1848905605caa9952b93550a4b5ba43de98da8d2)
Person: Lijuan Xue (sha1: dec36ae14a493558407a5f9c57d3dadea131f528)

Organisation: Vienna University of Economics and Business

Person: Yordan Terziev (sha1: 6d4a545ed7db4840477a964fcec27d83c3deb354)
Person: Thomas Wilmering (sha1: 0eddeb7726394f5c00d415d6e0e93075fd6be528)

Paper: Incorporating API data into SPARQL query answers
  Keywords: JSON; API; SPARQL
  Abstract: In this demo we present an extension of SPARQL which allows queries to connect to JSON APIs and integrate the obtained information into query answers. We achieve this by adding a new operator to SPARQL, and implement this extension on top of the Jena framework in order to illustrate how it functions with real-world APIs.

Organisation: Université Libre de Bruxelles

Talk: Seed, an End-user Text Composition Tool for the Semantic Web
  Authors: Bahaa Eldesouky, Menna Bakry, Heiko Maus and Andreas Dengel
  Slot: 2016-10-19T11:00:00 to 2016-10-19T11:20:00

Organisation: fluid Operations AG

Paper: Building and Exploring National-wide Enterprise Knowledge Graphs for Investment Analysis in an Incremental Way
  Keywords: information extraction; data fusion; D2R; investment analysis; knowledge graphs
  Abstract: Full-fledged enterprise information can be a great weapon in investment analysis. However, enterprise information is scattered in different databases and websites. The information from a single source is incomplete and also suffers from noise. It is not an easy task to integrate and utilize information from diverse sources in real business scenarios. In this paper, we present an approach to build knowledge graphs (KGs) by exploiting semantic technologies to reconcile the data from diverse sources incrementally. We build a nation-wide enterprise KG which incorporates information about 40,000,000 enterprises in China. We also provide querying about enterprises and data visualization capabilities, as well as novel investment analysis scenarios, including finding an enterprise's real controllers, innovative enterprise analysis, enterprise path discovery and so on. The KG and its applications are currently used by two securities companies in their investment banking businesses.
Person: Le Tuan Anh (sha1: fbd2145490116fd5dc78374fdbc51bf12ed646bb)

Paper: SPORTAL: Searching for Public SPARQL Endpoints
  Keywords: web querying; linked data; sparql
  Abstract: There are hundreds of SPARQL endpoints on the Web, but finding an endpoint relevant to a client's needs is difficult: each endpoint acts like a black box, often without a description of its content. Herein we briefly describe SPORTAL: a system that collects meta-data about the content of endpoints and gathers them into a central catalogue over which clients can search. SPORTAL sends queries to individual endpoints offline to learn about their content, generating a best-effort VoID description for each endpoint. These descriptions can then be searched and queried over by clients in the SPORTAL user interface, for example, to find endpoints that contain instances of a given class, or triples with a given predicate, or more complex requests such as endpoints with at least 1,000 images of people. Herein we give a brief overview of SPORTAL, its design and functionality, and the features that shall be demoed at the conference.
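SPORTAL's catalogue is built from probe queries sent offline to each endpoint. A minimal Apache Jena sketch of one such probe follows; the particular query (class-instance counts, one ingredient of a VoID description) is an assumption, as the abstract does not list SPORTAL's actual queries.

```java
import org.apache.jena.query.*;

/** One offline probe a SPORTAL-style catalogue could issue against an endpoint. */
public class EndpointProbe {
	public static void main(String[] args) {
		// Which classes does this endpoint contain, and how many instances of each?
		String query = "SELECT ?class (COUNT(?s) AS ?n) WHERE { ?s a ?class } "
				+ "GROUP BY ?class ORDER BY DESC(?n) LIMIT 10";
		try (QueryExecution qe = QueryExecutionFactory
				.sparqlService("https://dbpedia.org/sparql", query)) {
			ResultSet results = qe.execSelect();
			while (results.hasNext()) {
				QuerySolution row = results.next();
				System.out.println(row.getResource("class") + "\t" + row.getLiteral("n").getLong());
			}
		}
	}
}
```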
Person: Jérôme Euzenat (sha1: 8347bc1d33e9e27f680aaaffa43de37a27f12991)

Paper: Walking without a Map: Ranking-Based Traversal for Querying Linked Data
  Keywords: Traversal-based Query Execution; Query Processing; SPARQL; Linked Data Queries
  Abstract: The emergence of Linked Data on the WWW has spawned research interest in an online execution of declarative queries over this data. A particularly interesting approach is traversal-based query execution, which fetches data by traversing data links and, thus, is able to make use of up-to-date data from initially unknown data sources. While the downside of this approach is the delay before the query engine completes a query execution, user-perceived response time may be improved significantly by returning as many elements of the result set as soon as possible. To this end, the query engine requires a traversal strategy that enables the engine to fetch result-relevant data as early as possible. The challenge for such a strategy is that the query engine does not know a priori what data sources will be discovered during the query execution and which of them contain result-relevant data. In this paper, we investigate 14 different approaches to rank traversal steps and achieve a variety of traversal strategies. We experimentally study their impact on response times and compare them to a baseline that resembles a breadth-first traversal. While our experiments show that some of the approaches can achieve noteworthy improvements over the baseline in a significant number of cases, we also observe that for every approach, there is a non-negligible chance to achieve response times that are worse than the baseline.
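The structural difference between the paper's ranking-based strategies and the breadth-first baseline is small: a priority queue replaces the FIFO queue of the traversal. A Java skeleton of that idea follows; the scoring is left external, since the 14 ranking approaches themselves are not detailed in the abstract.

```java
import java.util.*;
import java.util.function.Function;

/** Skeleton of ranking-based link traversal: the score map decides which
 *  discovered source is dereferenced next (lower score first). With equal
 *  scores this degenerates to an arbitrary-order traversal; a real engine
 *  would also re-prioritise entries as scores are updated mid-execution. */
public class RankedTraversal {

	public static void traverse(String seedUri,
			Function<String, List<String>> dereference, // fetch a document, return links found in it
			Map<String, Double> scores) {
		PriorityQueue<String> frontier = new PriorityQueue<>(
				Comparator.comparingDouble(uri -> scores.getOrDefault(uri, Double.MAX_VALUE)));
		Set<String> seen = new HashSet<>();
		frontier.add(seedUri);
		seen.add(seedUri);
		while (!frontier.isEmpty()) {
			String next = frontier.poll(); // best-ranked traversal step
			for (String link : dereference.apply(next)) {
				if (seen.add(link)) {
					frontier.add(link);
				}
			}
		}
	}
}
```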
Paper: SemFacet: Faceted Search over Ontology Enhanced Knowledge Graphs
  Keywords: knowledge graphs; faceted search; OWL; SPARQL; RDF
  Abstract: In this demo we present the SemFacet system for faceted search over ontology enhanced Knowledge Graphs (KGs) stored in RDF. SemFacet allows users to query KGs with relatively complex SPARQL queries via an intuitive Amazon-like interface. SemFacet can compute faceted interfaces over large scale RDF datasets by relying on incremental algorithms, and over large ontologies by exploiting ontology projection techniques. SemFacet relies on an in-memory triple store, and the current implementation bundles JRDFox, Sesame, Stardog, and PAGOdA. During the demonstration the attendees can try SemFacet by exploring the Yago KG.

Paper: Interoperability for Smart Appliances in the IoT World
  Keywords: smart appliances; standardization; Internet of Things; semantic interoperability; Ontology
  Abstract: Household appliances are set to become highly intelligent, smart and networked devices in the near future. Systematically deployed on the Internet of Things (IoT), they would be able to form complete energy consuming, producing, and managing ecosystems. Smart systems are technically very heterogeneous, and standardized interfaces on a sensor and device level are therefore needed. However, standardization in IoT has largely focused on the technical communication level, leading to a large number of different solutions based on various standards and protocols, with limited attention to the common semantics contained in the message data structures exchanged at the technical level. The Smart Appliance REFerence ontology (SAREF) is a shared model of consensus developed in close interaction with the industry and with the support of the European Commission. It is published as a technical specification by ETSI and provides an important contribution to achieving semantic interoperability for smart appliances. This paper builds on the success achieved in standardizing SAREF and presents SAREF4EE, an extension of SAREF. SAREF4EE has been created in collaboration with the EEBus and Energy@Home industry associations to interconnect their (different) data models. By using SAREF4EE, smart appliances from different manufacturers that support the EEBus or Energy@Home standards can easily communicate with each other using any energy management system at home or in the cloud.
Paper: A Reuse-based Annotation Approach for Medical Documents
  Keywords: Semantic annotation; ontology; medical documents; UMLS
  Abstract: Annotations are useful to semantically enrich documents and other datasets with concepts of standardized vocabularies and ontologies. In the medical domain, many documents are not annotated at all and manual annotation is a difficult and time-consuming process. Therefore, automatic annotation methods become necessary to support human annotators with recommendations. We propose a reuse-based annotation approach that clusters items in medical documents according to verified ontology-based annotations. We identify a set of representative features for annotation clusters and propose a context-based selection strategy that considers the semantic relatedness and frequent co-occurrences of annotated concepts. We evaluate our methods and the annotation tool MetaMap based on reference mappings between medical forms and the Unified Medical Language System.
Person: Peter Mechant (sha1: eb165935f24f2953766c8aea84e423e7e6161ff4)

Organisation: USC / Information Sciences Institute

Talk: Interoperability for Smart Appliances in the IoT World
  Authors: Laura M. Daniele, Monika Solanki, Frank Den Hartog and Jasper Roes
  Slot: 2016-10-19T15:00:00 to 2016-10-19T15:20:00

Organisation: University of Mannheim

Person: Marco Brambilla (sha1: 2bffe0887e5bf93e4fe6db921c7f6798a9e65201)
Person: Giancarlo Guizzardi (sha1: eb6f0b160b878341c5b8feb6b52ab68305ec6433)
Person: Francois Goasdoue (sha1: 226862c37f70b29f9a1b6304a1dad68473ff91c9)

Paper: KeywDB: A System for Keyword-Driven Ontology-to-RDB Mapping Construction
  Keywords: Keywords; OBDA; Mapping construction
  Abstract: In ontology-based data access (OBDA) the users access relational databases (RDBs) via ontologies that mediate between the users and the data. Ontologies are connected to data via declarative ontology-to-RDB mappings that relate each ontological term to an SQL query. In this demo we present our system KeywDB that facilitates construction of ontology-to-RDB mappings in an interactive fashion. In KeywDB users provide examples of entities for classes that require mappings and the system returns a ranked list of such mappings. In doing so KeywDB relies on techniques for keyword query answering over RDBs. During the demo the attendees will try KeywDB with NorthWind and NPD FP databases and collections of mappings that we prepare.
Talk: Building event-centric knowledge graphs from news
  Authors: Marco Rospocher, Marieke van Erp, Piek Vossen, Antske Fokkens, Itziar Aldabe, German Rigau, Aitor Soroa, Thomas Ploeger, Tessel Bogaard
  Slot: 2016-10-21T11:30:00 to 2016-10-21T11:50:00

Organisation: Haiphong University

Session: Poster 2, 2016-10-18T15:15:00 to 2016-10-18T15:45:00

Paper: Ontop of Geospatial Databases
  Keywords: GeoSPARQL; geospatial OBDA; geospatial databases
  Abstract: In this paper we propose an OBDA approach for accessing geospatial data stored in geospatial relational databases, using the OGC standard GeoSPARQL and R2RML or OBDA mappings. We introduce extensions to existing SPARQL-to-SQL approaches to support GeoSPARQL features. We describe the implementation of our approach in the system ontop-spatial, an extension of the OBDA system Ontop for creating virtual geospatial RDF graphs on top of geospatial relational databases. Lastly, we present an experimental evaluation of our system using the workload and queries from a recent benchmark. In order to measure the performance of our system, we compare it to the state-of-the-art geospatial RDF store, and confirm its efficiency.
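For context on what ontop-spatial answers over its virtual RDF graphs, here is a sketch of a GeoSPARQL query executed via Apache Jena. The endpoint URL and the spatial filter polygon are placeholders, not taken from the paper.

```java
import org.apache.jena.query.*;

/** Sketch: select features whose WKT geometry lies within a fixed polygon,
 *  using the GeoSPARQL vocabulary and the geof:sfWithin filter function. */
public class GeoQuery {
	public static void main(String[] args) {
		String query =
			"PREFIX geo:  <http://www.opengis.net/ont/geosparql#>\n" +
			"PREFIX geof: <http://www.opengis.net/def/function/geosparql/>\n" +
			"SELECT ?feature WHERE {\n" +
			"  ?feature geo:hasGeometry ?g .\n" +
			"  ?g geo:asWKT ?wkt .\n" +
			"  FILTER(geof:sfWithin(?wkt,\n" +
			"    \"POLYGON((23.5 37.8, 24.0 37.8, 24.0 38.1, 23.5 38.1, 23.5 37.8))\"^^geo:wktLiteral))\n" +
			"}";
		// Assumed local ontop-spatial (or other GeoSPARQL-capable) endpoint:
		try (QueryExecution qe = QueryExecutionFactory
				.sparqlService("http://localhost:8080/sparql", query)) {
			qe.execSelect().forEachRemaining(row -> System.out.println(row.getResource("feature")));
		}
	}
}
```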
Person: Allan Third (sha1: 4806a6b7bed87a607cf39dc4db433512253f594f)

Session: Poster 1, 2016-10-18T12:00:00 to 2016-10-18T12:30:00

Person: Ralf Möller (sha1: eab5f5d1e7d7c9a899ff51817294075e192ce2fc)
Person: Eleni Kaldoudi
Person: Louis Jachiet (sha1: 24544b392104f565a6dee56a057f2ad429b32949)

Organisation: National Agriculture and Food Research Organization
Organisation: Hitachi, Ltd.

Paper: Towards a distributed, scalable and real-time RDF Stream Processing engine
  Keywords: Distributed Computing; Semantic Web; RDF; Stream Processing; RSP
  Abstract: Due to the growing need to timely process and derive valuable information and knowledge from data produced in the Semantic Web, RDF stream processing (RSP) has emerged as an important research domain. Of course, modern RSP engines have to address the volume and velocity characteristics encountered in the Big Data era. This comes at the price of designing high-throughput, low-latency, fault-tolerant, highly available and scalable engines. The cost of implementing such systems from scratch is very high, and usually one prefers to program components on top of a framework that possesses these properties, e.g., Apache Hadoop or Apache Spark. The research conducted in this PhD adopts this approach and aims to create a production-ready RSP engine based on domain standards, e.g., Apache Kafka and Spark Streaming. In a nutshell, the engine aims to i) address basic event modeling, to guarantee the completeness of input data in window operators; ii) process real-time RDF streams in a distributed manner, since efficient RDF stream handling is required; iii) support and extend common continuous SPARQL syntax, so as to be easy to use and adapted to industrial needs; and iv) support reasoning services at both the data preparation and query processing levels.
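To make the ingestion side of such an engine concrete, here is a minimal Java sketch that reads RDF statements from an Apache Kafka topic (one of the standards the abstract names) and groups them into tumbling windows. The topic name, broker address, N-Triples payload format and 10-second window are all assumptions for illustration.

```java
import java.time.Duration;
import java.util.*;
import org.apache.kafka.clients.consumer.*;

/** Sketch: consume RDF statements from Kafka and close a tumbling window
 *  every 10 seconds; a real RSP engine would hand each closed window to
 *  its continuous-query evaluator instead of printing a count. */
public class RdfStreamWindows {
	public static void main(String[] args) {
		Properties props = new Properties();
		props.put("bootstrap.servers", "localhost:9092"); // assumed local broker
		props.put("group.id", "rsp-demo");
		props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
			consumer.subscribe(Collections.singletonList("rdf-stream")); // assumed topic
			long windowStart = System.currentTimeMillis();
			List<String> window = new ArrayList<>();
			while (true) {
				for (ConsumerRecord<String, String> rec : consumer.poll(Duration.ofMillis(200))) {
					window.add(rec.value()); // one N-Triples statement per record (assumed)
				}
				if (System.currentTimeMillis() - windowStart >= 10_000) {
					System.out.println("window closed with " + window.size() + " triples");
					window.clear();
					windowStart += 10_000;
				}
			}
		}
	}
}
```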
Paper: Data Acquisition by Traversing Class-Class Relationships over the Linked Open Data
  Keywords: data integration; class-class relationships; linked data
  Abstract: Linked Open Data (LOD) is a powerful mechanism for linking different datasets published on the Web, which is expected to create new value from data through mash-ups over various datasets on the Web. One of the important needs when obtaining data from LOD is to find a path of resources connecting two given classes, each of which has an end resource of the path. In this study, two technologies for this approach are introduced: a labeled multigraph named the class graph to compute class-class relationships, and an RDF specification named SPARQL Builder Metadata to obtain and store the metadata required for the construction of a class graph. In addition, as a practical application, we introduce the SPARQL Builder system, which assists users in writing semantic queries for LOD.
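The class graph lends itself to standard shortest-path search. A toy Java version follows: nodes are classes, labelled edges are properties connecting their instances, and a breadth-first search returns a shortest property path between two given classes. The data model (one target class per property and class) is a simplification, not the paper's actual structure.

```java
import java.util.*;

/** Toy class graph with BFS over labelled edges. */
public class ClassGraph {
	// from-class -> (property -> to-class); one target per property keeps the toy simple
	private final Map<String, Map<String, String>> edges = new HashMap<>();

	public void addEdge(String fromClass, String property, String toClass) {
		edges.computeIfAbsent(fromClass, k -> new LinkedHashMap<>()).put(property, toClass);
	}

	/** Returns the property labels of a shortest path, or null if none exists. */
	public List<String> findPropertyPath(String start, String goal) {
		Map<String, List<String>> pathTo = new HashMap<>();
		Deque<String> queue = new ArrayDeque<>();
		pathTo.put(start, new ArrayList<>());
		queue.add(start);
		while (!queue.isEmpty()) {
			String cls = queue.poll();
			if (cls.equals(goal)) {
				return pathTo.get(cls);
			}
			for (Map.Entry<String, String> e
					: edges.getOrDefault(cls, Collections.emptyMap()).entrySet()) {
				if (!pathTo.containsKey(e.getValue())) {
					List<String> path = new ArrayList<>(pathTo.get(cls));
					path.add(e.getKey());
					pathTo.put(e.getValue(), path);
					queue.add(e.getValue());
				}
			}
		}
		return null;
	}
}
```

For instance, after `addEdge("Gene", "encodes", "Protein")` and `addEdge("Protein", "involvedIn", "Pathway")`, `findPropertyPath("Gene", "Pathway")` yields `[encodes, involvedIn]`.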
Organisation: Ordnance Survey Ireland

Person: Guus Schreiber
Person: Sebastian Brandt (sha1: 6af8641a60a97dbcc8df40418611e5ada4884405)

Talk: QuerioDALI: Question Answering over Dynamic and Linked Knowledge Graphs
  Authors: Vanessa Lopez, Pierpaolo Tommasi, Spyros Kotoulas and Jiewen Wu
  Slot: 2016-10-21T14:10:00 to 2016-10-21T14:30:00

Paper: Harnessing Crowds and Experts for Semantic Annotation of the Qur'an
  Keywords: disambiguation; Qur'an; semantic annotation
  Abstract: In this paper we illustrate how we harness the power of crowds and specialized experts through automated knowledge acquisition workflows for semantic annotation in specialized and knowledge-intensive domains. We undertake the special case of the Arabic script of the Qur'an, a widely studied manuscript, and apply a hybrid methodology of traditional 'crowdsourcing' augmented with 'expertsourcing' for semantically annotating its verses. We demonstrate that our proposed hybrid method presents a promising approach for achieving reliable annotations in an efficient and scalable manner, especially in cases where a high level of accuracy is required in knowledge-intensive and sensitive domains.
Person: Jonas Bulegon Gassen (sha1: c31290b982c8a8bf62516fc1b00799c03cab7b70)
Person: Steffen Lohmann (sha1: 2c3ceffc433179f5b0b2a067fccf89d08d4cad28)

Organisation: Mondeca

Person: Kazuhisa Seta (sha1: 8e4a9cdb6f8940a89d0ade82e430471527ac62fe)

Session: Dinner, 2016-10-20T18:30:00 to 2016-10-20T21:30:00

Talk: Constructing Curriculum Ontology and Dynamic Learning Path Based on Resource Description Framework
  Authors: Makoto Urakawa, Masaru Miyazaki, Hiroshi Fujisawa, Masahide Naemura and Ichiro Yamada
  Slot: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Paper: Development of an Ontology for an Integrated Image Analysis Platform to enable Global Sharing of Microscopy Imaging Data
  Keywords: Open Microscopy Environment; Microscopy image; Metadata; RDF/OWL
  Abstract: Imaging data are fundamental to life sciences. We aimed to construct a microscopy ontology for an integrated metadata database of optical and electron microscopy images combined with various bio-entities. To realise this, we applied the Resource Description Framework (RDF) to an Open Microscopy Environment (OME) data model, which is the de facto standard for describing optical microscopy images and experimental data. We translated the XML-based OME metadata into the base concept of the Web Ontology Language (OWL) as a trial of developing a microscopy ontology. We describe the OWL-based ontology of microscopy imaging data and propose 18 upper-level ontology concepts, covering previously missing concepts such as electron microscopy, phenotype data, biosample, and imaging conditions.
Person: Benjamin Cogrel

Talk: A Probabilistic Model for Time-Aware Entity Recommendation
  Authors: Lei Zhang and Achim Rettinger
  Slot: 2016-10-20T14:30:00 to 2016-10-20T14:50:00

Talk: Serving Ireland's Geospatial Information as Linked Data
  Authors: Christophe Debruyne, Eamonn Clinton, Lorraine McNerney, Atul Nautiyal and Declan O'Sullivan
  Slot: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Paper: Constructing Semantic Networks of Development Activities from Weekly Reports
  Keywords: Semantic MediaWiki; Information extraction; Development activity
  Abstract: In the development departments of some manufacturing companies, there are weekly reports describing the status of events, but they are poorly structured plain texts. In this report, we propose a method for constructing semantic networks of development activities from weekly reports. Our ontology-based method extracts things like events, status and agents from the reports, constructs relations between them, and creates Semantic MediaWiki pages from the semantic networks to visualize development activities. We show a use case applying the method to actual weekly reports and internal documents of a development department.
Person: Xin Wang (sha1: edd615f4b8d0a61bdd8770ec087c4b2ff442125f)

Organisation: Queen Mary University of London
Organisation: Shanghai Polytechnic University, China

Paper: Extracting Semantic Information for e-Commerce
  Keywords: Triple Extraction; Data Mining; Ontology Learning; e-commerce; Rakuten; Machine Learning
  Abstract: Rakuten Ichiba uses a taxonomy to organize the items it sells. Currently, the taxonomy classes that are relevant in terms of profit generation and difficulty of exploration are being manually extended with data properties deemed helpful to create pages that improve the user search experience and ultimately the conversion rate. In this paper we present a scalable approach that aims to automate this process, automatically selecting the relevant and semantically homogeneous subtrees in the taxonomy, extracting from semi-structured text in item descriptions a core set of properties and a popular subset of their ranges, then extending the covered range using relational similarities in free text. Additionally, our process automatically tags the items with the new semantic information and exposes them as RDF triples. We present a set of experiments showing the effectiveness of our approach in this business context.

Person: Adam Sotona (sha1: 571a1fb7a88168822ba35590b5ab4e9c7f47336c)
Person: Zhijia Fang (sha1: 292f9640243b1369299b50ffada02682bd7e70b4)

Paper: Representing RDF Stream Processing Queries in RSP-SPIN
  Keywords: RSP-QL; RDF Stream Processing; RSP-SPIN
  Abstract: A number of RDF Stream Processing (RSP) systems have been developed to support the processing of streaming Linked Data; however, due to the lack of a standardized query language, they all provide different SPARQL extensions. The RSP Community Group is in the process of developing a standardized RSP query language (RSP-QL), which incorporates many of the features of existing RSP language extensions. In this demo paper we describe how RSP-SPIN, a SPIN extension for representing RSP-QL queries, can be used to encapsulate RSP queries as RDF, forming a syntax-agnostic representation that can be used to support serialization into multiple RSP language extensions. This could be useful, for example, to reduce the effort required to produce and maintain RSP benchmarks, since developers can focus on a single representation per query, rather than manually implementing and validating queries for several languages in parallel.
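The core idea of RSP-SPIN, storing a query as RDF so it can later be serialized into different RSP dialects, can be illustrated with a few Jena model calls. The vocabulary IRIs below are placeholders, not the actual RSP-SPIN namespace, and the three window properties are only a plausible fragment of what a full query encoding needs.

```java
import org.apache.jena.rdf.model.*;

/** Sketch: encode window parameters of a continuous query as plain RDF. */
public class QueryAsRdf {
	public static void main(String[] args) {
		String ns = "http://example.org/rsp-spin#"; // placeholder namespace
		Model m = ModelFactory.createDefaultModel();
		m.createResource(ns + "query1")
				.addProperty(m.createProperty(ns, "fromNamedWindow"),
						m.createResource("http://example.org/stream/w1"))
				.addProperty(m.createProperty(ns, "windowRange"), "PT10S")  // 10 s range
				.addProperty(m.createProperty(ns, "windowStep"), "PT5S");   // 5 s slide
		m.write(System.out, "TURTLE"); // a dialect-specific serializer would read this back
	}
}
```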
Paper: Modeling OWL with Rules: The ROWL Protege Plugin
  Keywords: Protege plugin; Rule; Modeling tool; ROWL; Rule-to-OWL transformation; Protege
  Abstract: In our experience, some ontology modelers find it much easier to express logical axioms using rules rather than using OWL (or description logic) syntax. Based on recent theoretical developments on transformations between rules and description logics, we develop ROWL, a Protege plugin that allows users to enter OWL axioms by way of rules; the plugin then automatically converts these rules into OWL DL axioms if possible, and prompts the user in case such a conversion is not possible without weakening the semantics of the rule.
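One instance of the kind of rewriting ROWL automates, done by hand with the OWL API (this is a standard rule-to-DL transformation, not code from the plugin): the rule Person(?x) ^ hasChild(?x, ?y) -> Parent(?x) is equivalent to the OWL DL axiom SubClassOf(Person and (hasChild some Thing), Parent). The family namespace is a made-up example.

```java
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

/** Hand-rolled rule-to-OWL transformation for one simple rule shape. */
public class RuleToOwl {
	public static void main(String[] args) throws OWLOntologyCreationException {
		OWLOntologyManager man = OWLManager.createOWLOntologyManager();
		OWLDataFactory df = man.getOWLDataFactory();
		String ns = "http://example.org/family#"; // placeholder vocabulary
		OWLClass person = df.getOWLClass(IRI.create(ns + "Person"));
		OWLClass parent = df.getOWLClass(IRI.create(ns + "Parent"));
		OWLObjectProperty hasChild = df.getOWLObjectProperty(IRI.create(ns + "hasChild"));
		// Person(?x) ^ hasChild(?x,?y) -> Parent(?x)  becomes:
		OWLAxiom axiom = df.getOWLSubClassOfAxiom(
				df.getOWLObjectIntersectionOf(person,
						df.getOWLObjectSomeValuesFrom(hasChild, df.getOWLThing())),
				parent);
		OWLOntology ontology = man.createOntology();
		man.addAxiom(ontology, axiom);
		System.out.println(axiom);
	}
}
```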
Person: Markus Krötzsch (sha1: b7e14a29e6de4cc64fee9080f4e4db77a4226769)

Talk: Working process quantification in factory using wearable sensor device and ontology-based stream data processing
  Authors: Masao Watanabe, Kazunari Hashimoto, Seiya Inagi, Yohei Yamane, Seiji Suzuki and Hiroshi Umemoto
  Slot: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Organisation: Gulbenkian Science Institute

Person: Yutaka Mitsuishi (sha1: 55ac3c4c8c976c25d3ca0660b8dc64bc263d32e7)
Person: Alberto Tonon (sha1: 3b41efd1acf36406793166b43659b542c431776f)
Person: Spyros Kotoulas (sha1: e879e287903caecdd41354eb5ae7aff6d9bc741b)
Person: Martin Rezk (sha1: 1e42eb62ecf4ba2ca58a7d728117cc1458d12f4d)
Person: Paramita Mirza (sha1: 67eff0bc075f39ef28dfed00e58d3a115350ce0d)

Talk: Algebraic calculi for weighted ontology alignments
  Authors: Armen Inants, Manuel Atencia and Jérôme Euzenat
  Slot: 2016-10-20T10:30:00 to 2016-10-20T10:50:00

Person: Tessel Bogaard

Talk: A RDF based Portal of Biological Phenotype Data produced in Japan
  Authors: Terue Takatsuki, Mikako Saito, Sadahiro Kumagai, Eiki Takayama, Kazuya Ohshima, Nozomu Ohshiro, Kai Lenz, Nobuhiko Tanaka, Norio Kobayashi and Hiroshi Masuya
  Slot: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Paper: Cross-Language Record Linkage using Word Embedding driven Metadata Similarity Measurement
  Keywords: Word embedding; Semantic matching; Cross-language record linkage; Similarity measurement
  Abstract: Aiming to link records that refer to the same entity across multiple databases in different languages, we address the mismatches in wording between literal translations of metadata in the source language and metadata in the target language, which cannot be calculated by string-based measures. In this paper, we propose a method based on word embedding, which can capture the semantic similarity relationships among words. The effectiveness of this method is confirmed by linking the same records between Ukiyo-e (Japanese traditional woodblock printing) databases in Japanese and English. This method could be applied to other languages since it makes few assumptions about the language.
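The embedding-driven similarity at the heart of the record-linkage paper reduces to two small operations: embed each metadata value (here simply as the average of its word vectors, one common choice, though the paper's exact composition is not given in the abstract) and compare records by cosine similarity. The vectors themselves would come from a pretrained embedding model; here they are just method parameters.

```java
/** Average-of-word-vectors embedding plus cosine similarity. */
public final class EmbeddingSimilarity {

	/** Embed a metadata value as the mean of its word vectors. */
	public static double[] average(double[][] wordVectors) {
		double[] avg = new double[wordVectors[0].length];
		for (double[] v : wordVectors)
			for (int i = 0; i < v.length; i++) avg[i] += v[i] / wordVectors.length;
		return avg;
	}

	/** Cosine similarity in [-1, 1]; higher means more similar metadata. */
	public static double cosine(double[] a, double[] b) {
		double dot = 0, na = 0, nb = 0;
		for (int i = 0; i < a.length; i++) {
			dot += a[i] * b[i];
			na += a[i] * a[i];
			nb += b[i] * b[i];
		}
		return dot / (Math.sqrt(na) * Math.sqrt(nb));
	}
}
```

Because the comparison happens in the shared vector space rather than on the strings, "woodblock print" and a literal translation of the Japanese term can score as similar even though string measures would not match them.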
Paper: Are Names Meaningful? Quantifying Social Meaning on the Semantic Web
  Keywords: Social Meaning; Semantics; Information Compression
  Abstract: According to Semantic Web standards, IRIs are individual constants or predicate letters whose names are chosen arbitrarily and carry no formal meaning. At the same time it is a well-known aspect of Semantic Web pragmatics that IRIs are often constructed mnemonically, in order to be meaningful to a human interpreter. The latter has traditionally been termed 'Social Meaning', a concept that has been discussed but not yet quantitatively studied by the Semantic Web community. In this paper we use statistical model learning as a method to quantify the meaning that is (at least) encoded in Semantic Web names. We implement the approach and evaluate it over hundreds of thousands of datasets in order to illustrate its efficacy. Our experiments confirm that many Semantic Web names are indeed meaningful and, more interestingly, we provide a quantitative lower bound on how much meaning is (at least) encoded in names on a per-dataset basis. To our knowledge, this is the first paper about the interaction between social and formal meaning, as well as the first paper that uses statistical model learning as a method to quantify meaning in the Semantic Web context.
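The paper quantifies name meaning via statistical model learning; as a deliberately cruder substitute that conveys the same intuition, one can gauge the redundancy of a dataset's names by how well their concatenation compresses. Mnemonic naming schemes share structure and compress noticeably better than arbitrary identifiers would. This compression proxy is a swapped-in illustration, not the authors' method.

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

/** Compression-based proxy for redundancy (and hence mnemonic structure) in names. */
public class NameRedundancy {

	static int gzipSize(String s) throws IOException {
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		try (GZIPOutputStream gz = new GZIPOutputStream(bos)) {
			gz.write(s.getBytes(StandardCharsets.UTF_8));
		}
		return bos.size();
	}

	public static void main(String[] args) throws IOException {
		// Local names sharing a mnemonic scheme compress well relative to their length:
		String names = String.join("\n", "hasAuthor", "hasTitle", "hasAbstract", "hasKeyword");
		System.out.printf("%d raw bytes -> %d gzipped%n", names.length(), gzipSize(names));
	}
}
```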
Talk: Semantic Technologies for Data Analysis in Health Care
  Authors: Robert Piro, Ian Horrocks, Peter Hendler, Yavor Nenov, Boris Motik, Michael Rossman and Scott Kimberly
  Slot: 2016-10-21T14:30:00 to 2016-10-21T14:50:00

Person: Harald Eisenmann (sha1: a38f1850e7a622420ed1efd4c981c50c498236ca)
Person: Thomas Rebele (sha1: f32adc2de92c61cbdd9827bf8cf7ec11ab59adf9)

Organisation: University of Calabria

Talk: A Protege Plugin with Swift Linked Data Miner
  Authors: Jędrzej Potoniec and Agnieszka Ławrynowicz
  Slot: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Session: Natural Language Processing, 2016-10-21T10:30:00 to 2016-10-21T11:50:00

Person: Jörg Waitelonis (sha1: 8d447b7f5197ba83c5a83212d5b40bb3e7022caf)

Organisation: Univ. Rennes 1 and INRIA

Talk: MusicWeb: music discovery with open linked semantic metadata
  Authors: Alo Allik, Mariano Mora-Mcginity, Gyorgy Fazekas and Mark Sandler
  Slot: 2016-10-19T18:00:00 to 2016-10-19T21:00:00

Person: Werner Nutt (sha1: 393b51a0cc241618aa88bf7821c6def528afe7da)
Person: Giannis Mouchakis (sha1: 7e4f8b78f38f5fffb33304fc1fd80b43ccc792fa)

Organisation: EURECOM

Session: Minute Madness, 2016-10-19T15:50:00 to 2016-10-19T17:10:00

Paper: An Evaluation of VIG with the BSBM Benchmark
  Keywords: Data Scaling; Evaluation; OBDA Benchmark
  Abstract: In this paper we present an experimental evaluation of VIG, a data scaler for OBDA benchmarks. Data scaling is a relatively recent approach, proposed in the database community, that allows for quickly scaling an input data instance to s times its size, while preserving certain application-specific characteristics. The advantages of scaling are that the generator is general, in the sense that it can be re-used on different database schemas, and that users are not required to manually input the data characteristics. VIG lifts the scaling approach from the database level to the OBDA level, where the domain information of ontologies and mappings has to be taken into account as well. To evaluate the quality of VIG, in this paper we use it to generate data for the Berlin SPARQL Benchmark (BSBM) and compare it with the official BSBM data generator.
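To make the data-scaling idea concrete, here is a toy Java scaler that replicates an input table s times while remapping the key column so the copies stay disjoint. Real scalers such as VIG must additionally preserve foreign keys and value distributions, and, at the OBDA level, the constraints implied by mappings and the ontology; none of that is modeled here.

```java
import java.util.*;

/** Toy data scaler: s disjoint copies of the input rows. */
public class DataScaler {

	/** rows: each String[] is one tuple; column 0 is assumed to be the primary key. */
	public static List<String[]> scale(List<String[]> rows, int s) {
		List<String[]> out = new ArrayList<>();
		int n = rows.size();
		for (int copy = 0; copy < s; copy++) {
			for (int i = 0; i < n; i++) {
				String[] r = rows.get(i).clone();
				r[0] = String.valueOf(copy * n + i); // fresh key keeps copies disjoint
				out.add(r);
			}
		}
		return out;
	}
}
```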
People: Damien Graux; Martin Giese
Organisation: University of Milano-Bicocca

Paper: Semantic Audit Application
Keywords: compliance check; ontology matching; ontology learning; process ontology; erasmus mobility
Abstract: Compliance checking of business processes executed by auditors requires analyzing documents, e.g. log files and business process models, against requirements derived from reference guidelines. This paper presents a forward compliance checking application that facilitates conformant behavior by detecting organizational operations and their deviations based on these documents in a semantic way. The application has been tested on the Internalization process with respect to Erasmus mobility.
Presentation: User validation in ontology alignment (2016-10-20, 11:10–11:30) — Zlatan Dragisic, Valentina Ivanova, Patrick Lambrix, Daniel Faria, Ernesto Jiménez-Ruiz and Catia Pesquita

People: Benedikt Kämpgen; Gerard de Melo

Paper: A-Posteriori Data Integration for Life Sciences
Keywords: Life Sciences Data; SPARQL Endpoint; Autonomous Resource Discovery and Indexing
Abstract: Multiple datasets that add high value to biomedical research have been exposed on the web as part of the Life Sciences Linked Open Data (LS-LOD) Cloud. The ability to easily navigate through these datasets is crucial in order to draw meaningful biological correlations. However, navigating these multiple datasets is not trivial, as most of them are only available as isolated SPARQL endpoints with very little vocabulary reuse. We propose an approach for Autonomous Resource Discovery and Indexing (ARDI), a set of configurable rules which can be used to discover links between biological entities in the LS-LOD cloud. We have catalogued and linked concepts and properties from 137 public SPARQL endpoints. The ARDI is used to dynamically assemble queries retrieving data from multiple SPARQL endpoints simultaneously.

Paper: A Replication Study of the Top Performing Systems in SemEval Twitter Sentiment Analysis
Keywords: Sentiment analysis; Twitter; SemEval; Replication study
Abstract: We performed a thorough replication study of the top systems performing in the yearly SemEval Twitter Sentiment Analysis task. We highlight some differences between the results obtained by the top systems and the ones we are able to compute. We also propose SentiME, an ensemble system composed of five state-of-the-art sentiment classifiers. SentiME first trains the different classifiers using the Bootstrap Aggregating Algorithm; the classification results are then aggregated using a linear function that averages the classification distributions of the different classifiers. SentiME has also been tested over the SemEval2015 test set, properly trained with the SemEval2015 training set, outperforming the best-ranked system of the challenge.
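The aggregation step named in the SentiME abstract (averaging the classification distributions of several classifiers) can be made concrete with a small sketch; the classifier internals and label set are assumptions, not details from the paper.

from collections import defaultdict

LABELS = ("positive", "neutral", "negative")

def aggregate(distributions):
    """Average a list of {label: probability} dicts and pick the argmax."""
    totals = defaultdict(float)
    for dist in distributions:
        for label in LABELS:
            totals[label] += dist.get(label, 0.0) / len(distributions)
    return max(LABELS, key=lambda l: totals[l]), dict(totals)

# Example with two hypothetical classifier outputs for one tweet.
print(aggregate([
    {"positive": 0.6, "neutral": 0.3, "negative": 0.1},
    {"positive": 0.3, "neutral": 0.5, "negative": 0.2},
]))  # ('positive', {'positive': 0.45, 'neutral': 0.4, 'negative': 0.15})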
Paper: Improving Open Data Usability through Semantics
Keywords: Open Data; Linked Data; CSV; semantic table interpretation; table annotation; semantic labeling; related tables
Abstract: With the success of Open Data, a huge amount of tabular data has become available that could potentially be mapped and linked into the Web of (Linked) Data. The use of semantic web technologies would then allow exploring related content and enhanced search functionality across data portals. However, existing linkage and labeling approaches mainly rely on mappings of textual information to classes or properties in knowledge bases. In this work we outline methods to recover the semantics of tabular Open Data and to identify related content, which allows a mapping and automated integration/categorization of Open Data resources and improves the overall usability and quality of Open Data.
Session: Multilinguality (2016-10-19, 14:00–15:20)

People: Stefan Decker; Xiaowang Zhang

Presentation: EnergyUse - A Collaborative Semantic Platform for Monitoring and Discussing Energy Consumption (2016-10-19, 14:40–15:00) — Gregoire Burel, Lara S. G. Piccolo and Harith Alani

Presentation: Enabling combined software and data engineering: the ALIGNED suite of ontologies (2016-10-19, 18:00–21:00) — Monika Solanki

Person: Riccardo Rosati

Presentation: FarolApp: Live Linked Data on Light Pollution (2016-10-19, 18:00–21:00) — Nandana Mihindukulasooriya, Esteban Gonzalez, Fernando Serena, Carlos Badenes and Oscar Corcho

Paper: Jailbreaking your reference lists: the OpenCitations strike again
Keywords: OpenCitations; OpenCitations Corpus; Citation Database; Semantic Publishing; Scholarly Communication
Abstract: In this poster paper we provide an overview of the OpenCitations project and of its main outcome, the OpenCitations Corpus: an open repository of scholarly citation data, made available under a Creative Commons public domain dedication, that provides accurate citation information in RDF harvested from the scholarly literature.
Organisation: Shanghai Hi-knowledge Information Technology Corporation
People: Daisuke Horyu; Bijan Parsia

Presentation: pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) (2016-10-19, 18:00–21:00) — Hong Fang and Xiaowang Zhang

People: Biligsaikhan Batjargal; Joao Paulo Almeida

Paper: Ontology Refinement and Evaluation System based on Similarity of Is-a Hierarchies
Keywords: ontology; refinement; evaluation; consistency of criteria for classification
Abstract: Ontologies are constructed in various fields such as medical information and mechanical design. It is important to build high-quality ontologies so that they can be used as knowledge bases and knowledge models for application systems. However, it is hard to build good-quality ontologies, because doing so requires both knowledge of ontology and expertise in the target domain. Against this background, ontology construction and refinement cost a lot of time and effort. In order to reduce such costs, we developed an ontology refinement support system. The system has two main functions: first, it can detect points that should be refined and propose how to refine them; second, it can evaluate ontologies quantitatively, indicating how consistent an ontology is in its classificatory criteria. To develop the refinement support system, we focus on a guideline for building well-organized ontologies: "Each subclass of a super class is distinguished by the values of exactly one attribute of the super class." When an ontology is built following this guideline, there is similarity among its is-a hierarchies. We use these similar is-a hierarchies to develop an ontology refinement system.
Paper: A Unified Interface for Optimizing Continuous Query in Heterogeneous RDF Stream Processing Systems
Keywords: RDF stream processing; RSP system; Unified query interface
Abstract: The W3C RDF Stream Processing (RSP) community has proposed both a common model and a language for querying RDF streams. However, the current implementations of RSP systems differ significantly from each other in terms of performance. In this paper, we propose a unified interface for optimizing a continuous query in heterogeneous RSP systems. To enhance the performance of RSP, the unified interface decomposes a query, reassembles the partial queries, and assigns them to appropriate RSP systems. Experimental results show that the proposed approach performs better in terms of memory consumption and latency.
Person: George Gkotsis

Presentation: PRONA: A Plugin for Well-Designed Approximate Queries in Jena (2016-10-19, 18:00–21:00) — Zhenyu Song, Xiaowang Zhang and Zhiyong Feng

Organisation: Fondazione Bruno Kessler
People: Simen Heggestøyl; Takeshi Masuda; Yousra Chabchoub
Organisation: CWI

Paper: DBpedia Entity Type Inference Using Categories
Keywords: DBpedia; Category; Type Inference
Abstract: In this paper, we investigate how to identify entity types based on entity category information. In particular, we first calculate the statistical distribution of each category over all the types. We then generate type candidates according to distribution probability. Finally, we identify the correct type according to distribution probability and keywords in the category and abstract. To evaluate the effectiveness of the approach, we conduct preliminary experiments on a real-world dataset from DBpedia. Experimental results indicate that our approach is effective in identifying entity types.
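The category-distribution step in the type-inference abstract can be pictured as follows: estimate P(type | category) from typed training entities, then score a new entity's categories against every type. The data values and function names are invented for illustration; the paper's additional keyword signals are omitted.

from collections import Counter, defaultdict

def train(entities):
    """entities: iterable of (categories, type) pairs -> P(type | category)."""
    counts = defaultdict(Counter)
    for categories, etype in entities:
        for cat in categories:
            counts[cat][etype] += 1
    return {cat: {t: n / sum(c.values()) for t, n in c.items()}
            for cat, c in counts.items()}

def infer_type(categories, dist):
    # Sum per-category type probabilities and return the best-scoring type.
    scores = Counter()
    for cat in categories:
        for etype, p in dist.get(cat, {}).items():
            scores[etype] += p
    return scores.most_common(1)[0][0] if scores else None

dist = train([({"German physicists"}, "Scientist"),
              ({"German physicists", "Nobel laureates"}, "Scientist"),
              ({"Rock albums"}, "Album")])
print(infer_type({"Nobel laureates"}, dist))  # Scientist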
People: Stefano Faralli; Jiewen Wu; Raf Buyle; Qaiser Mehmood; Andreas Steigmiller

Presentation: Building and Exploring National-wide Enterprise Knowledge Graphs for Investment Analysis in an Incremental Way (2016-10-19, 11:20–11:40) — Tong Ruan, Lijuan Xue, Haofen Wang, Fanghuai Hu, Liang Zhao and Jun Ding

Person: Victor Christen

Paper: Implementing Customer Reception Service in Robot Cafe using Stream Reasoning and ROS based on PRINTEPS
Keywords: PRINTEPS; ROS; Stream Reasoning; SWRL
Abstract: We have developed PRactical INTElligent aPplicationS (PRINTEPS), a platform for developing comprehensive intelligence applications. This paper introduces an application of PRINTEPS for customer reception service in a robot cafe, using stream reasoning and the Robot Operating System (ROS) based on PRINTEPS and integrating image sensing with knowledge processing. Based on this platform, we demonstrate that the behaviors of a robot in a robot cafe can be modified by changing the applicable rule sets.
Organisation: Aalborg University
People: Mikako Saito; Julien Subercaze; Kavitha Srinivas

Presentation: Unsupervised Entity Resolution on Multi-type Graphs (2016-10-20, 11:30–11:50) — Linhong Zhu, Majid Ghasemi-Gol, Pedro Szekely, Aram Galstyan and Craig Knoblock

Person: Tuan-Dung Cao
Organisation: Knowledge Media Institute, The Open University
People: Angelo Antonio Salatino; Muhammad Amith; Simon Razniewski; Veronika Thost

Presentation: Are Names Meaningful? Quantifying Social Meaning on the Semantic Web (2016-10-20, 13:30–13:50) — Steven de Rooij, Wouter Beek, Stefan Schlobach and Frank Van Harmelen

Organisation: EPFL

Paper: Towards Analytics Aware Ontology Based Access to Static and Streaming Data
Keywords: OBDA; Analytics; Diagnostics; Static Data; Streaming Data; Description Logic
Abstract: Real-time analytics that requires integration and aggregation of heterogeneous and distributed streaming and static data is a typical task in many industrial scenarios, such as diagnostics of turbines in Siemens. The OBDA approach has great potential to facilitate such tasks; however, it has a number of limitations in dealing with analytics that restrict its use in important industrial applications. Based on our experience with Siemens, we argue that in order to overcome those limitations OBDA should be extended and become analytics, source, and cost aware. In this work we propose such an extension. In particular, we propose an ontology, mapping, and query language for OBDA, where aggregate and other analytical functions are first-class citizens. Moreover, we develop query optimisation techniques that allow efficient processing of analytical tasks over static and streaming data. We implement our approach in a system and evaluate it with Siemens turbine data.
Organisation: Institute for High Performance Computing and Networking (ICAR-CNR)
Person: Ross Stirling

Paper: OntoBench: Generating Custom OWL 2 Benchmark Ontologies
Keywords: OWL 2; OWL coverage; benchmark; ontology; visualization; documentation
Abstract: A variety of tools for visualizing, editing, and documenting OWL ontologies have been developed in the last couple of years. The OWL coverage and conformance of these tools usually needs to be tested during development or for evaluation and comparison purposes. However, in particular for the testing of special OWL concepts and concept combinations, it can be tedious to find suitable ontologies and test cases. We have developed OntoBench, a generator for OWL 2 benchmark ontologies that can be used to test and compare ontology visualizers and related tools. In contrast to existing OWL benchmarks, OntoBench does not focus on scalability and performance but on OWL coverage and concept combinations. Consistent benchmark ontologies are dynamically generated based on OWL 2 language constructs selected in a graphical user interface. OntoBench is available on GitHub and as a public service, making it easy to use the tool and generate custom ontologies or ontology fragments.
Person: Silvio Peroni

Presentation: Integrating medical scientific knowledge with the semantically Quantified Self (2016-10-21, 13:30–13:50) — Allan Third, George Gkotsis, Eleni Kaldoudi, George Drosatos, Nick Portokallidis, Stefanos Roumeliotis, Kalliopi Pafilis and John Domingue

Paper: VoldemortKG: Mapping Schema.org Entities to Linked Open Data
Keywords: schema.org; Knowledge Graphs; Instance Matching; Data Integration; Dataset
Abstract: Increasingly, Web pages mix entities coming from different sources and represented in several different ways. It can thus happen that the same entity is both described by using schema.org annotations and by creating a text anchor pointing to its Wikipedia page. Often, those representations provide complementary information which is not exploited, since those entities are disjoint. In this project, we explore the extent to which entities represented in different ways repeat on the Web, how they are related, and how they complement (or link to) each other. Our initial experiments show that we can unveil a previously unexploited knowledge graph by applying simple instance matching techniques on a large collection of schema.org annotations and DBpedia. The resulting knowledge graph aggregates entities (often tail entities) scattered across several Web pages and complements existing DBpedia entities with new facts and properties. In order to facilitate further investigation of how to mine such information, we are releasing i) an excerpt of all CommonCrawl web pages containing both Wikipedia and schema.org annotations, ii) the toolset to extract this information and perform knowledge graph construction and mapping onto DBpedia, as well as iii) the resulting knowledge graph (VoldemortKG) obtained via label matching techniques.
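The "label matching techniques" the VoldemortKG abstract mentions can be sketched as a join on normalised labels between schema.org entities and DBpedia resources; the inputs below are hypothetical and the real pipeline is certainly more involved.

def normalise(label):
    # Lower-case and collapse whitespace so trivially different labels match.
    return " ".join(label.lower().split())

def match_by_label(schema_org_entities, dbpedia_labels):
    """schema_org_entities: {entity_id: label}; dbpedia_labels: {label: IRI}."""
    index = {normalise(l): iri for l, iri in dbpedia_labels.items()}
    return {eid: index[normalise(label)]
            for eid, label in schema_org_entities.items()
            if normalise(label) in index}

print(match_by_label(
    {"_:n1": "Tim  Berners-Lee"},
    {"Tim Berners-Lee": "http://dbpedia.org/resource/Tim_Berners-Lee"},
))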
Person: Nobuhiko Tanaka

Paper: Linked Sensor Data Generation using Queryable RML Mappings
Keywords: Linked Sensor Data; RML; TPF; integration; application; demo
Abstract: As the amount of generated sensor data is increasing, semantic interoperability becomes an important aspect in order to support efficient data distribution and communication. Therefore, the integration and fusion of (sensor) data is important, as this data comes from different data sources and might be in different formats. Furthermore, reusable and extensible methods for this integration and fusion are required in order to be able to scale with the growing number of applications that generate semantic sensor data. Current research efforts allow mapping sensor data to Linked Data in order to provide semantic interoperability. However, they lack support for multiple data sources, hampering integration and fusion, and the methods used are not available for reuse or are not extensible, which hampers the development of applications. In this paper, we describe how the RDF Mapping Language (RML) and a Triple Pattern Fragments (TPF) server are used to address these shortcomings. The demonstration consists of a microcontroller that generates sensor data. The data is captured and mapped to RDF triples using module-specific RML mappings, which are queried from a TPF server.
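RML mappings are declarative documents, so the sketch below is only a procedural stand-in showing the effect of mapping one raw sensor reading to RDF, here with the rdflib library. The namespace and property names are invented for the example.

from rdflib import Graph, Literal, Namespace, URIRef
from rdflib.namespace import RDF, XSD

EX = Namespace("http://example.org/sensors#")

def reading_to_rdf(sensor_id, value, timestamp):
    # Build one observation resource with type, sensor, value, and time.
    g = Graph()
    obs = URIRef(f"http://example.org/obs/{sensor_id}/{timestamp}")
    g.add((obs, RDF.type, EX.Observation))
    g.add((obs, EX.sensor, EX[sensor_id]))
    g.add((obs, EX.value, Literal(value, datatype=XSD.double)))
    g.add((obs, EX.time, Literal(timestamp, datatype=XSD.dateTime)))
    return g

print(reading_to_rdf("dht22-1", 21.5, "2016-10-19T15:50:00").serialize(format="turtle"))

In the demonstrated pipeline, such triples would be inserted into a server and exposed for querying; an actual RML mapping would express the same structure declaratively.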
Person: Michał Blinkiewicz
Organisation: INRIA

Session: Session 1 (2016-10-18, 11:00–12:00)

People: Stefan Negru; Jorge Gracia

Paper: QuerioDALI: Question Answering over Dynamic and Linked Knowledge Graphs
Keywords: Question Answering; Knowledge Graphs; Linked Data
Abstract: We present a domain-agnostic system for Question Answering over multiple semi-structured and possibly linked datasets, without the need for a training corpus. The system is motivated by an industry use case where Enterprise Data needs to be combined with a large body of Open Data to fulfill information needs not satisfied by prescribed application data models. Our proposed Question Answering pipeline combines existing components with novel methods to perform, in turn, linguistic analysis of a query, named entity extraction, entity/graph search, and fusion and ranking of possible answers. We evaluate QuerioDALI with two open-domain benchmarks and a biomedical one over Linked Open Data sources, and show that our system produces results comparable to systems that require training data and are domain-dependent. In addition, we analyze the current challenges and shortcomings.
Person: Anisa Rula

Session: Session 2 (2016-10-18, 14:00–15:15)

People: Giuseppe Pirrò; Anika Groß; Amit Joshi

Paper: Lowering knowledge: Making constrained devices semantically interoperable
Keywords: Knowledge lowering; IoT data management; Data enrichment; Semantic schema mapping
Abstract: Semantic interoperability is an issue in heterogeneous IoT systems. The limited processing power and memory storage of constrained IoT nodes prevent them from handling enriched data. This paper proposes a method to lower complex knowledge representations into simpler structured data, based on the reuse of lifting mappings from data schemas to semantic models.
Session: Session 3 (2016-10-18, 16:00–17:00)

Organisation: Department of Computer Science & Engineering, East China University of Science and Technology

Paper: Personalized robot interactions to intercept behavioral disturbances of people with dementia
Keywords: Dementia; Behavioral Disturbance; Internet of Robotic Things (IoRT); Robotics; eHealth
Abstract: People with Dementia (PwD) exhibit Behavioral Disturbances (BD) that can be alleviated by personalized interactions, revisiting memories and promoting comfort and quality of life. However, caregivers are unable to spend a lot of time on these interactions. This work-in-progress poster details the design and deployment of a semantic Internet of Robotic Things (IoRT) platform that enables personalized interactions of a robot with a PwD to reduce and intercept BDs.

Person: Cristina Feier

Paper: Semantic Technologies for Data Analysis in Health Care
Keywords: Semantic Technologies; Data Analysis; Datalog; SWRL; RDF; Triple store; RDFox; Forward Chaining; Materialisation
Abstract: One focus of Semantic Technologies is formalisms that allow expressing complex properties of, and relationships between, classes of data. The declarative nature of these formalisms is close to natural language and human conceptualisation, and thus Semantic Technologies enjoy increasing popularity in scenarios where traditional solutions lead to very convoluted procedures which are difficult to maintain and whose correctness is difficult to judge. A fruitful application of Semantic Technologies in the field of health care data analysis has emerged from the collaboration between Oxford and Kaiser Permanente, a US health care provider (HMO). US HMOs have to annually deliver measurement results on their quality of care to US authorities. One of these sets of measurements is defined in a specification called HEDIS, which is infamous amongst data analysts for its complexity. Traditional solutions with either SAS programs or SQL queries lead to involved solutions whose maintenance and validation are difficult and bind a considerable amount of resources. In this paper we present the project in which we applied Semantic Technologies to compute the most difficult part of the HEDIS measures. We show that we arrive at a clean, structured, and legible encoding of HEDIS in the rule language of the RDF triple store RDFox. We use RDFox's reasoning capabilities and SPARQL queries to compute and extract the results. The results of a whole Kaiser Permanente regional branch could be computed in competitive time by RDFox on readily available commodity hardware. Further development and deployment of the project results are envisaged in Kaiser Permanente.
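Purely to make the materialisation and forward-chaining vocabulary above concrete, here is a toy fixpoint loop over triples; it stands in for what an engine like RDFox does at scale and is not the project's actual rule set. The example rule and data are invented.

def materialise(facts, rules):
    """facts: set of triples; rules: functions mapping a fact set to derived facts."""
    facts = set(facts)
    while True:
        new = set()
        for rule in rules:
            # Keep only facts we have not derived yet.
            new |= rule(facts) - facts
        if not new:
            return facts  # Fixpoint reached: nothing new derives.
        facts |= new

# Example rule: memberOf is transitive.
def transitive_member(facts):
    return {(x, "memberOf", z)
            for (x, p1, y) in facts if p1 == "memberOf"
            for (y2, p2, z) in facts if p2 == "memberOf" and y2 == y}

base = {("alice", "memberOf", "planA"), ("planA", "memberOf", "regionWest")}
print(materialise(base, [transitive_member]))  # adds ('alice', 'memberOf', 'regionWest')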
Person: Daniel Garijo

Paper: A Probabilistic Model for Time-Aware Entity Recommendation
Keywords: Entity Recommendation; Time-awareness; Probabilistic Model
Abstract: In recent years, there have been increasing efforts to develop techniques for related-entity recommendation, where the task is to retrieve a ranked list of related entities given a keyword query. Another trend in the area of information retrieval (IR) is to take temporal aspects of a given query into account when assessing the relevance of documents. However, while this has become an established functionality in document search engines, the significance of time, especially when explicitly given, has not yet been recognized for entity recommendation. We address this gap by introducing the task of time-aware entity recommendation. We propose the first probabilistic model that takes time-awareness into consideration for entity recommendation, by leveraging heterogeneous knowledge of entities extracted from different data sources publicly available on the Web. We extensively evaluate the proposed approach, and our experimental results show considerable improvements compared to time-agnostic entity recommendation approaches.
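The abstract does not spell out the model, so the following is only a guessed shape of a time-aware ranking: textual relevance multiplied by a per-entity temporal prior estimated from time mentions. All names and numbers are invented for illustration.

def temporal_prior(entity_histogram, t, smoothing=1e-6):
    # Smoothed estimate of P(t | e) from a histogram of time mentions.
    total = sum(entity_histogram.values())
    return (entity_histogram.get(t, 0) + smoothing) / (total + smoothing)

def recommend(candidates, query_scores, histograms, t):
    """Rank candidate entities by P(e | q) * P(t | e)."""
    return sorted(candidates,
                  key=lambda e: query_scores[e] * temporal_prior(histograms[e], t),
                  reverse=True)

histograms = {"EURO_2016": {"2016": 90, "2012": 5},
              "EURO_2012": {"2012": 80, "2016": 10}}
scores = {"EURO_2016": 0.5, "EURO_2012": 0.5}
print(recommend(["EURO_2016", "EURO_2012"], scores, histograms, "2016"))
# ['EURO_2016', 'EURO_2012'] -- the temporal prior breaks the tie.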
Paper: XB: A Large-scale Korean Knowledge Base for Question Answering Systems
Keywords: knowledge base; knowledge resource; ontology; question answering system
Abstract: There are many studies on question answering systems that can answer natural language questions. Diverse techniques are required for building such a system, but it cannot be implemented without well-structured knowledge data. For this reason, we construct a large-scale knowledge base in Korean, with the goal of creating a uniquely Korean question answering system.

Organisation: Lancaster University

Paper: Visualizing Semantic Table Annotations with TableMiner+
Keywords: Semantic Table Interpretation; table annotation; Named Entity Disambiguation; Web table; Linked Data
Abstract: This paper describes an extension of the TableMiner+ system, the only open-source Semantic Table Interpretation system that annotates Web tables using Linked Data in an effective and efficient approach. It adds a graphical user interface to TableMiner+ to facilitate the visualization and correction of automatically generated annotations. This makes TableMiner+ an ideal tool for the semi-automatic creation of high-quality semantic annotations on tabular data, which facilitates the publication of Linked Data on the Web.
Organisation: University of Birmingham

Session: Embeddings & Neural Approaches (2016-10-20, 15:30–16:50)

People: José Luis Ambite; Joachim Van Herwegen; Victorio Albani Carvalho; Yao Meng; Dhavalkumar Thakker

Paper: Expressive Multi-Level Modeling for the Semantic Web
Keywords: multi-level modeling; metamodeling; OWL; semantic web
Abstract: In several subject domains, classes themselves may be subject to categorization, resulting in classes of classes (or "metaclasses"). When representing these domains, one needs to capture not only entities of different classification levels, but also their (intricate) relations. We observe that this is challenging in current Semantic Web languages, as there is little support to guide the modeler in producing correct multi-level ontologies, especially because of the nuances in the constraints that apply to entities of different classification levels and their relations. In order to address these representation challenges, we propose a vocabulary that can be used as a basis for multi-level ontologies in OWL, along with a number of integrity constraints to prevent the construction of inconsistent models. In this process we employ an axiomatic theory called MLT (a Multi-Level Modeling Theory).
Paper: Querying Dynamic Datasources with Continuously Mapped Sensor Data
Keywords: Linked Data Fragments; SPARQL; RML; dynamic data; Linked Data
Abstract: The world contains a large number of sensors that produce new data at a high frequency. It is currently very hard to find public services that expose these measurements as dynamic Linked Data. We investigate how sensor data can be published continuously on the Web at a low cost. This paper describes how the publication of various sensor data sources can be done by continuously mapping raw sensor data to RDF and inserting it into a live, low-cost server. This makes it possible for clients to continuously evaluate dynamic queries using public sensor data. For our demonstration, we will illustrate how this pipeline works for the publication of temperature and humidity data originating from a microcontroller, and how it can be queried. (A mapping sketch follows below.)
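To make the "continuously mapping raw sensor data to RDF" step concrete, here is a minimal Java sketch that turns one reading into N-Triples. The vocabulary IRIs are invented placeholders; the demo itself uses RML mapping rules, which this does not reproduce.

```java
import java.time.Instant;

// Hedged sketch: one sensor reading becomes a handful of N-Triples lines.
// All IRIs below are placeholder assumptions, not the demo's vocabulary.
public class SensorToRdf {
    static String mapReading(String sensorId, double celsius, Instant t) {
        String obs = "<http://example.org/obs/" + sensorId + "/" + t.toEpochMilli() + ">";
        return obs + " <http://example.org/vocab#sensor> <http://example.org/sensor/" + sensorId + "> .\n"
             + obs + " <http://example.org/vocab#celsius> \"" + celsius + "\" .\n"
             + obs + " <http://example.org/vocab#at> \"" + t + "\" .\n";
    }

    public static void main(String[] args) {
        System.out.print(mapReading("dht22-1", 21.5, Instant.now()));
    }
}
```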
Author: Thi-Nhu Nguyen (id: 828ae3c3ff393840381e683deef160054f888d2a)

Session: Reasoning (2016-10-21, 15:30–16:50)

Author: Lei Zhang (id: ce91cb3f688cea4a277c9dfc9f0f0a7f604bfec3)

Paper: Data Integration for the Media Value Chain
Keywords: tv and film production; semantic metadata; linked data integration
Abstract: With the switch from analog to digital technology, large amounts of data are created across the entire process of production, distribution, and archival of a film or TV program. Besides recorded and processed audiovisual information, in each single step of the production process, and furthermore throughout the entire media value chain, new metadata is created, administrated, and put into relation with already existing metadata mandatory for the management of these processes. Due to competing standards as well as proprietary and incompatible interfaces of the applied software tools, a significant amount of this metadata is lost again and not available for subsequent steps in the process chain. As a consequence, most of this valuable information has to be costly recreated in each single step of media production, distribution, and archival. Currently, there is no generally accepted or commonly used metadata exchange format that is applied throughout the media value chain. At the same time, the market for media production companies has changed dramatically, with the internet becoming the preferred distribution channel for all media content. The limited budgets available to media production companies today put additional pressure on working in a cost- and time-efficient way and not wasting resources on the costly reengineering of lost metadata. The dwerft project aims to apply Linked Data principles to all metadata exchange through all steps of the media value chain. Starting with the very first idea for a script, all metadata are mapped to either existing or newly developed ontologies to be reused in subsequent steps of the media value chain. Thus, metadata collected during media production becomes a valuable asset not only in each step from pre- to postproduction, but also in distribution and archival. This paper presents results of the dwerft project on the successful integration of a set of film production tools based on the Linked Production Data Cloud, a technology platform for the film and tv industry that enables software interoperability in production, distribution, and archival of audiovisual content.
Paper: Capturing Industrial Information Models with Ontologies and Constraints
Keywords: ontologies for manufacturing; ontology editor; ontologies
Abstract: This paper describes the outcomes of an ongoing collaboration between Siemens and the University of Oxford, with the goal of facilitating the design of ontologies and their deployment in applications. Ontologies are mainly used in Siemens to capture the conceptual information models underpinning a wide range of applications. We start by describing the key role that such models play in two use cases in the manufacturing and energy production sectors. Then, we discuss the formalisation of information models using ontologies, and the relevant reasoning services. Finally, we present SOMM, a tool that supports engineers with little background on semantic technologies in the creation of ontology-based models and in populating them with data. SOMM implements a fragment of OWL 2 RL extended with a form of integrity constraints for data validation, and it comes with support for schema and data reasoning, as well as for model integration. Our evaluation demonstrates the adequacy of SOMM's functionality and performance for Siemens applications. (A constraint-check sketch follows below.)
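As a hedged illustration of "integrity constraints for data validation" from the SOMM abstract above: a closed-world check that every instance of a class has a mandatory property value. The class, property, and data are invented; SOMM's actual constraint language is not reproduced here.

```java
import java.util.*;

// Hedged sketch: under a closed-world reading, every pump must carry at
// least one rotor-speed value. Names and data are illustrative assumptions.
public class ConstraintCheck {
    public static void main(String[] args) {
        Set<String> pumps = Set.of("pump1", "pump2");
        Map<String, List<Double>> rotorSpeed = Map.of("pump1", List.of(3000.0));
        for (String p : pumps) {
            if (rotorSpeed.getOrDefault(p, List.of()).isEmpty()) {
                System.out.println("violation: " + p + " has no rotor-speed value");
            }
        }
    }
}
```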
Paper: Distributed RDF Query Answering with Dynamic Data Exchange
Keywords: RDF; distributed query answering; data partitioning; SPARQL
Abstract: Evaluating joins over RDF data stored in a shared-nothing server cluster is key to processing truly large RDF datasets. To the best of our knowledge, the existing approaches use a variant of the data exchange operator that is inserted into the query plan statically (i.e., at query compile time) to shuffle data between servers. We argue that this often misses opportunities for local computation, and we present a novel solution to distributed query answering that consists of two main components. First, we present a query answering algorithm based on dynamic data exchange, which exploits data locality better than the static approaches. Second, we present a partitioning algorithm for RDF data based on graph partitioning whose aim is to increase data locality. We have implemented our approach in the RDFox system, and our performance evaluation suggests that our techniques outperform the state of the art by up to an order of magnitude. (A partitioning sketch follows below.)
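For context on the abstract above, here is the naive baseline such work improves upon: hash-partitioning triples by subject so that all triples about one subject land on the same server. This is explicitly not the paper's locality-aware graph partitioner, just the standard starting point.

```java
import java.util.*;

// Baseline illustration only: subject-hash partitioning of RDF triples
// across a fixed number of servers. Data and server count are invented.
public class SubjectHashPartitioner {
    record Triple(String s, String p, String o) {}

    public static void main(String[] args) {
        int servers = 4;
        List<Triple> triples = List.of(
                new Triple(":alice", ":knows", ":bob"),
                new Triple(":alice", ":age", "\"34\""),
                new Triple(":bob", ":knows", ":carol"));
        Map<Integer, List<Triple>> partitions = new HashMap<>();
        for (Triple t : triples) {
            int server = Math.floorMod(t.s().hashCode(), servers);
            partitions.computeIfAbsent(server, k -> new ArrayList<>()).add(t);
        }
        partitions.forEach((srv, ts) -> System.out.println("server " + srv + ": " + ts));
    }
}
```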
+ + + Diego Esteves + bf544080194f9f13e5c3116db1cb169cda21b76f + + + + + + + Diego Esteves + + + Diego Esteves + + + + + + + + + + + Gregoire Burel + + + Gregoire Burel + + + + 0b33c9bd854b746d01796c263e976d664bcdbda9 + Gregoire Burel + + + + + + + + + + + + + + + + + + + + + + + + + + + + Faceted search over RDF-based knowledge graphs + + + RDF + Algorithms + + + Ontology + + + + RDF + Faceted search + + + SPARQL + + + Knowledge graphs such as Yago and Freebase have become a powerful asset for enhancing search, and are being intensively used in both academia and industry. Many existing knowledge graphs are either available as Linked Open Data, or they can be exported as RDF datasets enhanced with background knowledge in the form of an OWL 2 ontology. Faceted search is the de facto approach for exploratory search in many online applications, and has been recently proposed as a suitable paradigm for querying RDF repositories. In this paper, we provide rigorous theoretical underpinnings for faceted search in the context of RDF-based knowledge graphs enhanced with OWL 2 ontologies. We identify well-defined fragments of SPARQL that can be naturally captured using faceted search as a query paradigm, and establish the computational complexity of answering such queries. We also study the problem of updating faceted interfaces, which is critical for guiding users in the formulation of meaningful queries during exploratory search. We have implemented our approach in a fully-fledged faceted search system, SemFacet, which we have evaluated over the Yago knowledge graph. + Faceted search over RDF-based knowledge graphs + Ontology + OWL 2 + Faceted search over RDF-based knowledge graphs + + + + SPARQL + Faceted search + Knowledge graphs such as Yago and Freebase have become a powerful asset for enhancing search, and are being intensively used in both academia and industry. Many existing knowledge graphs are either available as Linked Open Data, or they can be exported as RDF datasets enhanced with background knowledge in the form of an OWL 2 ontology. Faceted search is the de facto approach for exploratory search in many online applications, and has been recently proposed as a suitable paradigm for querying RDF repositories. In this paper, we provide rigorous theoretical underpinnings for faceted search in the context of RDF-based knowledge graphs enhanced with OWL 2 ontologies. We identify well-defined fragments of SPARQL that can be naturally captured using faceted search as a query paradigm, and establish the computational complexity of answering such queries. We also study the problem of updating faceted interfaces, which is critical for guiding users in the formulation of meaningful queries during exploratory search. We have implemented our approach in a fully-fledged faceted search system, SemFacet, which we have evaluated over the Yago knowledge graph. 
Author: Jan Wielemaker (id: b08951b8e7e528c076d4a62b8ba542ae4ac69789)
Author: Bahaa Eldesouky (id: 04ff3b804243a76261a8fc27f6d0033321e83ecc)

Paper: Building Urban LOD for Solving Illegally Parked Bicycles in Tokyo
Keywords: Urban LOD; Urban Problem; Illegally Parked Bicycles; Open Urban Data
Abstract: The illegal parking of bicycles is a social problem in Tokyo and other urban areas. The purpose of this study was to sustainably build Linked Open Data (LOD) for illegally parked bicycles and to support problem solving by raising social awareness, in cooperation with the Bureau of General Affairs of Tokyo. We first extracted information on the problem factors and designed an LOD schema for illegally parked bicycles. Then we collected pieces of data from Social Networking Services (SNS) and websites of municipalities to build the illegally parked bicycle LOD (IPBLOD) with more than 200,000 triples. We then estimated the missing data in the LOD based on the causal relations from the problem factors. As a result, the number of illegally parked bicycles can be inferred with 70.9% accuracy. Finally, we published the complemented LOD and a Web application to visualize the distribution of illegally parked bicycles in the city. We hope this raises social attention on this issue.

Author: Yohei Yamane (id: a252745c5866421122de6b67e10caffc0a31658f)
Paper: DeFacto - Temporal and multilingual Deep Fact Validation
Keywords: Fact validation; Web of Data; Provenance; NLP
Abstract: One of the main tasks when creating and maintaining knowledge bases is to validate facts and provide sources for them in order to ensure correctness and traceability of the provided knowledge. So far, this task is often addressed by human curators in a three-step process: issuing appropriate keyword queries for the statement to check using standard search engines, retrieving potentially relevant documents, and screening those documents for relevant content. The drawbacks of this process are manifold. Most importantly, it is very time-consuming as the experts have to carry out several search processes and must often read several documents. In this article, we present DeFacto (Deep Fact Validation), an algorithm able to validate facts by finding trustworthy sources for them on the Web. DeFacto aims to provide an effective way of validating facts by supplying the user with relevant excerpts of web pages as well as useful additional information, including a score for the confidence DeFacto has in the correctness of the input fact. To achieve this goal, DeFacto collects and combines evidence from web pages written in several languages. In addition, DeFacto provides support for facts with a temporal scope, i.e., it can estimate in which time frame a fact was valid. Given that the automatic evaluation of facts has not been paid much attention to so far, generic benchmarks for evaluating these frameworks were not previously available. We thus also present a generic evaluation framework for fact checking and make it publicly available. (An evidence-combination sketch follows below.)
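One common way to "collect and combine evidence" into a single confidence score is a noisy-or over per-page scores. The sketch below shows that combinator only as an illustrative assumption; DeFacto's actual scoring model is learned and more involved.

```java
import java.util.List;

// Hedged sketch: combine per-page evidence scores via noisy-or,
// confidence = 1 - prod(1 - s_i). The scores are invented.
public class EvidenceCombination {
    static double noisyOr(List<Double> pageScores) {
        double noneCorrect = 1.0;
        for (double s : pageScores) noneCorrect *= (1.0 - s);
        return 1.0 - noneCorrect;
    }

    public static void main(String[] args) {
        // Three web pages supporting the input fact with different strengths.
        System.out.printf("confidence = %.3f%n", noisyOr(List.of(0.4, 0.3, 0.2)));
    }
}
```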
Paper: Exception-enriched Rule Learning from Knowledge Graphs
Keywords: Rules with exceptions; Knowledge graphs; Knowledge base completion; Rule mining
Abstract: Advances in information extraction have enabled the automatic construction of large knowledge graphs (KGs) like DBpedia, Freebase, Yago and Wikidata. These KGs are inevitably bound to be incomplete. To fill in the gaps, data correlations in the KG can be analyzed to infer Horn rules and to predict new facts. However, Horn rules do not take into account possible exceptions, so that predicting facts via such rules introduces errors. To overcome this problem, we present a method for effective revision of learned Horn rules by incorporating exceptions (i.e., negated atoms) into their bodies. This way errors are largely reduced. We apply our method to discover rules with exceptions from real-world KGs. Our experimental results demonstrate the effectiveness of the developed method and the improvements in accuracy for KG completion by rule-based fact prediction. (A toy rule sketch follows the author and organization entries below.)

Author: Khaled Rasheed (id: 0dcf9fe7adc3bfea3c0dade5dc57da19e188ffd4)
Author: Giuseppe Rizzo (id: d6db5bfd6b52117e6f1162c66351046a1b87c8f4)
Author: Soheila Dehghanzadeh (id: e510ea1c0a6ba7bc006ec1c4c5e2d07c9fb725cc)
Author: Armen Inants (id: 0bc71d13b13b2b87aa2d33f3857a17fd5f9fe8b6)

Organization: German Research Center for Artificial Intelligence
Organization: British Library

Author: Ermei Cao (id: a26a2d07a4430148de3b8b4c26c2bac7362544dd)
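Returning to the exception-enriched rule learning abstract above: a Horn rule with a negated exception atom applied to a tiny fact set. The rule and data are invented for illustration; the paper's contribution is learning such exceptions, which this omits.

```java
import java.util.*;

// Toy rule with an exception (negated atom), illustrative only:
// livesIn(x, y) <- marriedTo(x, z), livesIn(z, y), NOT researcher(x).
public class ExceptionRule {
    public static void main(String[] args) {
        Map<String, String> marriedTo = Map.of("anna", "bob", "carla", "dan");
        Map<String, String> livesIn = Map.of("bob", "berlin", "dan", "paris");
        Set<String> researcher = Set.of("carla"); // exception: may live elsewhere

        for (var m : marriedTo.entrySet()) {
            String x = m.getKey(), z = m.getValue();
            String y = livesIn.get(z);
            if (y != null && !researcher.contains(x)) {
                System.out.println("predict livesIn(" + x + ", " + y + ")");
            }
        }
    }
}
```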
Paper: FOOD: FOod in Open Data
Keywords: Ontologies; Linked Open Data; Ontology Design Patterns
Abstract: This paper describes the outcome of an e-government project named FOOD, FOod in Open Data, which was carried out in the context of a collaboration between the Institute of Cognitive Sciences and Technologies of the Italian National Research Council, the Italian Ministry of Agriculture (MIPAAF) and the Italian Digital Agency (AgID). In particular, we implemented several ontologies for describing protected names of products (wine, pasta, fish, oil, etc.). In addition, we present the process carried out for producing and publishing a LOD dataset containing data extracted from existing Italian policy documents on such products and compliant with the aforementioned ontologies.

Author: Axel Polleres (id: be73142926111b35aed1f1001bc62a1edb3ea1d1)
Author: Hiroshi Umemoto (id: 8280495beca572e92aed89d57c1f99dd9e45d066)
Paper: Visual query interfaces for semantic datasets: an evaluation study
Keywords: User studies; Visual query interfaces; Usability; Semantic search
Abstract: The rapid growth of the Linked Open Data cloud, as well as the increasing ability to lift relational enterprise datasets to a semantic, ontology-based level, means that vast amounts of information are now available in a representation that closely matches the conceptualizations of the potential users of this information. This makes it interesting to create ontology-based, user-oriented tools for searching and exploring this data. Although initial efforts were intended for tech users with knowledge of SPARQL/RDF, there are ongoing proposals designed for lay users. One of the most promising approaches is to use visual query interfaces, but more user studies are needed to assess their effectiveness. In this paper, we compare the effect on usability of two important paradigms for ontology-based query interfaces: form-based and graph-based interfaces. In order to reduce the number of variables affecting the comparison, we performed a user study with two state-of-the-art query tools developed by ourselves, sharing a large part of the code base: the graph-based tool OptiqueVQS*, and the form-based tool PepeSearch. We evaluated these tools in a formal comparison study with 15 participants searching a Linked Open Data version of the Norwegian Company Registry. Participants had to respond to 6 non-trivial search tasks using alternately OptiqueVQS* and PepeSearch. Even without previous training, retrieval performance and user confidence were very high, thus suggesting that both interface designs are effective for searching RDF datasets. Expert searchers had a clear preference for the graph-based interface, and mainstream searchers obtained better performance and confidence with the form-based interface. While a number of participants spontaneously praised the capability of the graph interface for composing complex queries, our results show that graph interfaces are difficult to grasp. In contrast, form interfaces are more learnable and relieve problems with disorientation for mainstream users. We have also observed positive results introducing faceted search and dynamic term suggestion in semantic search interfaces.

Author: Yu Sugawara (id: 429c9c0ab6250d527315b88a1d93ab3bd82cef6f)
Paper: Structuring Linked Data Search Results Using Probabilistic Soft Logic
Keywords: linked data integration; probabilistic soft logic; linked data search
Abstract: On-the-fly generation of integrated representations of Linked Data (LD) search results is challenging because it requires successfully automating a number of complex subtasks, such as structure inference and matching of both instances and concepts, each of which gives rise to uncertain outcomes. Such uncertainty is unavoidable given the semantically heterogeneous nature of web sources, including LD ones. This paper approaches the problem of structuring LD search results as an evidence-based one. In particular, the paper shows how one formalism (viz., probabilistic soft logic (PSL)) can be exploited to assimilate different sources of evidence in a principled way and to beneficial effect for users. The paper considers syntactic evidence derived from matching algorithms, semantic evidence derived from LD vocabularies, and user evidence, in the form of feedback. The main contributions are: sets of PSL rules that model the uniform assimilation of diverse kinds of evidence, an empirical evaluation of how the resulting PSL programs perform in terms of their ability to infer structure in LD search results, and, finally, a concrete example of how populating such inferred structures for presentation to the end user is beneficial, besides enabling the collection of feedback whose assimilation further improves search result presentation. (A soft-rule sketch follows after the author entry below.)

Author: Konrad Höffner (id: 4ffd792932126fe6ab4d187c84c65c0f9c665057)
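For the PSL abstract above, a minimal sketch of the soft-logic machinery PSL builds on: under Lukasiewicz semantics, a rule body -> head is satisfied to degree min(1, 1 - body + head), and its distance to satisfaction is max(0, body - head). The example atoms are invented, not the paper's rules.

```java
// Hedged sketch of Lukasiewicz soft-rule semantics as used in PSL;
// the atom names and truth values below are illustrative assumptions.
public class SoftRule {
    static double distanceToSatisfaction(double bodyTruth, double headTruth) {
        return Math.max(0.0, bodyTruth - headTruth);
    }

    public static void main(String[] args) {
        // e.g. syntacticMatch(a,b)=0.8 as body, sameConcept(a,b)=0.5 as head:
        System.out.println(distanceToSatisfaction(0.8, 0.5)); // 0.3 penalty
        System.out.println(distanceToSatisfaction(0.4, 0.9)); // 0.0, satisfied
    }
}
```

PSL inference then minimizes the weighted sum of such distances over all ground rules, which is what lets the paper assimilate syntactic, semantic, and user evidence in one optimization.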
Paper: Benchmarking End-User Structured Data Search and Exploration
Keywords: user experience; semantic data; search; relational data; exploration; usability; benchmark
Abstract: The Semantic Web community has invested significant research effort in developing systems for Semantic Web search and exploration. But while it has been easy to assess the systems' computational efficiency, it has been much harder to assess how well different semantic systems help their users find and browse information. In this article, we propose and demonstrate the use of a benchmark for evaluating them, similar to the TREC benchmark for evaluating traditional search engines. Our benchmark includes a set of typical user tasks and a well-defined procedure for assigning a measure of performance on those tasks to a semantic system. We demonstrate its application to one such system, Rhizomer. We intend for this work to initiate a community conversation that will lead to a generally accepted framework for comparing systems and measuring, and thus encouraging, progress towards better semantic search and exploration tools.

Organization: Auckland University of Technology

Talk: Querying Wikidata: Comparing SPARQL, Relational and Graph Databases — Daniel Hernandez, Aidan Hogan, Cristian Riveros, Carlos Rojas and Enzo Zerega (2016-10-20, 11:10–11:30)

Author: Jeff Z. Pan (id: e409b5eceff3b8cf4be69005301c6984fa6ceae3)
Author: Corentin Jouault (id: 6aa1c38a4568fa3bc90618e86135e2b8e37a34ed)
Author: Kleanthi Georgala (id: d160fc13181daaa28c946ed7699a99d5a06201cd)
Author: Davide Lanti (id: de114ecaecd68c6eba2c09ba149487b21fbb31dd)

Paper: Working process quantification in factory using wearable sensor device and ontology-based stream data processing
Keywords: Ontology; Wearable device; IOT; Stream data processing
Abstract: A method for quantifying working processes on manufacturing floors was established that uses a wearable sensor device and an ontology-based stream data processing system. Using this method, the measurement of manufacturing process efficiency from sensor data extracted from such a device worn by workers on the job was confirmed at the Fuji Xerox factory. (A windowing sketch follows below.)
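As a generic illustration of the stream-processing side of the abstract above (not the system's actual ontology-based query evaluation): a fixed-size sliding window over sensor readings emitting a running average.

```java
import java.util.ArrayDeque;
import java.util.Deque;

// Hedged sketch: sliding-window aggregation over a sensor stream.
// Readings and window size are invented for illustration.
public class SlidingWindow {
    public static void main(String[] args) {
        double[] readings = {1.0, 2.0, 4.0, 8.0, 16.0};
        int size = 3;
        Deque<Double> window = new ArrayDeque<>();
        double sum = 0.0;
        for (double r : readings) {
            window.addLast(r);
            sum += r;
            if (window.size() > size) sum -= window.removeFirst();
            System.out.printf("reading %.1f -> window avg %.2f%n", r, sum / window.size());
        }
    }
}
```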
Author: Esteban Gonzalez (id: 21e27cb7ab09b1bff3892cf43b13f33164c93811)

Talk: Semantic labeling: A domain-independent approach — Minh Pham, Suresh Alse, Craig Knoblock and Pedro Szekely (2016-10-21, 16:10–16:30)

Organization: ISEP, BILab

Paper: A RDF based Portal of Biological Phenotype Data produced in Japan
Keywords: data integration; Biological phenotype; RDF
Abstract: We developed RDF-based databases of phenotypes and animal strains produced in Japan, and a portal site termed “J-Phenome”. Through the application of a common schema, these databases can be retrieved with the same SPARQL query across graphs. In the operation of these databases, RDF showed multiple advantages over conventional technologies, such as improved comprehensive search, data integration using ontologies and public data, reuse of data, and wider dissemination of phenotype data. (A cross-graph query sketch follows below.)
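To illustrate "the same SPARQL query across graphs" from the J-Phenome abstract: a sketch that assembles one SELECT with a GRAPH clause per dataset. The graph IRIs and triple pattern are invented placeholders, not J-Phenome's real schema.

```java
import java.util.List;

// Hedged sketch: build a SPARQL query that UNIONs the same pattern over
// several named graphs. All IRIs are illustrative assumptions.
public class CrossGraphQuery {
    public static void main(String[] args) {
        List<String> graphs = List.of(
                "http://example.org/graph/mouse-strains",
                "http://example.org/graph/rat-strains");
        StringBuilder q = new StringBuilder("SELECT ?strain ?phenotype WHERE {\n");
        for (String g : graphs) {
            q.append("  { GRAPH <").append(g)
             .append("> { ?strain <http://example.org/vocab#phenotype> ?phenotype } }\n")
             .append("  UNION\n");
        }
        q.setLength(q.length() - "  UNION\n".length()); // drop trailing UNION
        q.append("}");
        System.out.println(q);
    }
}
```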
Organization: Accenture Technology Labs

Session: Search (I) (2016-10-20, 13:30–14:50)

Organization: Vienna University of Economy and Business (WU)
Organization: IRIT

Author: Makoto Nakatsuji (id: 748dfd31068eb8b775edb25f2594071336437b28)
Author: Mikhail Roshchin (id: 3bc2893aa0872907eadc78ded2963c3fa0653366)

Paper: Optimization Techniques for 2-hop Labeling of Dynamic Directed Acyclic Graphs
Keywords: doctoralconsortium-2-hop labeling; Linked Open Data; LOD; Directed Acyclic Graph; DAG; graph reachability
Abstract: Directed Acyclic Graph (DAG) data is increasingly available on the Web, including Linked Open Data (LOD). Mining reachability relationships between entities is an important task for extracting knowledge from LOD. Diverse labeling schemes have been proposed to efficiently determine reachability. We focus on a state-of-the-art 2-hop labeling scheme that is based on a permutation of vertices to achieve a linear index size and reduce the on-line searches that are required when reachability cannot be answered by 2-hop labels alone. We observed that the approach can be improved in three different ways: 1) space efficiency - guarantee the minimized index size without randomness; 2) update efficiency - update labels efficiently when graphs change; 3) parallelization - labeling should be cluster-based, and solved in a distributed fashion. In these regards, this PhD thesis proposes optimization techniques that address these issues. In this paper in particular, a way of reducing the 2-hop label size is proposed, with preliminary results on real-world DAG datasets. In addition, we will discuss the feasibility of the other issues based on our ongoing work. (A reachability-check sketch follows below.)
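The core 2-hop labeling idea behind the abstract above: each vertex v stores OUT(v), the hop vertices v can reach, and IN(v), the hop vertices that reach v; then u reaches w iff OUT(u) and IN(w) intersect. The labels below are hand-picked for a toy chain a -> b -> c; real schemes compute them via a vertex permutation, which this sketch omits.

```java
import java.util.*;

// Hedged sketch of 2-hop reachability answering; labels are a valid
// hand-made 2-hop cover for the chain a -> b -> c.
public class TwoHopReachability {
    static boolean reaches(Set<String> outU, Set<String> inW) {
        return outU.stream().anyMatch(inW::contains);
    }

    public static void main(String[] args) {
        Map<String, Set<String>> out = Map.of(
                "a", Set.of("a", "b"), "b", Set.of("b"), "c", Set.of("c"));
        Map<String, Set<String>> in = Map.of(
                "a", Set.of("a"), "b", Set.of("b"), "c", Set.of("b", "c"));
        System.out.println(reaches(out.get("a"), in.get("c"))); // true, via hop b
        System.out.println(reaches(out.get("c"), in.get("a"))); // false
    }
}
```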
Author: Paul Groth

Talk: Using Semantic Web Technologies for Explaining and Predicting Abnormal Expense — Freddy Lecue, John Vard and Jiewen Wu (2016-10-19, 18:00–21:00)

Author: Michael Martin (id: 577bfc91a74510adea7fbbb5686629985f5c6864)

Session: Lunch (2016-10-19, 12:20–14:00)

Organization: Pistoia Alliance

Talk: A Web Application to Search a Large Repository of Taxonomic Relations from the Web — Stefano Faralli, Christian Bizer, Kai Eckert, Robert Meusel and Simone Paolo Ponzetto (2016-10-19, 18:00–21:00)

Author: Kai Eckert (id: a123c0989154667c08f2ca1814cb0e750c514cfd)
Author: Octavian Rinciog (id: 44b2319b9be44404698a32a105b1c10b6287d435)
Author: Achille Fokoue (id: 42e105e76a4873077b7c63e06d51946a89f0eafd)

Organization: Insight Centre for Data Analytics, National University of Ireland

Author: Ran Yu (id: cbf775d617898b6bf5237bf149e3ca57a5a1e8f8)
Paper: Querying Distributed Heterogeneous Linked Data Interfaces through Reasoning
Keywords: triple pattern fragments; federated querying; linked data; linked data fragments; reasoning; sparql
Abstract: Linked Data can be distributed through multiple interfaces on the Web, each of them with their own expressivity. However, there is no generic client available that can handle querying over multiple interfaces. This increases the complexity of combining datasets and designing new interfaces. One can imagine the difficulties that arise when trying to create a client querying various interfaces at the same time, which can be discovered just in time. To this end, I aim to design a generic Linked Data querying engine capable of handling different interfaces that can easily be extended. Rule-based reasoning is going to be explored to combine different interfaces without intervention of a human developer. Using an iterative approach to extend Linked Data interfaces, I am going to evaluate different querying set-ups for the SPARQL language. Preliminary results indicate a broad spectrum of yet-to-be-explored options. As the PhD is still in an early phase, we hope to narrow the scope in the next months, based on feedback of the doctoral consortium. (An interface-abstraction sketch follows below.)
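A minimal sketch of the "one client, many interface types" idea from the abstract above: the client programs against a common abstraction, and each Linked Data interface type plugs in behind it. The toy in-memory implementation is invented; a real client would speak HTTP to a SPARQL endpoint, a Triple Pattern Fragments server, plain documents, and so on.

```java
import java.util.List;

// Hedged sketch: a common interface over heterogeneous Linked Data sources.
// Pattern matching on p and o is omitted for brevity; data is invented.
public class GenericClient {
    interface LinkedDataInterface {
        List<String> triplesMatching(String s, String p, String o);
    }

    record InMemorySource(List<String> triples) implements LinkedDataInterface {
        public List<String> triplesMatching(String s, String p, String o) {
            return triples.stream()
                    .filter(t -> s == null || t.startsWith(s))
                    .toList();
        }
    }

    public static void main(String[] args) {
        LinkedDataInterface src = new InMemorySource(
                List.of(":alice :knows :bob .", ":bob :knows :carol ."));
        // The client only sees the abstract interface, whatever sits behind it.
        System.out.println(src.triplesMatching(":alice", null, null));
    }
}
```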
Talk: A Knowledge Base Approach to Cross-lingual Keyword Query Interpretation — Lei Zhang and Achim Rettinger (2016-10-20, 14:10–14:30)

Talk: Querying Distributed Heterogeneous Linked Data Interfaces through Reasoning — Joachim Van Herwegen (2016-10-18, 16:45–17:00)

Talk: Ontologies for Knowledge Graphs: Breaking the Rules — Markus Krötzsch and Veronika Thost (2016-10-19, 11:20–11:40)

Paper: Feature Generation using Ontologies during Induction of Decision Trees on Linked Data
Keywords: linked data; semantic relatedness; Hoeffding bound; feature generation; Hoeffding tree; ontology; RDF; decision tree
Abstract: Linked data has the potential of interconnecting data from different domains, bringing new potential for machine agents to provide better services to web users. The ever increasing amount of linked data in government open data, social linked data, and linked medical and patients' data provides new opportunities for data mining and machine learning. Both are, however, strongly dependent on the selection of high-quality data features to achieve good results. In this work we present an approach that uses ontological knowledge to generate features that are suitable for building a decision tree classifier addressing the specific data set and classification problem. The approach that we present has two main characteristics: it generates new features on demand as required by the induction algorithm, and it uses ontological knowledge about linked data to restrict the set of possible options. These two characteristics enable the induction algorithm to look for features that might be connected through many entities in the linked data, enabling the generation of cross-domain explanation models. (An information-gain sketch follows below.)
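For the feature-generation abstract above, the criterion a decision-tree inducer typically uses to pick among candidate features is information gain. The sketch below computes it for one boolean split with invented counts; the paper's contribution, generating the candidates from ontological knowledge, is not reproduced here.

```java
// Hedged sketch: information gain of a boolean feature split.
// Counts are invented; gain = H(parent) - weighted H(children).
public class InformationGain {
    static double entropy(double pos, double neg) {
        double total = pos + neg;
        double h = 0.0;
        for (double c : new double[]{pos, neg}) {
            if (c > 0) {
                double p = c / total;
                h -= p * Math.log(p) / Math.log(2);
            }
        }
        return h;
    }

    public static void main(String[] args) {
        // 10 examples: 6 positive / 4 negative; the feature splits them 5/1 vs 1/3.
        double parent = entropy(6, 4);
        double children = (6.0 / 10) * entropy(5, 1) + (4.0 / 10) * entropy(1, 3);
        System.out.printf("gain = %.3f bits%n", parent - children); // about 0.257
    }
}
```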
Author: Lorenz Bühmann (id: 6ee6fbd8b4b0d98ca0f572ec43bfee4b3e699f9c)

Talk: An On-Line Learning to Query System — Jędrzej Potoniec (2016-10-19, 18:00–21:00)

Author: Özgür Lütfü Özcep (id: b0dd6a6493ec27189fb906b672d38f57a02776a9)
Author: Cyril Chapellier (id: d3bc2053f4cc681a54c86a72e598db673559ef2f)

Paper: Studying Metadata for better client-server trade-offs in Linked Data publishing
Keywords: Linked Data Fragments; Semantic Web; Metadata
Abstract: Client-server trade-offs can be analyzed using Linked Data Fragments, which proposes a uniform view on all interfaces to RDF. This reveals a complete spectrum between Linked Data documents and the SPARQL protocol, in which we can advance the state of the art of Linked Data publishing. This axis can be explored in the following two dimensions: i) Selector, allowing different, more complex questions for the server; and ii) Metadata, extending the response with more information clients can use. This work studies the second, Metadata, dimension in a practical Web context. Considering the conditions on the Web, this problem becomes three-fold. First, analogous to the Web itself, LDF interfaces should exist in a distributed, scalable manner in order to succeed. Generating additional metadata introduces overhead on the server, which influences the ability to scale towards multiple clients. Second, the communication between client and server uses the HTTP protocol. Modeling, serialization, and compression determine the extra load on the overall network traffic. Third, with query execution on the client, novel approaches need to apply this metadata intelligently to increase efficiency. Concretely, this work defines and evaluates a series of transparent, interchangeable, and discoverable interface features. We proposed Triple Pattern Fragments, a Linked Data API with low server cost, as a fundamental base. This interface uses a single triple pattern as selector. To explore this research space, we extend this interface with different metadata, starting with an estimated number of total matching triples. By combining several TPFs, SPARQL queries are evaluated on the client side, using the metadata for optimization. Hence, we can measure the query execution (A join-ordering sketch follows below.)
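The "estimated number of total matching triples" metadata mentioned above is what lets a client order its work: evaluate the most selective triple pattern first. A minimal sketch follows; the patterns and counts are invented, and a real client would read them from fragment metadata rather than hard-code them.

```java
import java.util.*;

// Hedged sketch: order triple patterns by estimated match count so the
// most selective pattern is fetched first. All data is invented.
public class JoinOrdering {
    record Pattern(String pattern, long estimatedMatches) {}

    public static void main(String[] args) {
        List<Pattern> patterns = new ArrayList<>(List.of(
                new Pattern("?film :director :Kubrick", 13),
                new Pattern("?film :starring ?actor", 2_000_000),
                new Pattern("?actor :birthPlace :London", 45_000)));
        patterns.sort(Comparator.comparingLong(Pattern::estimatedMatches));
        patterns.forEach(p ->
                System.out.println(p.estimatedMatches() + "\t" + p.pattern()));
    }
}
```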
Person: Roger Nkambou.
Person: Mark Sandler.
Person: Heiner Stuckenschmidt.
Talk: "A-Posteriori Data Integration for Life Sciences" — Syed Muhammad Ali Hasnain (2016-10-18, 11:30–11:45).
Person: Minh-Duc Pham.
Poster/demo: "OntoCASE4G-OWL: Towards a modeling software tool for G-OWL, a visual syntax for RDF/RDFS/OWL2" — Michel Héon, Roger Nkambou and Mohamed Gaha (2016-10-19, 18:00–21:00).
Person: Md. Kamruzzaman Sarker.
Person: Stefanos Roumeliotis.
Organization: RWTH Aachen University.
Organization: Kaiser Permanente.
Person: Lara S. G. Piccolo.
Person: Nicolas Matentzoglu.

Demo: "Scalable Semantic Access to Siemens Static and Streaming Distributed Data" — keywords: ontologies, streaming data, static data, data access.
Abstract: Numerous analytical tasks in industry rely on data integration solutions, since they require data from multiple static and streaming data sources. In the context of the Optique project we have investigated how Semantic Technologies can enhance data integration and thus facilitate further data analysis. We introduced the notion of Ontology-Based Stream-Static Data Integration and developed the system Optique to put our ideas into practice. In this demo we show how Optique can help in diagnostics of power-generating turbines at Siemens Energy. For this purpose we prepared anonymised streaming and static data from 950 Siemens power-generating turbines with more than 100,000 sensors, and deployed Optique on distributed environments with 128 nodes. Demo attendees will be able to do diagnostics of turbines by registering and monitoring continuous queries that combine streaming and static data; to test the scalability of our dedicated stream management system, which can process up to 1,024 concurrent complex diagnostic queries with a 10 TB/day throughput; and to deploy Optique over Siemens demo data using our interactive system for creating semantic abstraction layers over data sources.
Person: Houda Khrouf.
Organization: DASPLab, DISI, University of Bologna.
Poster/demo: "An Evaluation of VIG with the BSBM Benchmark" — Davide Lanti, Guohui Xiao and Diego Calvanese (2016-10-19, 18:00–21:00).
Person: Francesco Osborne.

Demo: "Smart Trip Alternatives for the Curious" — keywords: flight planning, GTFS, data enrichment, POIs, stopovers.
Abstract: When searching for flights, current systems often suggest routes involving waiting times at stopovers. There may exist alternative routes that are more attractive from a touristic perspective, because their duration is not necessarily much longer while offering enough time in an appropriate place. Choosing among such alternatives requires additional planning effort to make sure that, e.g., points of interest can conveniently be reached in the allowed time frame. We present a system that automatically computes smart trip alternatives between any two cities. To do so, it searches for points of interest in large semantic datasets, considering the set of accessible areas around each possible layover. It then selects feasible alternatives and displays their differences with respect to the default trip.
Organization: FZI Research Center for Information Technology.
Person: Pierre Geneves.
Organization: University of Mannheim.
Poster/demo: "Semantic Audit Application" — Katalin Ternai and Ildikó Szabó (2016-10-19, 18:00–21:00).
Person: Changlong Wang.
Person: Antske Fokkens.
Person: Adila A. Krisnadhi.

Paper: "Unsupervised Entity Resolution on Multi-type Graphs" — keywords: Entity resolution, Record Linkage, Multi-type Graph Summarization.
Abstract: We address the problem of performing entity resolution on RDF graphs containing multiple types of nodes, using the links between instances of different types to improve accuracy. For example, in a graph of products and manufacturers, the goal is to resolve all the products and all the manufacturers. We formulate this as a multi-type graph summarization problem, which involves clustering the nodes of each type that refer to the same entity into one super node, and creating weighted links among super nodes that summarize the inter-cluster links in the original graph. Experiments show that the proposed approach outperforms several state-of-the-art generic entity resolution approaches, especially on data sets with one-to-many and many-to-many relations and attributes with missing values.
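A minimal sketch of the summarization step, not the paper's clustering algorithm: within-type matches collapse nodes into super nodes (union-find), and cross-type links are aggregated into weighted super-edges. The inputs are invented.

    from collections import defaultdict

    parent = {}
    def find(x):
        parent.setdefault(x, x)
        while parent[x] != x:
            parent[x] = parent[parent[x]]       # path halving
            x = parent[x]
        return x

    def union(a, b):
        parent[find(a)] = find(b)

    # assumed inputs: same-entity matches per type and typed inter-node links
    matches = [("prod:p1", "prod:p2"), ("manu:m1", "manu:m2")]
    links = [("prod:p1", "manu:m1", 1.0), ("prod:p2", "manu:m2", 1.0),
             ("prod:p3", "manu:m1", 1.0)]

    for a, b in matches:
        union(a, b)

    super_edges = defaultdict(float)
    for u, v, w in links:
        super_edges[(find(u), find(v))] += w    # summarize inter-cluster links

    print(dict(super_edges))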
Person: Hatim Aouzal.
Organization: STLab (ISTC-CNR).
Talk: "Ontop: Answering SPARQL queries over relational databases" — Diego Calvanese, Benjamin Cogrel, Sarah Komla-Ebri, Roman Kontchakov, Davide Lanti, Martin Rezk, Mariano Rodriguez-Muro and Guohui Xiao (2016-10-21, 14:10–14:30).
Person: Ujwal Gadiraju.
Person: Masao Watanabe.
Person: Pascal Hitzler.
Person: Denis Parra.
Person: Bruno Charron.
Person: Imen Megdiche.
Person: Filippo Gramegna.
Person: Jun Ding.
Poster/demo: "Discovering and Using Functions via Content Negotiation" — Ben De Meester, Anastasia Dimou, Ruben Verborgh, Erik Mannens and Rik Van de Walle (2016-10-19, 18:00–21:00).
Person: Frank Van Harmelen.

Paper: "Entity-Relationship Extraction from Wikipedia Unstructured Text" — keywords: Information extraction, Relation extraction, Knowledge base generation, Wikipedia.
Abstract: Wikipedia has been the primary source of information for many automatically generated Semantic Web data sources. However, these suffer from incompleteness, since they largely do not cover information contained in the unstructured texts of Wikipedia. Our goal is to extract structured entity-relationships in RDF from such unstructured texts, ultimately using them to enrich existing data sources. Our extraction technique aims to be topic-independent, leveraging the grammatical dependencies of sentences and context semantic refinement. Preliminary evaluations of the proposed approach have shown some promising results.
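A minimal sketch of dependency-based triple extraction, not the authors' pipeline: take a verb, its nominal subject, and its direct or prepositional object. Assumes spaCy with the en_core_web_sm model installed.

    import spacy

    nlp = spacy.load("en_core_web_sm")

    def extract(text):
        triples = []
        for sent in nlp(text).sents:
            for tok in sent:
                if tok.pos_ not in ("VERB", "AUX"):
                    continue
                subjects = [c for c in tok.children if c.dep_ in ("nsubj", "nsubjpass")]
                objects = [c for c in tok.children if c.dep_ in ("dobj", "attr")]
                for prep in (c for c in tok.children if c.dep_ == "prep"):
                    objects += [c for c in prep.children if c.dep_ == "pobj"]
                triples += [(s.text, tok.lemma_, o.text) for s in subjects for o in objects]
        return triples

    print(extract("Turing studied mathematics at Cambridge."))
    # e.g. [('Turing', 'study', 'mathematics'), ('Turing', 'study', 'Cambridge')]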
Talk: "Domain Adaptation for Ontology Localization" — John P. McCrae, Mihael Arcan, Kartik Asooja, Jorge Gracia, Paul Buitelaar and Philipp Cimiano (2016-10-19, 15:00–15:20).
Organization: Ghent University.
Person: Alvaro A. A. Fernandes.
Person: Wendy Hall.
Person: Jędrzej Potoniec.
Talk: "A Reuse-based Annotation Approach for Medical Documents" — Victor Christen, Anika Groß and Erhard Rahm (2016-10-21, 13:50–14:10).
Person: Markus Freudenberg.
Organization: National Institute of Informatics.

Paper: "User validation in ontology alignment" — keywords: ontology alignment, ontology engineering, user interaction.
Abstract: User validation is one of the challenges facing the ontology alignment community, as there are limits to the quality of automated alignment algorithms. In this paper we present a broad study on user validation of ontology alignments that encompasses three distinct but interrelated aspects: the profile of the user, the services of the alignment system, and its user interface. We discuss key issues pertaining to the alignment validation process under each of these aspects, and provide an overview of how current systems address them. Finally, we use experiments from the Interactive Matching track of the Ontology Alignment Evaluation Initiative (OAEI) 2015 to assess the impact of errors in alignment validation, and how systems cope with them as a function of their services.
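A small worked example in the spirit of that error analysis (our toy numbers, not the OAEI setup): a matcher proposes candidate mappings, a user validates them, and validation errors at rate e flip each verdict, degrading the resulting F-measure.

    import random

    def f1(selected, reference):
        tp = len(selected & reference)
        if not selected or not tp:
            return 0.0
        p, r = tp / len(selected), tp / len(reference)
        return 2 * p * r / (p + r)

    rng = random.Random(1)
    reference = {("a", "x"), ("b", "y"), ("c", "z")}
    candidates = reference | {("a", "y"), ("c", "w")}   # matcher output with noise

    for e in (0.0, 0.1, 0.3):
        accepted = {m for m in candidates
                    if (m in reference) != (rng.random() < e)}  # XOR flips the verdict
        print(f"error rate {e:.1f}: F1 = {f1(accepted, reference):.2f}")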
Person: Khadija Elbedweihy.
Person: Gerben Klaas Dirk de Vries.
Person: Jürgen Umbrich.
Person: Colette Menard.
Person: Jooik Jung.
Person: Atul Nautiyal.
Person: Laura Slaughter.
Organization: University of Southampton.

Paper: "User-Friendly and Scalable Platform for the Design of Intelligent IoT Services: a Smart Office Use Case" — keywords: Internet of Things (IoT), Semantic Platform, Smart Office, User-friendly, Scalable.
Abstract: The Internet of Things (IoT) is starting to take a prevalent role in our daily lives. Smart offices that automatically adapt their environment to make life at the office as pleasant as possible are slowly becoming reality. In this paper we present a user-friendly, semantic-based smart office platform that allows, through easy configuration, a personalized and comfortable experience at the office.
Talk: "Applications of Large Displays: Advancing User Support in Large Scale Ontology Alignment" — Valentina Ivanova (2016-10-18, 16:15–16:30).
Organization: Sapienza Università di Roma.
Person: Carlos Pedrinaci.
Organization: Japan Science and Technology Agency.

Paper: "Linked Disambiguated Distributional Semantic Networks" — keywords: distributional semantic representations, distributional model, lexical-semantic networks, word sense disambiguation.
Abstract: We present a new hybrid knowledge base that combines the contextual information of distributional models with the conciseness and precision of manually constructed lexical networks. In contrast to dense vector representations, our resource is human-readable and interpretable, and can be easily embedded within the Semantic Web ecosystem. Manual evaluation based on human judgments and an extrinsic evaluation on the task of Word Sense Disambiguation both indicate the high quality of the resource, as well as the benefits of enriching top-down lexical knowledge resources with bottom-up distributional information from text.
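A minimal sketch of the hybrid idea, not the paper's resource: score each WordNet sense of a word by its overlap with the word's distributional neighbours (stubbed here). Assumes nltk with its "wordnet" data package downloaded.

    from nltk.corpus import wordnet as wn

    neighbours = {"bank": {"money", "loan", "deposit", "finance"}}  # stub distributional model

    def linked_senses(word):
        scored = []
        for syn in wn.synsets(word):
            context = {l.name().lower() for l in syn.lemmas()}
            context |= set(syn.definition().lower().split())
            scored.append((len(context & neighbours[word]), syn.name(), syn.definition()))
        return sorted(scored, reverse=True)

    for score, sense, gloss in linked_senses("bank")[:3]:
        print(score, sense, gloss)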
Person: Luca Costabello.
Person: Aldo Gangemi.
Person: Nozomu Ohshiro.
Person: Oscar Corcho.

Paper: "Zhishi.lemon: On Publishing Zhishi.me as Linguistic Linked Open Data" — keywords: linked data, multilingualism, translation.
Abstract: Recently, a growing number of linguistic resources in different languages have been published and interlinked as part of the Linguistic Linked Open Data (LLOD) cloud. However, in comparison to English and other prominent languages, the presence of Chinese in this cloud is still limited, despite the fact that Chinese is the most spoken language worldwide. Publishing more Chinese language resources in the LLOD cloud can benefit both academia and industry, helping to better understand the language itself and to build multilingual applications that improve the flow of data and services across countries. In this paper, we describe Zhishi.lemon, a newly developed dataset based on the lemon model that constitutes the lexical realization of Zhishi.me, one of the largest Chinese datasets in the Linked Open Data (LOD) cloud. Zhishi.lemon combines the lemon core with the lemon translation module in order to build a linked data lexicon in Chinese with translations into Spanish and English. Links to BabelNet (a vast multilingual encyclopedic resource) are provided as well. We also present a showcase of this module, along with the technical details of transforming Zhishi.me to Zhishi.lemon. We have made the dataset accessible on the Web for both humans (via a Web interface) and software agents (with a SPARQL endpoint).
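A rough illustration of a lemon lexical entry of the kind such a lexicon publishes. The IRIs are toy examples, the property names follow the lemon core model as we recall it, and the real dataset is far richer (it also uses the translation module).

    from rdflib import Graph

    ENTRY = """
    @prefix lemon: <http://lemon-model.net/lemon#> .
    @prefix ex:    <http://example.org/lexicon#> .

    ex:lex_yinhang a lemon:LexicalEntry ;
        lemon:canonicalForm [ lemon:writtenRep "银行"@zh ] ;
        lemon:sense [ lemon:reference <http://example.org/resource/Bank> ] .
    """

    g = Graph().parse(data=ENTRY, format="turtle")
    print(len(g), "triples")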
Session: Coffee Break (2016-10-19, 15:20–15:50).
Person: Mariano Mora-Mcginity.

Paper: "EnergyUse - A Collaborative Semantic Platform for Monitoring and Discussing Energy Consumption" — keywords: Energy Consumption, Energy Monitors, Climate Change, Semantic Collaborative Platforms.
Abstract: Conserving fossil-based energy to reduce carbon emissions is key to slowing down global warming. The 2015 Paris agreement on climate change emphasised the importance of raising public awareness and participation to address this societal challenge. In this paper we introduce EnergyUse, a social and collaborative platform for raising awareness of climate change by enabling users to view and compare the actual energy consumption of various appliances, and to share and discuss energy conservation tips in an open and social environment. The platform collects data from smart plugs, and exports appliance consumption information and community-generated energy tips as linked data. We report on the system design, data modelling, platform usage, and an early deployment with a set of 58 initial participants. We also discuss the challenges, lessons learnt, and future platform developments.

Organization: Keio University.
Person: Kai Lenz.
Demo: "DBpedia Mappings Quality Assessment" — keywords: DBpedia, Linked Data Mapping, RML, RDFUnit, Data Quality.
Abstract: The root of schema violations for RDF data generated from (semi-)structured data often lies in the mappings, which are repeatedly applied and specify how an RDF dataset is generated. The DBpedia dataset, which derives from Wikipedia infoboxes, is no exception. To mitigate the violations, we proposed in previous work to validate the mappings that generate the data, instead of validating the generated data afterwards. In this work, we demonstrate how mapping validation is applied to DBpedia: DBpedia mappings are automatically translated to RML and validated by RDFUnit. The DBpedia mappings assessment can be executed frequently, because it requires significantly less time than validating the dataset. The validation results become available via a user-friendly interface, and the DBpedia community considers them in refining the DBpedia mappings or ontology, thus increasing the dataset quality.

Organization: ISMB.
Poster/demo: "Jailbreaking your reference lists: the OpenCitations strike again" — Silvio Peroni, David Shotton and Fabio Vitali (2016-10-19, 18:00–21:00).
Session: Coffee Break (2016-10-19, 10:30–11:00).
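An illustrative RML mapping, not one of DBpedia's actual mappings: it declares how rows of a CSV file become RDF, and is itself RDF, which is what lets tools like RDFUnit inspect the mapping without touching the generated dataset. The file name and IRIs are invented.

    from rdflib import Graph

    MAPPING = """
    @prefix rr:   <http://www.w3.org/ns/r2rml#> .
    @prefix rml:  <http://semweb.mmlab.be/ns/rml#> .
    @prefix ql:   <http://semweb.mmlab.be/ns/ql#> .
    @prefix foaf: <http://xmlns.com/foaf/0.1/> .

    <http://example.org/mapping#PersonMapping> a rr:TriplesMap ;
        rml:logicalSource [ rml:source "people.csv" ;
                            rml:referenceFormulation ql:CSV ] ;
        rr:subjectMap [ rr:template "http://example.org/person/{id}" ;
                        rr:class foaf:Person ] ;
        rr:predicateObjectMap [ rr:predicate foaf:name ;
                                rr:objectMap [ rml:reference "name" ] ] .
    """

    g = Graph().parse(data=MAPPING, format="turtle")
    print(len(g), "triples in the mapping graph")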
Demo: "Towards an Interface for User-Friendly Linked Data Generation Administration" — keywords: Linked Data Generation, Linked Data Workbench, RML, R2RML.
Abstract: Linked Data generation and publication remain challenging and complicated, in particular for data owners who are not Semantic Web experts or tech-savvy. The situation deteriorates when data from multiple heterogeneous sources, accessed via different interfaces, is integrated, and the Linked Data generation is a long-lasting activity repeated periodically, often adjusted and incrementally enriched with new data. We therefore propose the RML Workbench, a graphical user interface that supports data owners in administrating their Linked Data generation and publication workflow. The RML Workbench's underlying language is RML, since it allows the complete Linked Data generation workflow to be described declaratively. Thus, any Linked Data generation workflow specified by a user can be exported and reused by other tools interpreting RML.

Session: Poster and Demo session (2016-10-19, 18:00–21:00).
Person: Daria Stepanova.
Poster/demo: "A Unified Interface for Optimizing Continuous Query in Heterogeneous RDF Stream Processing Systems" — Seungjun Yoon, Sejin Chun, Xiongnan Jin and Kyong-Ho Lee (2016-10-19, 18:00–21:00).
Person: Martin Ringsquandl.
Organization: Institut de recherche en Électricité d'Hydro-Québec.

Paper: "SPARQL-to-SQL on Internet of Things Databases and Streams" — keywords: SPARQL, SQL, Query Translation, Internet of Things, Web of Things, Analytics.
Abstract: To realise a semantic Web of Things, the challenge of achieving efficient Resource Description Framework (RDF) storage and SPARQL query performance on Internet of Things (IoT) devices with limited resources has to be addressed. State-of-the-art SPARQL-to-SQL engines have been shown to outperform RDF stores on some benchmarks. In this paper, we describe an optimisation of the SPARQL-to-SQL approach, based on a study of time-series IoT data structures, that employs metadata abstraction and efficient translation by reusing existing SPARQL engines to produce Linked Data "just-in-time". We evaluate our approach against RDF stores, state-of-the-art SPARQL-to-SQL engines, and streaming SPARQL engines, in the context of IoT data and scenarios. We show that storage efficiency, with succinct row storage, and query performance can be improved by a factor of 2 up to 3 orders of magnitude.
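A minimal sketch of the general translation idea, not the paper's engine: with a fixed metadata mapping from predicates to columns of a time-series table, a basic graph pattern over one observation becomes a single SQL query. The table, columns, and predicates are invented stand-ins.

    PRED2COL = {
        "sosa:madeBySensor":    "sensor",
        "sosa:resultTime":      "time",
        "sosa:hasSimpleResult": "value",
    }

    def bgp_to_sql(patterns, table="observations"):
        select, where = [], []
        for _, pred, obj in patterns:           # all patterns share the subject ?obs
            col = PRED2COL[pred]
            if obj.startswith("?"):
                select.append(f"{col} AS {obj[1:]}")
            else:
                where.append(f"{col} = {obj!r}")
        sql = f"SELECT {', '.join(select)} FROM {table}"
        return sql + (f" WHERE {' AND '.join(where)}" if where else "")

    print(bgp_to_sql([("?obs", "sosa:madeBySensor", "turbine7"),
                      ("?obs", "sosa:resultTime", "?t"),
                      ("?obs", "sosa:hasSimpleResult", "?v")]))
    # SELECT time AS t, value AS v FROM observations WHERE sensor = 'turbine7'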
Person: Claudio Gutierrez.

Paper: "LODStats: The Data Web Census Dataset" — keywords: RDF, Linked Data, LOD Cloud, Statistics, Government Data.
Abstract: Over the past years, the size of the Data Web has increased significantly, which makes obtaining general insights into its growth and structure both more challenging and more desirable. The lack of such insights hinders important data management tasks such as quality, privacy, and coverage analysis. In this paper, we present LODStats, which provides a comprehensive picture of the current state of a significant part of the Data Web. LODStats integrates RDF datasets from the data.gov, publicdata.eu and datahub.io data catalogs and, at the time of writing, lists over 9,000 RDF datasets. For each RDF dataset, LODStats collects comprehensive statistics and makes these available adhering to the LDSO vocabulary. This analysis has been regularly published and enhanced over the past four years at the public platform lodstats.aksw.org. We give a comprehensive overview of the resulting dataset.
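A minimal sketch of LODStats-style dataset statistics, not the LODStats code (which streams over statements to scale to large dumps): load one small dataset with rdflib and collect a few of the counts such a census reports.

    from rdflib import Graph, RDF

    DATA = """
    @prefix ex: <http://example.org/> .
    ex:alice a ex:Person ; ex:knows ex:bob .
    ex:bob a ex:Person .
    """
    g = Graph().parse(data=DATA, format="turtle")

    stats = {
        "triples":    len(g),
        "subjects":   len(set(g.subjects())),
        "predicates": len(set(g.predicates())),
        "classes":    len(set(g.objects(predicate=RDF.type))),
    }
    print(stats)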
Person: Barry Clarke.
Person: Hiroaki Morikawa.
Person: Nicolas Seydoux.
Talk: "Ontology Refinement and Evaluation System based on Similarity of Is-a Hierarchies" — Takeshi Masuda (2016-10-18, 16:00–16:15).
Organization: University of Duisburg-Essen.
Person: David Carral.
Organization: Université du Québec à Montréal.
Organization: Tsinghua University.
Person: Sadahiro Kumagai.
Person: Matthew Rowe.
Organization: East China University of Science and Technology.
Person: Haofen Wang.
Organization: SynerScope B.V.
Person: Vanessa Lopez.
Person: Tong Ruan.
Person: Akira Maeda.
Person: Christoph Pinkel.
Talk: "Improving Open Data Usability through Semantics" — Sebastian Neumaier (2016-10-18, 16:30–16:45).
Person: Josiane Xavier Parreira.

Paper: "Efficient Algorithms for Association Finding and Frequent Association Pattern Mining" — keywords: association finding, graph search, distance oracle, frequent association pattern mining, canonical code.
Abstract: Finding associations between entities is a common information need in many areas. It has been facilitated by the increasing amount of graph-structured data on the Web describing relations between entities. In this paper, we define an association connecting multiple entities in a graph as a minimal connected subgraph containing all of them. We propose an efficient graph search algorithm for finding associations, which prunes the search space by exploiting distances between entities computed with a distance oracle. Having found a possibly large group of associations, we propose to mine frequent association patterns as a conceptual abstraction summarizing notable subgroups to be explored, and present an efficient mining algorithm based on canonical codes and partitions. Extensive experiments on large, real RDF datasets demonstrate the efficiency of the proposed algorithms.
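A minimal sketch of the association definition, not the paper's pruned search: approximate the minimal connected subgraph containing all query entities by merging pairwise shortest paths, a classic Steiner-tree-style heuristic (the paper instead prunes a graph search using distances from a distance oracle). The toy graph is invented.

    import networkx as nx

    g = nx.Graph()
    g.add_edges_from([("alice", "kit"), ("kit", "bob"), ("bob", "acme"),
                      ("alice", "dana"), ("dana", "acme")])

    def association(graph, entities):
        nodes = {entities[0]}
        for target in entities[1:]:
            nodes.update(nx.shortest_path(graph, entities[0], target))
        return graph.subgraph(nodes)            # induced connected subgraph

    sub = association(g, ["alice", "bob", "acme"])
    print(sorted(sub.edges()))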
Session: Keynote: Kathleen McKeown (2016-10-19, 09:30–10:30).
Organization: ATOS.
Person: Manel Achichi.
Person: Pankesh Patel.
Session: Special Event Session (2016-10-21, 15:30–16:50).
Organization: Université catholique de Louvain.
Organization: Technische Universität Dresden.
Person: Laura M. Daniele.
Organization: Leibniz Universität Hannover.
Person: Fabian Flöck.
Person: Taisuke Kimura.
Organization: Politecnico di Bari.

Paper: "LIXR: Quick, succinct conversion of XML to RDF" — keywords: XML, RDF, Scala, format conversion.
Abstract: This paper presents LIXR, a system for converting between RDF and XML. LIXR is based on a domain-specific language embedded in the Scala programming language. It supports the definition of transformations of datasets from RDF to XML in a declarative fashion, while still maintaining the flexibility of a full programming-language environment. We directly compare this system to other systems programmed in Java and XSLT, and show that the LIXR implementations are significantly shorter in terms of lines of code, in addition to being bidirectional and conceptually simple to understand.
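LIXR's DSL is embedded in Scala; as a rough Python illustration of the same declarative idea (not LIXR's syntax), a mapping table drives the conversion, which is what makes a bidirectional translation derivable from one specification. The vocabulary is invented.

    import xml.etree.ElementTree as ET
    from rdflib import Graph, Literal, Namespace, RDF

    EX = Namespace("http://example.org/")
    MAPPING = {"person": (EX.Person, {"name": EX.name})}  # tag -> (class, child tag -> property)

    def xml_to_rdf(xml_text):
        g = Graph()
        for i, el in enumerate(ET.fromstring(xml_text)):
            cls, props = MAPPING[el.tag]
            node = EX[f"{el.tag}{i}"]
            g.add((node, RDF.type, cls))
            for child in el:
                if child.tag in props:
                    g.add((node, props[child.tag], Literal(child.text)))
        return g

    print(xml_to_rdf("<people><person><name>Alice</name></person></people>")
          .serialize(format="turtle"))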
Person: Neil Wilson.

Poster: "Reducing the Network Load of Triple Pattern Fragments by Supporting Bind Joins" — keywords: Linked Data Fragments, Triple Pattern Fragments, Query Endpoints, Query Processing.
Abstract: The recently proposed Triple Pattern Fragment (TPF) interface aims at increasing the availability of Web-queryable RDF datasets by trading an increased client-side query processing effort for a significant reduction of server load. However, an additional aspect of this trade-off is a very high network load. To mitigate this drawback, we propose to extend the interface by allowing clients to augment TPF requests with a VALUES clause as introduced in SPARQL 1.1. In an ongoing research project we study the trade-offs of such an extended TPF interface and compare it to the pure TPF interface. With a poster at the conference we aim to present initial results of this research. In particular, we would like to present a series of experiments showing that a distributed, bind-join-based query execution using this extended interface can reduce the network load drastically, in terms of both the number of HTTP requests and the data transfer.
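A minimal sketch of the proposed request shape, not a finished client: intermediate solutions are serialized as a SPARQL 1.1 VALUES clause and attached to the TPF request, so the server ships only the triples compatible with those bindings. The endpoint and parameter names are illustrative.

    def values_clause(var, iris):
        return "VALUES ?%s { %s }" % (var, " ".join(f"<{iri}>" for iri in iris))

    bindings = ["http://example.org/alice", "http://example.org/bob"]
    request = {
        "endpoint":  "http://fragments.example.org/dataset",
        "subject":   "?person",
        "predicate": "http://xmlns.com/foaf/0.1/name",
        "object":    "?name",
        "values":    values_clause("person", bindings),   # the proposed extension
    }
    print(request["values"])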
Paper: "Algebraic calculi for weighted ontology alignments" — keywords: weighted ontology alignment, relation algebra, algebraic calculus, relaxed taxonomic relations.
Abstract: Alignments between ontologies usually come with numerical attributes expressing the confidence of each correspondence. Semantics supporting such confidences must generalise the semantics of alignments without confidence. There exists a semantics which satisfies this, but it introduces a discontinuity between weighted and non-weighted interpretations. Moreover, it does not provide a calculus for reasoning with weighted ontology alignments. This paper introduces a calculus for such alignments. It is given by an infinite relation-type algebra, the elements of which are weighted taxonomic relations. In addition, it approximates the non-weighted case in a continuous manner.

Session: Smart Planet (2016-10-19, 14:00–15:20).
Session: Linked Data (2016-10-20, 10:30–11:50).
Person: Paolo Ciancarini.
Experiments using the real-world datasets show that S3TF achieves up to 13% higher accuracy in rating predictions than the current best tensor method. It also extracts implicit relationships across services in the feature spaces by simultaneouse factorization. + Prediction methods of users' activities + Recommender systems using semantics in the knowledge bases + Prediction methods of users' activities + + Semantic Sensitive Simultaneous Tensor Factorization + + Semantic Sensitive Simultaneous Tensor Factorization + Tensor Factorization using semantic knowledge + + Semantics spread in large-scale knowledge bases can be used to intermediate heterogeneous users’ activity logs distributed in services; it can improve applications that assist users to decide next activities across services. Since user activities can be represented in terms of re- lationships involving three or more things (e.g. a user tags movie items on a webpage), they can be represented as a tensor. The recent semantic sensitive tensor factorization (SSTF) is promising since it achieves high accuracies in predicting users’ activities by applying semantics behind objects (e.g. item categories) to tensor factorization. However, SSTF fo- cuses on the factorization of data logs from a single service and thus has two problems: (1) the balance problem caused when simultaneously han- dling heterogeneous datasets and (2) the sparcity problem caused when there are insufficient data logs within a single service. Our solution, Se- mantic Sensitive Simultaneous Tensor Factorization (S3TF), tackles the above problems by: (1) It creates tensors for individual services and fac- torizes those tensors simultaneously; it does not force to create a tensor from multiple services and factorize the single tensor. This avoids low prediction results caused by the balance problem. (2) It utilizes shared semantics behind distributed logs and gives semantic biases to each ten- sor factorization. This avoids the sparsity problem by using the shared se- mantics among services. Experiments using the real-world datasets show that S3TF achieves up to 13% higher accuracy in rating predictions than the current best tensor method. It also extracts implicit relationships across services in the feature spaces by simultaneouse factorization. + Recommender systems using semantics in the knowledge bases + Semantics in the knowledge bases (e.g. 
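The biased-factorization idea in this abstract can be pictured with a toy CP-style model. The following Python sketch is not the authors' code; the data, dimensions and update rules are invented for illustration. Observed (user, item, tag) entries are fit by rank-k factors, and a semantic term pulls together items that share a category:

```python
# Toy sketch of semantically biased tensor factorization in the spirit
# of S3TF (not the authors' implementation). Observed entries of a
# (user, item, tag) tensor are approximated by a rank-k CP model; an
# extra bias term pulls items sharing a semantic category together.
import numpy as np

rng = np.random.default_rng(0)
n_users, n_items, n_tags, k = 50, 40, 30, 8
U, V, W = (rng.normal(0, 0.1, (n, k)) for n in (n_users, n_items, n_tags))

# Invented training data: (user, item, tag, rating) observations, plus
# item pairs that share a category (e.g. drawn from Linked Open Data).
obs = [(rng.integers(n_users), rng.integers(n_items),
        rng.integers(n_tags), rng.random()) for _ in range(500)]
same_category = [(rng.integers(n_items), rng.integers(n_items))
                 for _ in range(100)]

lr, reg, sem = 0.05, 0.01, 0.1
for epoch in range(20):
    for u, i, t, x in obs:
        pred = np.sum(U[u] * V[i] * W[t])   # CP reconstruction of one entry
        err = x - pred
        U[u] += lr * (err * V[i] * W[t] - reg * U[u])
        V[i] += lr * (err * U[u] * W[t] - reg * V[i])
        W[t] += lr * (err * U[u] * V[i] - reg * W[t])
    for i, j in same_category:              # semantic bias on item factors
        V[i] += lr * sem * (V[j] - V[i])
        V[j] += lr * sem * (V[i] - V[j])
```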
Person: Wouter Beek (id fc25e939e3bd6e57bbfa027d6546f3116e632492)
Person: Evgeny Kharlamov (id dc1d47b43c3d22ef49202d417a7cf655057bcfd7)

Talk: Entity-Relationship Extraction from Wikipedia Unstructured Text. Radityo Eko Prasojo. 2016-10-18 11:00-11:15.

Org: Ontology Engineering Group, Universidad Politécnica de Madrid
Org: INSA of Toulouse, LAAS-CNRS

Person: Laurens De Graeve (id e2c5d68771e49a539531943d0354069684316f0f)

Talk: Exception-enriched Rule Learning from Knowledge Graphs. Mohamed H. Gad-Elrab, Daria Stepanova, Jacopo Urbani and Gerhard Weikum. 2016-10-21 11:30-11:50.

Person: Shinichiro Tago (id d7dd4d1804ac29dc9ee2788b83bf23362939763e)
Org: UEC, The University of Electro-Communications
Person: Eric Prud'Hommeaux (id e2d67791b2a0ce3441c0c770f94daa130b4e6d95)
Org: University of Ulm
Person: Christophe Gravier (id af583d240b4536b3ead4ec62a1b46a8e5456e659)
Org: LT2C Claude Chappe
Org: Tianjin University
Person: Robert Piro (id a7cda19d058934ea762bdfb6b875610ec4dbaed9)

Poster/Demo: SWoTSuite: A Toolkit for Prototyping Cross-domain Semantic Web of Things Applications. Pankesh Patel, Amelie Gyrard, Dhavalkumar Thakker, Amit Sheth and Martin Serrano. 2016-10-19 18:00-21:00.

Talk: Operator-aware approach for boosting performance in RDF stream processing. Danh Le-Phuoc. 2016-10-20 14:10-14:30.

Org: Universität zu Lübeck, Institut für Informations Systeme
Org: LIRMM

Poster/Demo: SPARQLGX in Action: Efficient Distributed Evaluation of SPARQL with Apache Spark. Damien Graux, Louis Jachiet, Pierre Geneves and Nabil Layaida. 2016-10-19 18:00-21:00.

Talk: Towards Analytics Aware Ontology Based Access to Static and Streaming Data. Evgeny Kharlamov, Yannis Kotidis, Theofilos Mailis, Christian Neuenstadt, Charalampos Nikolaou, Özgür Lütfü Özcep, Christoforos Svingos, Dmitriy Zheleznyakov, Steffen Lamparter, Ian Horrocks, Yannis Ioannidis and Ralf Möller. 2016-10-20 15:50-16:10.

Talk: TripleWave: Spreading RDF Streams on the Web. Andrea Mauri, Jean-Paul Calbimonte, Daniele Dell'Aglio, Marco Balduini, Marco Brambilla, Emanuele Della Valle and Karl Aberer. 2016-10-20 14:30-14:50.

Person: Eirik Bakke
Person: Thanassis Tiropanis (id 9241d288757ff5c800606b716ae9c96f87c66493)
Org: CNRS
Person: Khai Nguyen (id 183ab288609caaa35e87d388df4443c568eb103f)
Org: Kno.e.sis Center, Wright State University

Poster/Demo: Gize: A Time Warp in the Web of Data. Valeria Fionda, Melisachew Wudage Chekol and Giuseppe Pirrò. 2016-10-19 18:00-21:00.

Person: David Purcell (id efc7e1ff9cf1a36a560b85f1ec91e16a7139a731)

Talk: Linked Data (in resourceless) Platforms: a mapping for Constrained Application Protocol. Giuseppe Loseto, Saverio Ieva, Filippo Gramegna, Michele Ruta, Floriano Scioscia and Eugenio Di Sciascio. 2016-10-19 12:00-12:20.

Person: Yasar Khan

Poster/Demo: On the Role of Semantics for Detecting pro-ISIS Stances on Social Media. Hassan Saif, Miriam Fernandez, Matthew Rowe and Harith Alani. 2016-10-19 18:00-21:00.

Person: Valeria Fionda (id f29ec74ebe2918cd0732a899e1949c503a144647)
Person: Yuting Song (id 3804e24b42c3292daac88d05a49accd8010b5f44)

Poster/Demo: DBpedia Entity Type Inference Using Categories. Lu Fang, Qingliang Miao and Yao Meng. 2016-10-19 18:00-21:00.

Person: Michel Buffa (id d50a9bdc107cdf7b3e32a131655b4d7f577a33c6)
Person: Junzhao Zhang (id 8a8535067464904d74264b02f2e2dce905730d37)
Org: Stuttgart Media University

Poster/Demo: Who-Does-What: A knowledge base of people's occupations and job activities. Jonas Bulegon Gassen, Stefano Faralli, Simone Paolo Ponzetto and Jan Mendling. 2016-10-19 18:00-21:00.

Paper: SOMM: Industry Oriented Ontology Management Tool
  Keywords: reasoning; constraints; industry; ontologies; information models
  Abstract: In this demo we present the SOMM system that resulted from an ongoing collaboration between Siemens and the University of Oxford. The goal of this collaboration is to facilitate design and management of ontologies that capture conceptual information models underpinning various industrial applications. SOMM supports engineers with little background on semantic technologies in the creation of such ontologies and in populating them with data. SOMM implements a fragment of OWL 2 RL extended with a form of integrity constraints for data validation, and it comes with support for schema and data reasoning, as well as for ontology integration. We demonstrate functionality of SOMM on two scenarios from energy and manufacturing domains.

Person: Wei Hu (id f7074d05b74deb43ec150671bb9b226578d20f2b)

Talk: Capturing Industrial Information Models with Ontologies and Constraints. Evgeny Kharlamov, Bernardo Cuenca Grau, Ernesto Jimenez-Ruiz, Steffen Lamparter, Gulnar Mehdi, Martin Ringsquandl, Yavor Nenov, Sebastian Brandt and Ian Horrocks. 2016-10-20 15:30-15:50.

Session: Ontologies (II), 2016-10-21 13:30-14:50

Talk: A Fine-Grained Evaluation of SPARQL Endpoint Federation Systems. Muhammad Saleem, Yasar Khan, Ali Hasnain, Ivan Ermilov and Axel-Cyrille Ngonga Ngomo. 2016-10-20 11:30-11:50.

Org: Science & Technology Research Laboratories, Japan Broadcasting Corporation (NHK)
Person: Amith P. Sheth

Poster/Demo: Structure-guiding Modular Reasoning for Expressive Ontologies. Changlong Wang, Xiaowang Zhang and Zhiyong Feng. 2016-10-19 18:00-21:00.

Person: Meghyn Bienvenu (id 7553a4556916b7c26b0955d745eec61f9aea5be5)
Org: Pontificia Universidad Católica de Chile

Poster/Demo: EXISTStential Aspects of SPARQL. David Martin and Peter Patel-Schneider. 2016-10-19 18:00-21:00.

Person: Chetana Gavankar (id e0a256306b228712f1e0f37eb0115e5f70faf13f)
Org: TU Darmstadt, Germany
Person: Andrea Splendiani (id d2b55d1cadb293d73c649c9aa834f3a15852fe91)
Paper: Smart Topic Miner: Supporting Springer Nature Editors with Semantic Web Technologies
  Keywords: Scholarly Data; Ontology Learning; Conference Proceedings; Metadata; Data Mining; Bibliographic Data; Scholarly Ontologies
  Abstract: Academic publishers, such as Springer Nature, annotate scholarly products with the appropriate research topics and keywords to facilitate the marketing process and to support (digital) libraries and academic search engines. This critical process is usually handled manually by experienced editors, leading to high costs and slow throughput. In this demo paper, we present Smart Topic Miner (STM), a semantic application designed to support the Springer Nature Computer Science editorial team in classifying scholarly publications. STM analyses conference proceedings and annotates them with a set of topics drawn from a large automatically generated ontology of research areas and a set of tags from Springer Nature Classification.

Person: Ian Harrow (id fb7c2d632e58ae40666baaabe677b8bb91edabcc)
Person: Shuya Abe (id 084acd609d8ef10c7b09810da9f23fb55d20ea61)
Person: Kartik Asooja
Person: Carlos Buil Aranda (id 250a676d59d5e43d3d41c99fa31c016132e9ee0f)

Session: Search (II), 2016-10-20 15:30-16:50

Person: An Jacobs (id a5d5374c8d420b63cec2aca4e83499227f8a56cd)

Poster/Demo: Implementing Customer Reception Service in Robot Cafe using Stream Reasoning and ROS based on PRINTEPS. Takeshi Morita, Yu Sugawara, Ryota Nishimura and Takahira Yamaguchi. 2016-10-19 18:00-21:00.

Person: Femke Ongenae (id c979080c0d162570600ee44eed83bfe0fe9c69b6)
Person: Nabil Layaida (id e1ab6c0bc3461716961f51ca8ff5d1fe192258b0)

Poster/Demo: SQuaRE: A Visual Tool For Creating R2RML Mappings. Michał Blinkiewicz and Jaroslaw Bak. 2016-10-19 18:00-21:00.

Person: Jing Mei (id d55031f4add439f198f6968ca1bc1b916806cbff)

Paper: A Collection of Benchmark Datasets for Systematic Evaluations of Machine Learning on the Semantic Web
  Keywords: Linked Open Data; Benchmarking; Machine Learning; Datasets
  Abstract: In recent years, several approaches for machine learning on the Semantic Web have been proposed. However, no extensive comparisons between those approaches have been undertaken, in particular due to a lack of publicly available, acknowledged benchmark datasets. In this paper, we present a collection of 22 benchmark datasets of different sizes, derived from existing Semantic Web datasets as well as from external classification and regression problems linked to datasets in the Linked Open Data cloud. Such a collection of datasets can be used to conduct qualitative performance testing and systematic comparisons of approaches.
Person: Andrea Giovanni Nuzzolese (id 87707a356b60f036a079c5268236791fbab9f85e)
Person: Pieter Heyvaert (id 37d41901db3bb687d255ca09084c4ada551096c2)

Poster/Demo: A Survey on Challenges in Web Markup Data for Entity Retrieval. Ran Yu, Besnik Fetahu, Ujwal Gadiraju and Stefan Dietze. 2016-10-19 18:00-21:00.

Talk: Predicting Energy Consumption of Ontology Reasoning over Mobile Devices. Isa Guclu, Yuan-Fang Li, Jeff Z. Pan and Martin J. Kollingbaum. 2016-10-19 14:00-14:20.

Poster/Demo: Access Logs Don't Lie: Towards Traffic Analytics for Linked Data Publishers. Luca Costabello, Pierre-Yves Vandenbussche, Gofran Shukair, Corine Deliot and Neil Wilson. 2016-10-19 18:00-21:00.

Person: Ruben Taelman (id 7c02091cc31fe5d2006aba1f9167fbd5cd28fef3)

Poster/Demo: Data Integration for the Media Value Chain. Henning Agt-Rickauer, Jörg Waitelonis, Tabea Tietz and Harald Sack. 2016-10-19 18:00-21:00.

Person: Aitor Soroa
Person: Manolis Koubarakis (id 92ea611cf55f95a0ffd94eca818bb9d8a3f9a735)

Session: Querying/SPARQL (II), 2016-10-21 13:30-14:50

Paper: What if machines could be creative?
  Keywords: description logics; concept; creativity; invention
  Abstract: In this demo proposal, we present a system that proposes generations of existing concepts such as "cars that park automatically" or "skyscrapers made of glass".

Poster/Demo: Interdisciplinary Classification of Audio Effects in the Audio Effect Ontology. Thomas Wilmering and Mark B. Sandler. 2016-10-19 18:00-21:00.

Poster/Demo: XB: A Large-scale Korean Knowledge Base for Question Answering Systems. Jongmin Lee, Youngkyoung Ham and Tony Lee. 2016-10-19 18:00-21:00.

Person: Manuel Atencia (id f7c78787302ad536b4811448194b9f8b47bfcf86)
Person: Ryutaro Ichise (id 0a09da4b327971fff4a6573caac2b4872cab5cc9)
Person: Ahmet Soylu (id bf7b4e41f5b18d0964cb5901e4a74a08d6f2b10b)
Person: Christian Bizer (id d293ced5ef76989393dc5a8380fb9b2c89c1f083)

Poster/Demo: Personalized robot interactions to intercept behavioral disturbances of people with dementia. Femke Ongenae, Femke De Backere, Jelle Nelis, Stijn De Pestel, Christof Mahieu, Shirley Elprama, Charlotte Jewell, An Jacobs, Pieter Simoens and Filip De Turck. 2016-10-19 18:00-21:00.

Person: Vitaveska Lanfranchi

Session: Lunch, 2016-10-20 11:50-13:30

Paper: Optimizing FOL reducible query answering: understanding performance challenges
  Keywords: Query optimization; Query answering; DL-Lite; FOL query reformulation
  Abstract: Semantic Web data management raises the challenge of answering queries under constraints (i.e., in the presence of implicit data). To bridge the gap between this extended setting and that of query evaluation provided by database engines, a reasoning step (w.r.t. the constraints) is necessary before query evaluation. A large and useful set of ontology languages enjoys FOL reducibility of query answering: queries can be answered by evaluating a SQLized first-order logic (FOL) formula (obtained from the query and the ontology) directly against the explicitly stored data (i.e., without considering the ontological constraints). Our demonstration showcases to the attendees, and analyzes, the performance of several reformulation-based query answering techniques, including one we recently devised, applied to the lightweight description logic DL-LiteR underpinning the W3C's OWL2 QL profile.
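The reformulation idea behind FOL reducibility can be pictured with a toy example. The following Python sketch (class names and facts invented) rewrites an atomic query into a union over subclasses and evaluates it directly against the stored facts, with no reasoning at query time:

```python
# Toy illustration of reformulation-based query answering in a
# DL-Lite-style setting: the query Professor(x) is rewritten, using the
# ontology's subclass axioms, into a union of atomic queries that is
# answered against the explicit data only.
subclass = {  # axioms: key is a subclass of value (invented example)
    "FullProfessor": "Professor",
    "AssistantProfessor": "Professor",
    "Professor": "FacultyMember",
}

def subclasses_of(cls):
    """All classes whose instances are implied to be instances of cls."""
    closure, changed = {cls}, True
    while changed:
        changed = False
        for sub, sup in subclass.items():
            if sup in closure and sub not in closure:
                closure.add(sub)
                changed = True
    return closure

data = [("alice", "FullProfessor"), ("bob", "AssistantProfessor"),
        ("carol", "Lecturer")]

# Reformulated query: Professor(x) becomes the union over its subclasses.
answers = {x for x, cls in data if cls in subclasses_of("Professor")}
print(answers)  # {'alice', 'bob'}
```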
Person: Stefan Schlobach (id a9f51c1c74b52bff440f635430f761876fa8b83b)

Poster/Demo: Lowering knowledge: Making constrained devices semantically interoperable. Nicolas Seydoux, Khalil Drira, Nathalie Hernandez and Thierry Monteil. 2016-10-19 18:00-21:00.

Person: Simone Paolo Ponzetto (id 8fb5b6bef5aaeb67a43acb4ea8924f144a03d1f8)
Person: Suresh Alse (id 22c36eea38557c8acf092a86cf1f130734f7a822)
Person: Victor Felder (id b343d8eb73f38f07f8c0dadbe1c867f8f4dcdd81)
Org: Knowledge Media Institute

Paper: Yuzu: Publishing Any Data as Linked Data
  Keywords: linked data; data conversion; data frontend
  Abstract: Linked data is one of the most important methods for improving the applicability of data; however, most data is not in linked data formats and raising it to linked data is still a significant challenge. We present Yuzu, an application that makes it easy to host legacy data in JSON, XML or CSV as linked data, while providing a clean interface with advanced features. The ease-of-use of this framework is shown by its adoption for a number of existing datasets including WordNet.
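The kind of lifting Yuzu automates can be sketched by hand with rdflib; the base URI, ontology namespace and column mapping below are invented for the example and will differ from Yuzu's own configuration:

```python
# Sketch of exposing a legacy CSV file as linked data (the task Yuzu
# automates; this is not Yuzu's code). Assumes rdflib 6.x.
import csv, io
from rdflib import Graph, Literal, Namespace
from rdflib.namespace import RDFS

BASE = Namespace("http://example.org/resource/")   # hypothetical base URI
EX = Namespace("http://example.org/ontology/")     # hypothetical vocabulary

legacy_csv = io.StringIO("id,label,population\nIE-D,Dublin,554554\n")

g = Graph()
for row in csv.DictReader(legacy_csv):
    subject = BASE[row["id"]]                      # mint a URI per row
    g.add((subject, RDFS.label, Literal(row["label"])))
    g.add((subject, EX.population, Literal(int(row["population"]))))

print(g.serialize(format="turtle"))
```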
Person: Ermyas Abebe (id a4132a2b9ab88c52ef4412fcd79d24e2fb4a0efe)
Person: Vlad Posea (id 10e096a0c0842fea011fb61ff468f90deb161a7c)

Paper: RDF2Vec: RDF Graph Embeddings for Data Mining
  Keywords: Data Mining; Graph Embeddings; Linked Open Data
  Abstract: Linked Open Data has been recognized as a valuable source for background information in data mining. However, most data mining tools require features in propositional form, i.e., a vector of nominal or numerical features associated with an instance, while Linked Open Data sources are graphs by nature. In this paper, we present RDF2Vec, an approach that uses language modeling approaches for unsupervised feature extraction from sequences of words, and adapts them to RDF graphs. We generate sequences by leveraging local information from graph sub-structures, harvested by Weisfeiler-Lehman Subtree RDF Graph Kernels and graph walks, and learn latent numerical representations of entities in RDF graphs. Our evaluation shows that such vector representations outperform existing techniques for the propositionalization of RDF graphs on a variety of different predictive machine learning tasks, and that feature vector representations of general knowledge graphs such as DBpedia and Wikidata can be easily reused for different tasks.
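A minimal sketch of the RDF2Vec recipe, not the authors' implementation: generate random graph walks with rdflib and feed them, as "sentences", to word2vec (gensim 4.x assumed; the source document URL is just an example):

```python
# Minimal RDF2Vec-style pipeline: random walks over an RDF graph become
# token sequences, from which word2vec learns entity embeddings.
import random
from rdflib import Graph
from gensim.models import Word2Vec  # assumes gensim 4.x

g = Graph()
g.parse("http://dbpedia.org/data/Dublin.ttl")  # any reachable RDF document

def walks_from(entity, depth=4, n_walks=20):
    """Random walks starting at entity, alternating predicates and objects."""
    walks = []
    for _ in range(n_walks):
        walk, node = [str(entity)], entity
        for _ in range(depth):
            edges = list(g.predicate_objects(node))
            if not edges:          # dead end (e.g. a literal)
                break
            pred, obj = random.choice(edges)
            walk += [str(pred), str(obj)]
            node = obj
        walks.append(walk)
    return walks

corpus = [w for e in set(g.subjects()) for w in walks_from(e)]
model = Word2Vec(corpus, vector_size=100, window=5, sg=1, min_count=1)
```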
Org: Université de Fribourg
Org: Accenture Technology Labs / INRIA
Person: John Vard (id 05ea0b74ea3b81b7481a0913163891d3cb6b3787)

Paper: PRONA: A Plugin for Well-Designed Approximate Queries in Jena
  Keywords: SPARQL; Approximate queries; Well-designed patterns; RDF
  Abstract: The time needed to answer a SPARQL query with all its exact solutions over a large-scale RDF dataset can exceed users' tolerable waiting time, especially when the query contains OPT operations, since OPT is the least conventional operator in SPARQL. It becomes essential to make a trade-off between query response time and solution accuracy. We propose PRONA, a plugin for well-designed approximate queries in Jena, which helps users answer well-designed SPARQL queries by approximate computation. The main features of PRONA comprise a SPARQL query engine with approximate queries, as well as various approximate degrees for users to choose.

Paper: Extending SPARQL for data analytic tasks
  Keywords: data analytics; data integration; SPARQL
  Abstract: SPARQL has many nice features for accessing data integrated across different data sources, which is an important step in any data analysis task. We report the use of SPARQL for two real data analytic use cases from the healthcare and life sciences domains, which exposed certain weaknesses in the current specification of SPARQL; specifically when the data being integrated is most conveniently accessed via RESTful services and in formats beyond RDF, such as XML. We therefore extended SPARQL with generalized 'service' constructs for accessing services beyond the SPARQL endpoints supported by 'service'; for efficiency, our constructs additionally needed to support posting data, which is also not supported by 'service'. Furthermore, data from multiple sources led to natural modularity in the queries, with different portions of the query pertaining to different sources, so we also extended SPARQL with a simple 'function' mechanism to isolate the mechanics of accessing each endpoint. We provide an open source implementation of this SPARQL endpoint in an RDF store called Quetzal, and evaluate its use in the two data analytic scenarios over real datasets.
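The gap the paper closes is visible in the hand-written glue it replaces. In this hypothetical Python sketch (both URLs and the payload shape are invented), values retrieved from a SPARQL endpoint are posted to a RESTful service, a step the paper's generalized 'service'/'function' constructs fold into the query itself:

```python
# Hand-written integration of a SPARQL endpoint with a RESTful service.
# Plain SPARQL 1.1 SERVICE cannot POST data to such a service, which is
# the limitation the paper's extension addresses. URLs are placeholders.
import requests

SPARQL_ENDPOINT = "http://example.org/sparql"     # hypothetical endpoint
REST_SERVICE = "http://example.org/api/enrich"    # hypothetical REST service

query = "SELECT ?gene WHERE { ?gene a <http://example.org/Gene> } LIMIT 10"
rows = requests.get(SPARQL_ENDPOINT,
                    params={"query": query, "format": "json"}
                    ).json()["results"]["bindings"]

# Post the retrieved bindings to the REST service for enrichment; the
# extended 'service' construct performs this call-out inside the query.
enriched = requests.post(
    REST_SERVICE,
    json={"genes": [r["gene"]["value"] for r in rows]}).json()
```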
Paper: PIOTRe: Personal Internet of Things Repository
  Keywords: Query Translation; Internet of Things; SQL; Analytics; SPARQL; Web Observatory; RSP
  Abstract: Resource-constrained Internet of Things (IoT) devices like Raspberry Pis, with specific performance optimisation, can serve as interoperable personal Linked Data repositories for IoT applications. In this demo paper we describe PIOTRe, a personal datastore that utilises our sparql2sql query translation technology on Pis to process, store and publish IoT time-series historical data and streams. We demonstrate, for a smart home scenario with PIOTRe: a real-time dashboard that utilises RDF stream processing, a set of descriptive analytics visualisations on historical data, a framework for registering stream queries within a local network, and a means of sharing metadata globally with HyperCat and Web Observatories.

Talk: Automatic Classification of Springer Nature Proceedings with Smart Topic Miner. Francesco Osborne, Angelo Salatino, Aliaksandr Birukou and Enrico Motta. 2016-10-21 11:10-11:30.

Session: Keynote: Christian Bizer, 2016-10-20 09:00-10:00

Org: Université Joseph Fourier
Person: Heiko Maus (id 421ac6bc9506d2f2e1d6640498b35d6c6231a13c)
Person: Ichiro Yamada (id 6dc74861de4f6d7be14507d7925d2fcf4a9d3d62)
Person: Fumihito Nishino (id fd1f1d905060f01fccf69d4bd0f37762449ecb1b)
Person: Thierry Monteil (id cb0a1b9eb51b03495a7c0ee59829c06d201de120)

Paper: Constructing Curriculum Ontology and Dynamic Learning Path Based on Resource Description Framework
  Keywords: learning path; education; ontology; curriculum; resource description framework; natural language processing; linked data; knowledge graph
  Abstract: School curricula are generated based on the academic year. Because students need to learn many subjects every year, related topics are placed into curricula discretely. In this study, we propose a method to construct a dynamic learning path which enables learners to study related topics continuously. In this process, we define two kinds of similarity score, an inheritance score and a context similarity score, to connect the learning paths of related topics. We also construct a curriculum ontology with the Resource Description Framework (RDF) to make the dynamic learning path accessible. Using the curriculum ontology, we develop a learning system for schools which shows a dynamic learning path with broadcast video clips.
Talk: Linked Disambiguated Distributional Semantic Networks. Stefano Faralli, Alexander Panchenko, Chris Biemann and Simone Paolo Ponzetto. 2016-10-21 10:30-10:50.

Person: Sejin Chun (id a97a78e57ea1fe838f9448e92463e2cae16b69b5)

Talk: Ontology-Based Design of Space Systems. Christian Hennig, Alexander Viehl, Benedikt Kämpgen and Harald Eisenmann. 2016-10-20 16:30-16:50.

Person: Itziar Aldabe

Talk: Translating Ontologies in a Real-World Setting with ESSOT. Mihael Arcan, Mauro Dragoni and Paul Buitelaar. 2016-10-19 14:20-14:40.

Person: Yuan-Fang Li (id f1286b01c0c51c46b55258088fc54824d6b86bf1)

Paper: Serving Ireland's Geospatial Information as Linked Data
  Keywords: Ontology Engineering; Linked Data; Geospatial Data
  Abstract: In this paper we present data.geohive.ie, which aims to serve Ireland's national geospatial data as authoritative Linked Data. Currently, the platform provides information on Irish administrative boundaries and was designed to support two use cases: serving boundary data of geographic features at various levels of detail and capturing the evolution of administrative boundaries. We report on the decisions taken for modeling and serving the information, such as the adoption of an appropriate URI strategy, the development of necessary ontologies, and the use of (named) graphs to support the aforementioned use cases.
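The named-graph technique mentioned for capturing boundary evolution can be illustrated with rdflib; the graph names, property and figures below are invented for the example:

```python
# Illustration of the named-graph idea described for data.geohive.ie:
# keep each year's version of a boundary in its own graph so the
# evolution of administrative boundaries stays queryable.
from rdflib import Dataset, Literal, Namespace, URIRef

EX = Namespace("http://example.org/boundary/")  # hypothetical namespace
ds = Dataset()

g2015 = ds.graph(URIRef("http://example.org/graph/2015"))
g2016 = ds.graph(URIRef("http://example.org/graph/2016"))

g2015.add((EX["dublin-city"], EX.areaKm2, Literal(117.8)))  # invented figures
g2016.add((EX["dublin-city"], EX.areaKm2, Literal(118.0)))

# Ask which graph (i.e. which year) asserts which area.
for graph_name, area in ds.query(
        "SELECT ?g ?area WHERE { GRAPH ?g "
        "{ ?b <http://example.org/boundary/areaKm2> ?area } }"):
    print(graph_name, area)
```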
Paper: Conference Linked Data: the ScholarlyData project
  Keywords: semantic web dog food; linked data; ontology design pattern
  Abstract: The Semantic Web Dog Food (SWDF) is the reference linked dataset of the Semantic Web community about papers, people, organisations, and events related to its academic conferences. In this paper we analyse the existing problems of generating, representing and maintaining Linked Data for the SWDF. With this work (i) we provide a refactored and cleaned SWDF dataset; (ii) we use a novel data model which improves the Semantic Web Conference Ontology, adopting best ontology design practices; and (iii) we provide an open source maintenance workflow to support a healthy growth of the dataset beyond the Semantic Web conferences.

Org: MIT

Poster/Demo: GovLOD: Towards a Linked Open Data Portal. Octavian Rinciog and Vlad Posea. 2016-10-19 18:00-21:00.

Person: Jürgen Jakobitsch (id 1592785a24a90d15257639d6016fb93706dfbb2d)
Org: WizeNoze
Org: Hanoi University of Science and Technology
Person: Sebastian Neumaier (id da0be3bba39587dd2d9f5c2793e1307fe9ddb14a)
Person: Ivan Ermilov (id ad7372712623405f14b0a640808cc490603e2163)
Person: Philippe Cudré-Mauroux (id b2a53294e6396a09d8b00cd04d5b90133946b642)

Talk: LinkGen: Multipurpose Linked Data Generator. Amit Joshi, Pascal Hitzler and Guozhu Dong. 2016-10-20 11:10-11:30.

Role: Professor at the University of Oslo
Org: Rakuten, Inc.

Person: Xiangnan Ren (id 4e67340de2275d16373425555cb401a3006329ef)

Paper: Ontology-Based Design of Space Systems
  Keywords: Space Systems; Systems Engineering; Reasoning; ECSS-E-TM-10-23; MBSE; Conceptual Data Model; OWL
  Abstract: In model-based systems engineering a model specifying the system's design is shared across a variety of disciplines and used to ensure the consistency and quality of the overall design. Existing implementations for describing these system models exhibit a number of shortcomings regarding their approach to data management. In this emerging applications paper, we present the application of an ontology for space system design providing increased semantic soundness of the underlying standardized data specification, enabling reasoners to identify problems in the system, and allowing the application of operational knowledge collected over past projects to the system to be designed. Based on a qualitative evaluation driven by data derived from an actual satellite design project, a reflection on the applicability of ontologies in the overall model-based systems engineering approach is pursued.

Person: Gulnar Mehdi (id ecf592192952adf92e2607df0010d0848414bb61)
Person: Andreas Dengel (id d462fcc476df738f8b1552398c0edf6008b9c626)
Person: Erfan Younesi (id cb54f05d376eac6899d7d9b6efcf31c7718eb866)
Person: Kazuya Ohshima (id d182d74107c24c54c79380a2cd7eddbe1b1d0b19)
Paper: Clinga: Bringing Chinese Physical and Human Geography in Linked Open Data
  Keywords: geography ontology; Chinese linked geographical dataset; Linked Data; Clinga
  Abstract: While the geographical domain has long been involved as an important part of the Linked Data, the small amount of Chinese linked geographical data hinders the integration and sharing of both Chinese and cross-lingual knowledge. In this paper, we contribute to the development of a new Chinese linked geographical dataset named Clinga, by obtaining data from the largest Chinese wiki encyclopedia. We manually design a new geography ontology to categorize a wide range of physical and human geographical entities, and carry out an automatic discovery of links to existing knowledge bases. The resulting Clinga dataset contains over half a million Chinese geographical entities and is open access.

Org: Graduate School of Yonsei University

Paper: Enabling combined software and data engineering: the ALIGNED suite of ontologies
  Keywords: Software Engineering; Linked Data; Data Engineering; Semantic Web; Ontologies
  Abstract: Effective, collaborative integration of software and big data engineering for Web-scale systems is now a crucial technical and economic challenge. This requires new combined data and software engineering processes and tools. Semantic metadata standards and linked data principles provide a technical grounding for such integrated systems given an appropriate model of the domain. In this paper we introduce the ALIGNED suite of ontologies specifically designed to model the information exchange needs of combined software and data engineering. The models have been deployed to enable: tool-chain integration, such as the exchange of data quality reports; cross-domain communication, such as interlinked data and software unit testing; and mediation of the system design process through the capture of design intents and as a source of context for model-driven software engineering processes. These ontologies are deployed in web-scale, data-intensive, system development environments in both the commercial and academic domains. We exemplify the usage of the suite on a complex collaborative software and data engineering scenario from the legal information system domain.

Paper: Automatic Classification of Springer Nature Proceedings with Smart Topic Miner
  Keywords: Metadata; Data Mining; Ontology Learning; Scholarly Ontologies; Bibliographic Data; Conference Proceedings; Scholarly Data
  Abstract: The process of classifying scholarly outputs is crucial to ensure timely access to knowledge. However, this process is typically carried out manually by expert editors, leading to high costs and slow throughput. In this paper we present Smart Topic Miner (STM), a novel solution which uses semantic web technologies to classify scholarly publications on the basis of a very large automatically generated ontology of research areas. STM was developed to support the Springer Nature Computer Science editorial team in classifying proceedings in the LNCS family. It analyses in real time a set of publications provided by an editor and produces a structured set of topics and a number of Springer Nature classification tags, which best characterise the given input. In this paper we present the architecture of the system and report on an evaluation study conducted with a team of Springer Nature editors. The results of the evaluation, which showed that STM classifies publications with a high degree of accuracy, are very encouraging and as a result we are currently discussing the required next steps to ensure large scale deployment within the company.
+ Ontology Learning + + + + Scholarly Data + Conference Proceedings + + Metadata + Scholarly Data + + + Conference Proceedings + + Bibliographic Data + + + + + Scholarly Ontologies + Automatic Classification of Springer Nature Proceedings with Smart Topic Miner + + + + + + + + + + + + + + + + Rosa Gil + + Rosa Gil + + + + + Rosa Gil + + + + + + + + Fraunhofer IAIS + + + + + + + Fraunhofer IAIS + + Fraunhofer IAIS + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Ziqi Zhang + + + + + Ziqi Zhang + + + + + + 23da01f2d0efbf766d22725f48eaee60e72096c1 + Ziqi Zhang + + + + + Duhai Alshukaili + + Duhai Alshukaili + + + 9f1925d57a346ce43fc0e013e19fce273dadc426 + + + Duhai Alshukaili + + + + + + + + + + + + db8051845a44150cf704274420bdc9c8954355c5 + + + + + + + Daniele Dell'Aglio + Daniele Dell'Aglio + + + Daniele Dell'Aglio + + + + + + + SPARQL + RDF + + + + + In this paper, we present a querying language for probabilistic RDF databases, where each triple has a probability, called pSRARQL, built on SPARQL, recommended by W3C as a querying language for RDF databases. Firstly, we present the syntax and semantics of pSPARQL. Secondly, we define the query problem of pSPARQL corresponding to probabilities of solutions. Finally, we show that the query evaluation of general pSPARQL patterns is PSPACE-complete. + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + In this paper, we present a querying language for probabilistic RDF databases, where each triple has a probability, called pSRARQL, built on SPARQL, recommended by W3C as a querying language for RDF databases. Firstly, we present the syntax and semantics of pSPARQL. Secondly, we define the query problem of pSPARQL corresponding to probabilities of solutions. Finally, we show that the query evaluation of general pSPARQL patterns is PSPACE-complete. 
+ uncertain queries + + + SPARQL + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + Probabilistic RDF + pSPARQL: A Querying Language for Probabilistic RDF (Extended Abstract) + + + RDF + + + + uncertain queries + Probabilistic RDF + + + + + + + + diff --git a/rulewerk-examples/src/main/data/input/ternaryBicycleEDB.nt.gz b/rulewerk-examples/src/main/data/input/ternaryBicycleEDB.nt.gz new file mode 100644 index 000000000..bdf85d3d6 Binary files /dev/null and b/rulewerk-examples/src/main/data/input/ternaryBicycleEDB.nt.gz differ diff --git a/rulewerk-examples/src/main/data/input/wheelEDB.csv.gz b/rulewerk-examples/src/main/data/input/wheelEDB.csv.gz new file mode 100644 index 000000000..3d933fd8a Binary files /dev/null and b/rulewerk-examples/src/main/data/input/wheelEDB.csv.gz differ diff --git a/vlog4j-core/src/test/data/input/empty.csv b/rulewerk-examples/src/main/data/output/.keep similarity index 100% rename from vlog4j-core/src/test/data/input/empty.csv rename to rulewerk-examples/src/main/data/output/.keep diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java new file mode 100644 index 000000000..18a01975f --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CompareWikidataDBpedia.java @@ -0,0 +1,110 @@ +package org.semanticweb.rulewerk.examples; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * This example shows how to integrate and compare the contents of two SPARQL + * endpoints, in this case for Wikidata and DBpedia. We are asking both sources + * for the same information (each using their terms to express it), and query + * for related English Wikipedia article URLs as a key to integrate the data + * over. For a fair comparison, we restrict to Wikidata entities that have a + * related English Wikipedia page (others cannot be in English DBpedia in the + * first place). + * + * The example query used asks for alumni of the University of Leipzig (one of + * the oldest European universities). + * + * @author Markus Kroetzsch + * + */ +public class CompareWikidataDBpedia { + + /** + * SPARQL pattern snippet to find an English Wikipedia page URL from a Wikidata + * entity ?result. + */ + static String sparqlGetWikiIriWikidata = "?enwikipage schema:about ?result ; " + + "schema:isPartOf . "; + /** + * SPARQL pattern snippet to find an English Wikipedia page URL from a DBpedia + * entity ?result. 
Some string magic is needed to replace the outdated http + * protocol used in DBpedia's Wikidata page names by the current https. + */ + static String sparqlGetWikiIriDBpedia = "?result ?enwikipageHttp . " + + "BIND( IRI(CONCAT(\"https\",SUBSTR(str(?enwikipageHttp), 5))) AS ?enwikipage)"; + + public static void main(final String[] args) throws ParsingException, IOException { + ExamplesUtils.configureLogging(); + + // Wikidata pattern: P69 is "educated at"; Q154804 is "University of Leipzig" + final String wikidataSparql = "?result wdt:P69 wd:Q154804 . " + sparqlGetWikiIriWikidata; + // DBpedia pattern: + final String dbpediaSparql = "?result . " + + sparqlGetWikiIriDBpedia; + + // Configure the SPARQL data sources and some rules to analyse results: + final String rules = "" // + + "@prefix wdqs: .\n" // + + "@prefix dbp: .\n" // + + "@source dbpResult[2] : sparql(dbp:sparql, \"result,enwikipage\", '''" + dbpediaSparql + "''') .\n" // + + "@source wdResult[2] : sparql(wdqs:sparql, \"result,enwikipage\", '''" + wikidataSparql + "''') .\n" // + + "% Rules:\n" // + + "inWd(?Wikipage) :- wdResult(?WdId,?Wikipage).\n" // + + "inDbp(?Wikipage) :- dbpResult(?DbpId,?Wikipage).\n" // + + "result(?Wikipage) :- inWd(?Wikipage).\n" // + + "result(?Wikipage) :- inDbp(?Wikipage).\n" // + + "match(?WdId,?DbpId) :- dbpResult(?DbpId,?Wikipage), wdResult(?WdId,?Wikipage).\n" + + "dbpOnly(?Wikipage) :- inDbp(?Wikipage), ~inWd(?Wikipage).\n" + + "wdpOnly(?WdId,?Wikipage) :- wdResult(?WdId,?Wikipage), ~inDbp(?Wikipage).\n"; // + + System.out.println("Knowledge base used in this example:\n\n" + rules); + + final KnowledgeBase kb = RuleParser.parse(rules); + + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + final long resultCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("result(?X)")) + .getCount(); + final long wdCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inWd(?X)")).getCount(); + final long dbpCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("inDbp(?X)")).getCount(); + + System.out.println("Found " + resultCount + " matching entities overall, of which " + wdCount + + " were in Wikidata and " + dbpCount + " were in DBPedia"); + + System.out.println("We focus on results found in DBpedia only (usually the smaller set)."); + ExamplesUtils.printOutQueryAnswers("dbpOnly(?X)", reasoner); + + System.out.println("Note: some of these results might still be in Wikidata, due to:\n" + + "* recent Wikipedia article renamings that are not updated in DBpedia\n" + + "* failure to match Wikipedia URLs due to small differences in character encoding\n"); + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java new file mode 100644 index 000000000..7d9cbb509 --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/CountingTriangles.java @@ -0,0 +1,81 @@ +package org.semanticweb.rulewerk.examples; + +import java.io.FileInputStream; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.RuleParser;
+
+/**
+ * In this example we count the number of triangles in the symmetric
+ * sharingBorderWith relation from Wikidata.
+ *
+ * @author Markus Kroetzsch
+ * @author Larry Gonzalez
+ *
+ */
+public class CountingTriangles {
+
+    public static void main(final String[] args) throws IOException, ParsingException {
+        ExamplesUtils.configureLogging();
+
+        KnowledgeBase kb;
+        /* Configure rules */
+        try {
+            kb = RuleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "counting-triangles.rls"));
+        } catch (final ParsingException e) {
+            System.out.println("Failed to parse rules: " + e.getMessage());
+            return;
+        }
+        System.out.println("Rules used in this example:");
+        kb.getRules().forEach(System.out::println);
+        System.out.println("");
+
+        try (VLogReasoner reasoner = new VLogReasoner(kb)) {
+
+            System.out.println("Note: Materialisation includes SPARQL query answering.");
+
+            /* Initialise reasoner and compute inferences */
+            reasoner.reason();
+
+            final double countries = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("country(?X)"))
+                    .getCount();
+            final double shareBorder = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("shareBorder(?X,?Y)"))
+                    .getCount();
+            final double triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("triangle(?X,?Y,?Z)"))
+                    .getCount();
+
+            System.out.print("Found " + countries + " countries in Wikidata");
+            // Due to symmetry, each joint border is found twice, hence we divide by 2:
+            System.out.println(", with " + (shareBorder / 2) + " pairs of them sharing a border.");
+            // Due to symmetry, each triangle is found six times, hence we divide by 6:
+            System.out.println("The number of triangles of countries that mutually border each other was "
+                    + (triangles / 6) + ".");
+        }
+
+    }
+
+} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java
new file mode 100644
index 000000000..f903a5207
--- /dev/null
+++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/DoidExample.java
@@ -0,0 +1,81 @@
+package org.semanticweb.rulewerk.examples;
+
+/*-
+ * #%L
+ * Rulewerk Examples
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * This example reasons about human diseases, based on information from the + * Disease Ontology (DOID) and Wikidata. It illustrates how to load data from + * different sources (RDF file, SPARQL), and reason about these inputs using + * rules that are loaded from a file. The rules used here employ existential + * quantifiers and stratified negation. + * + * @author Markus Kroetzsch + * @author Larry Gonzalez + */ +public class DoidExample { + + public static void main(final String[] args) throws IOException, ParsingException { + ExamplesUtils.configureLogging(); + + /* Configure rules */ + KnowledgeBase kb; + try { + kb = RuleParser.parse(new FileInputStream(ExamplesUtils.INPUT_FOLDER + "/doid.rls")); + } catch (final ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } + System.out.println("Rules used in this example:"); + kb.getRules().forEach(System.out::println); + System.out.println(""); + + try (Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(ExamplesUtils.OUTPUT_FOLDER + "vlog.log"); + reasoner.setLogLevel(LogLevel.DEBUG); + + System.out.println("Note: Materialisation includes SPARQL query answering."); + + /* Initialise reasoner and compute inferences */ + reasoner.reason(); + + /* Execute some queries */ + final List queries = Arrays.asList("humansWhoDiedOfCancer(?X)", "humansWhoDiedOfNoncancer(?X)"); + System.out.println("\nNumber of inferred tuples for selected query atoms:"); + for (final String queryString : queries) { + double answersCount = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral(queryString)).getCount(); + System.out.println(" " + queryString + ": " + answersCount); + } + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java new file mode 100644 index 000000000..8d18a35f3 --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/ExamplesUtils.java @@ -0,0 +1,155 @@ +package org.semanticweb.rulewerk.examples; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.util.ArrayList; +import java.util.List; + +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.LiteralQueryResultPrinter; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public final class ExamplesUtils { + + public static final String OUTPUT_FOLDER = "src/main/data/output/"; + public static final String INPUT_FOLDER = "src/main/data/input/"; + + /* + * This is a utility class. Therefore, it is best practice to do the following: + * (1) Make the class final, (2) make its constructor private, (3) make all its + * fields and methods static. This prevents the classes instantiation and + * inheritance. + */ + private ExamplesUtils() { + + } + + /** + * Defines how messages should be logged. This method can be modified to + * restrict the logging messages that are shown on the console or to change + * their formatting. See the documentation of Log4J for details on how to do + * this. + * + * Note: The VLog C++ backend performs its own logging. The log-level for this + * can be configured using + * {@link Reasoner#setLogLevel(org.semanticweb.rulewerk.core.reasoner.LogLevel)}. + * It is also possible to specify a separate log file for this part of the logs. + */ + public static void configureLogging() { + // Create the appender that will write log messages to the console. + final ConsoleAppender consoleAppender = new ConsoleAppender(); + // Define the pattern of log messages. + // Insert the string "%c{1}:%L" to also show class name and line. + final String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; + consoleAppender.setLayout(new PatternLayout(pattern)); + // Change to Level.ERROR for fewer messages: + consoleAppender.setThreshold(Level.ERROR); + + consoleAppender.activateOptions(); + Logger.getRootLogger().addAppender(consoleAppender); + } + + /** + * Prints out the answers given by {@code reasoner} to the query + * ({@code queryAtom}). 
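+ * <p>
+ * A minimal illustrative call (our sketch, not from the original Javadoc),
+ * assuming a knowledge base that defines a unary predicate {@code p}:
+ * {@code printOutQueryAnswers(RuleParser.parsePositiveLiteral("p(?X)"), reasoner);}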
+ * + * @param queryAtom query to be answered + * @param reasoner reasoner to query on + */ + public static void printOutQueryAnswers(final PositiveLiteral queryAtom, final Reasoner reasoner) { + System.out.println("Answers to query " + queryAtom + " :"); + OutputStreamWriter writer = new OutputStreamWriter(System.out); + LiteralQueryResultPrinter printer = new LiteralQueryResultPrinter(queryAtom, writer, + reasoner.getKnowledgeBase().getPrefixDeclarationRegistry()); + try (final QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { + while (answers.hasNext()) { + printer.write(answers.next()); + writer.flush(); + } + System.out.println("Query answers are: " + answers.getCorrectness()); + } catch (IOException e) { + throw new RuntimeException(e); + } + System.out.println(); + } + + /** + * Prints out the answers given by {@code reasoner} to the query + * ({@code queryAtom}). + * + * @param queryString query to be answered + * @param reasoner reasoner to query on + */ + public static void printOutQueryAnswers(final String queryString, final Reasoner reasoner) { + try { + final PositiveLiteral query = RuleParser.parsePositiveLiteral(queryString); + printOutQueryAnswers(query, reasoner); + } catch (final ParsingException e) { + throw new RulewerkRuntimeException(e.getMessage(), e); + } + } + + /** + * Creates an Atom with @numberOfVariables distinct variables + * + * @param predicateName for the new predicate + * @param arity number of variables + */ + private static PositiveLiteral makeQueryAtom(final String predicateName, final int arity) { + final List vars = new ArrayList<>(); + for (int i = 0; i < arity; i++) { + vars.add(Expressions.makeUniversalVariable("x" + i)); + } + return Expressions.makePositiveLiteral(predicateName, vars); + } + + /** + * Exports the extension of the Atom with name @predicateName + * + * @param reasoner reasoner to query on + * @param atomName atom's name + * @param arity atom's arity + */ + public static void exportQueryAnswersToCSV(final Reasoner reasoner, final String atomName, final int arity) + throws ReasonerStateException, IOException { + final PositiveLiteral atom = makeQueryAtom(atomName, arity); + final String path = ExamplesUtils.OUTPUT_FOLDER + atomName + ".csv"; + + final Correctness correctness = reasoner.exportQueryAnswersToCsv(atom, path, true); + + System.out.println("Query answers are: " + correctness); + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java new file mode 100644 index 000000000..9bec5600e --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/InMemoryGraphAnalysisExample.java @@ -0,0 +1,104 @@ +package org.semanticweb.rulewerk.examples; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogInMemoryDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +/** + * This example shows how to reason efficiently with data sets generated in + * Java. We generate a random graph with several million edges, check + * connectivity, and count triangles. + * + * Parameters can be modified to obtain graphs of different sizes and density. + * It should be noted, however, that the number of triangles in reasonably dense + * graphs tends to be huge, and it is easy to exhaust memory in this way. + * + * @author Markus Kroetzsch + * + */ +public class InMemoryGraphAnalysisExample { + + public static void main(final String[] args) throws ParsingException, IOException { + ExamplesUtils.configureLogging(); + + /* 1. Create a simple random graph */ + System.out.println("Generating random graph ..."); + final int vertexCount = 10000; + final double density = 0.03; + // initialise data source for storing edges (estimate how many we'll need) + final InMemoryDataSource edges = new VLogInMemoryDataSource(2, (int) (vertexCount * vertexCount * density) + 1000); + int edgeCount = 0; + for (int i = 1; i <= vertexCount; i++) { + for (int j = 1; j <= vertexCount; j++) { + if (Math.random() < density) { + edges.addTuple("v" + i, "v" + j); + edgeCount++; + } + } + } + // also make a unary data source to mark vertices: + final InMemoryDataSource vertices = new VLogInMemoryDataSource(1, vertexCount); + for (int i = 1; i <= vertexCount; i++) { + vertices.addTuple("v" + i); + } + System.out.println("Generated " + edgeCount + " edges in random graph of " + vertexCount + " vertices."); + + /* 2. Initialise database with random data and some rules */ + + final String rules = "" // + + "biedge(?X,?Y) :- edge(?X,?Y), edge(?Y,?X) ." // + + "connected(v1) ." // + + "connected(?X) :- connected(?Y), biedge(?Y,?X) ." // + + "unreachable(?X) :- vertex(?X), ~connected(?X) . " // + + "triangle(?X, ?Y, ?Z) :- biedge(?X,?Y), biedge(?Y, ?Z), biedge(?Z,?X) ." // + + "loop(?X,?X) :- edge(?X,?X) . " // + + "properTriangle(?X, ?Y, ?Z) :- triangle(?X,?Y,?Z), ~loop(?X,?Y), ~loop(?Y, ?Z), ~loop(?Z, ?X) . "; + + final KnowledgeBase kb = RuleParser.parse(rules); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("vertex", 1), vertices)); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("edge", 2), edges)); + + /* 3. 
Use reasoner to compute some query results */ + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + final long unreachable = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("unreachable(?X)")) + .getCount(); + final long triangles = reasoner.countQueryAnswers(RuleParser.parsePositiveLiteral("properTriangle(?X,?Y,?Z)")) + .getCount(); + + System.out + .println("Number of vertices not reachable from vertex 1 by a bi-directional path: " + unreachable); + System.out.println("Number of proper bi-directional triangles: " + (triangles / 6) + " (found in " + triangles + " matches due to symmetry.)"); + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java new file mode 100644 index 000000000..3f0f1ab6e --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/SimpleReasoningExample.java @@ -0,0 +1,87 @@ +package org.semanticweb.rulewerk.examples; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * This example demonstrates the basic usage of Rulewerk for rule reasoning. We + * are using a fixed set of rules and facts defined in Java without any external + * sources, and we query for some of the results. + * + * @author Markus Kroetzsch + * + */ +public class SimpleReasoningExample { + + public static void main(final String[] args) throws IOException { + + ExamplesUtils.configureLogging(); // use simple logger for the example + + // Define some facts and rules in VLog's basic syntax: + final String rules = "% --- Some facts --- \n" // + + "location(germany,europe). \n" // + + "location(uk,europe). \n" // + + "location(saxony,germany). \n" // + + "location(dresden,saxony). \n" // + + "city(dresden). \n" // + + "country(germany). \n" // + + "country(uk). \n" // + + "university(tudresden, germany). \n" // + + "university(uoxford, uk) . \n" // + + "streetAddress(tudresden, \"Mommsenstraße 9\", \"01069\", \"Dresden\") . \n" // + + "zipLocation(\"01069\", dresden) . \n" // + + "% --- Standard recursion: locations are transitive --- \n" // + + "locatedIn(?X,?Y) :- location(?X,?Y) . \n" // + + "locatedIn(?X,?Z) :- locatedIn(?X,?Y), locatedIn(?Y,?Z) . \n" // + + "% --- Build address facts using the city constant --- \n" // + + "address(?Uni, ?Street, ?ZIP, ?City) :- streetAddress(?Uni, ?Street, ?ZIP, ?CityName), zipLocation(?ZIP, ?City) . 
\n" + + "% --- Value invention: universities have some address --- \n" // + + "address(?Uni, !Street, !ZIP, !City), locatedIn(!City, ?Country) :- university(?Uni, ?Country) . \n" + + "% --- Negation: organisations in Europe but not in Germany --- \n" // + + "inEuropeOutsideGermany(?Org) :- address(?Org, ?S, ?Z, ?City), locatedIn(?City, europe), ~locatedIn(?City, germany) . \n" + + "% ---\n"; + + System.out.println("Knowledge base used in this example:\n\n" + rules); + + KnowledgeBase kb; + try { + kb = RuleParser.parse(rules); + } catch (final ParsingException e) { + System.out.println("Failed to parse rules: " + e.getMessage()); + return; + } + + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + /* Execute some queries */ + ExamplesUtils.printOutQueryAnswers("address(?Org, ?Street, ?ZIP, ?City)", reasoner); + ExamplesUtils.printOutQueryAnswers("locatedIn(?place, europe)", reasoner); + ExamplesUtils.printOutQueryAnswers("inEuropeOutsideGermany(?Org)", reasoner); + } + } +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java new file mode 100644 index 000000000..4aea67362 --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromCsvFile.java @@ -0,0 +1,116 @@ +package org.semanticweb.rulewerk.examples.core; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * This example shows how facts can be imported from files in the CSV format. + * Specifically, it imports from a {@code .csv.gz} file, but you can also import + * from {@code .csv} files. Moreover, it shows how query answers that result + * from reasoning over these facts can be exported to {@code .csv} files. + *
+ * For importing, a {@link CsvFileDataSource} that contains a path to the
+ * corresponding {@code .csv.gz} file must be created. A {@code .csv} file
+ * contains facts in the CSV format over exactly one predicate. A
+ * {@code .csv.gz} file is the gzipped version of such a {@code .csv} file.
+ *
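+ * For example (our illustration, reusing the declaration syntax that appears
+ * elsewhere in this change), a gzipped CSV file can be declared as the source
+ * of facts for a binary predicate with:
+ * {@code @source hasPart[2] : load-csv("src/main/data/input/hasPartEDB.csv.gz") .}
+ *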
+ * For exporting, a path to the output {@code .csv} file must be specified. + * + * @author Christian Lewe + * @author Irina Dragoste + * @author Markus Kroetzsch + * + */ +public class AddDataFromCsvFile { + + public static void main(final String[] args) throws IOException, ParsingException { + + ExamplesUtils.configureLogging(); + + final String initialFactsHasPart = ""// a file input: + + "@source hasPart[2] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "hasPartEDB.csv.gz\") ."; + + final String rules = "" // first declare file inputs: + + "@source bicycle[1] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "bicycleEDB.csv.gz\") ." + + "@source wheel[1] : load-csv(\"" + ExamplesUtils.INPUT_FOLDER + "wheelEDB.csv.gz\") ." + // every bicycle has some part that is a wheel: + + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." + // every wheel is part of some bicycle: + + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." + // hasPart and isPartOf are mutually inverse relations: + + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." // + + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) ."; + + /* + * Loading, reasoning, and querying while using try-with-resources to close the + * reasoner automatically. + */ + final KnowledgeBase kb = new KnowledgeBase(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + + /* + * 1. Loading the initial facts with hasPart predicate into reasoner. + */ + RuleParser.parseInto(kb, initialFactsHasPart); + reasoner.reason(); + + /* + * Query initial facts with hasPart predicate. + */ + System.out.println("Before materialisation:"); + ExamplesUtils.printOutQueryAnswers("hasPart(?X, ?Y)", reasoner); + + /* + * 2. Loading further facts and rules into the reasoner, and materialising the + * loaded facts with the rules. + */ + RuleParser.parseInto(kb, rules); + /* The reasoner will use the Restricted Chase by default. */ + reasoner.reason(); + + /* + * Querying facts with hasPart predicate after materialisation. + */ + System.out.println("After materialisation:"); + final PositiveLiteral hasPartXY = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)"); + ExamplesUtils.printOutQueryAnswers(hasPartXY, reasoner); + + /* Exporting query answers to {@code .csv} files. */ + reasoner.exportQueryAnswersToCsv(hasPartXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartWithBlanks.csv", true); + reasoner.exportQueryAnswersToCsv(hasPartXY, ExamplesUtils.OUTPUT_FOLDER + "hasPartWithoutBlanks.csv", + false); + final PositiveLiteral hasPartRedBikeY = RuleParser.parsePositiveLiteral("hasPart(redBike, ?Y)"); + reasoner.exportQueryAnswersToCsv(hasPartRedBikeY, + ExamplesUtils.OUTPUT_FOLDER + "hasPartRedBikeWithBlanks.csv", true); + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java new file mode 100644 index 000000000..50770072d --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromRdfFile.java @@ -0,0 +1,107 @@ +package org.semanticweb.rulewerk.examples.core; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * This example shows how facts can be imported from files in the RDF N-Triples + * format. Specifically, it imports from a {@code .nt.gz} file, but you can also + * import from {@code .nt} files. Moreover, it shows how query answers that + * result from reasoning over these facts can be exported to {@code .csv} files. + *
+ * This example is an adaptation of {@link AddDataFromCsvFile}, where the rules
+ * have been modified to work with the ternary predicates that N-Triples
+ * enforces.
+ *
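+ * For instance (our illustration): where the CSV example works with binary
+ * atoms like {@code hasPart(?X, ?Y)}, the N-Triples variant uses a single
+ * ternary predicate, queried with atoms of the form {@code triple(?S, ?P, ?O)}
+ * (the predicate name here is chosen for illustration only).
+ *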
+ * For importing, an {@link RdfFileDataSource} that contains a path to the
+ * corresponding {@code .nt.gz} file must be created. An {@code .nt} file
+ * contains facts in the RDF N-Triples format, which can be associated with a
+ * ternary {@link Predicate}. A {@code .nt.gz} file is the gzipped version of
+ * such an {@code .nt} file.
+ *
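+ * For instance (our sketch, following the constructor call used in
+ * {@code DoidExampleGraal} later in this change):
+ * {@code new RdfFileDataSource("src/main/data/input/ternaryBicycleEDB.nt.gz")}.
+ *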
+ * For exporting, a path to the output {@code .csv} file must be specified.
+ *
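+ * For example (our illustration, mirroring the calls in
+ * {@link AddDataFromCsvFile}):
+ * {@code reasoner.exportQueryAnswersToCsv(queryAtom, "src/main/data/output/answers.csv", true)},
+ * where the final flag controls whether answers containing blanks are
+ * included.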
+ * <ul>
+ * <li>Setting the log level is done via
+ * {@link Reasoner#setLogLevel(LogLevel)}, the default being
+ * {@link LogLevel#WARNING}.</li>
+ * <li>The log file where the logging information will be exported is set
+ * via {@link Reasoner#setLogFile(String)}. If no log file is set, or the log
+ * file is invalid, the logging will be redirected to the System output. If the
+ * log file does not exist at the given path, it will be created. If a file
+ * already exists, it will be overwritten, so we suggest backing up and
+ * versioning log files.</li>
+ * </ul>
+ */
+public class ConfigureReasonerLogging {
+    private static String logsFolder = "src/main/logs/";
+
+    /**
+     * Path to the file where the default WARNING level reasoner logs will be
+     * exported.
+     */
+    private static String reasonerWarningLogFilePath = logsFolder + "ReasonerWarningLogFile.log";
+
+    /**
+     * Path to the file where INFO level reasoner logs will be exported.
+     */
+    private static String reasonerInfoLogFilePath = logsFolder + "ReasonerInfoLogFile.log";
+
+    /**
+     * Path to the file where DEBUG level reasoner logs will be exported.
+     */
+    private static String reasonerDebugLogFilePath = logsFolder + "ReasonerDebugLogFile.log";
+
+    public static void main(final String[] args) throws IOException, ParsingException {
+
+        try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) {
+            final KnowledgeBase kb = reasoner.getKnowledgeBase();
+            /* exists z. B(?y, !z) :- A(?x, ?y) . */
+            kb.addStatements(RuleParser.parseRule("B(?Y, !Z) :- A(?X, ?Y) ."));
+            /* B(?y, ?x), A(?y, ?x) :- B(?x, ?y) . */
+            kb.addStatements(RuleParser.parseRule("B(?Y, ?X), A(?Y, ?X) :- B(?X, ?Y) ."));
+            /* A(c,d) */
+            kb.addStatement(RuleParser.parseFact("A(\"c\",\"d\")"));
+
+            /*
+             * Default reasoner log level is WARNING.
+             */
+            reasoner.setLogFile(reasonerWarningLogFilePath);
+            reasoner.reason();
+
+            /*
+             * We reset the reasoner and repeat reasoning over the same knowledge base, with
+             * different log levels.
+             */
+            reasoner.resetReasoner();
+
+            /*
+             * INFO level logs the number of iterations, the materialisation duration (in
+             * milliseconds), and the number of derivations.
+             */
+            reasoner.setLogLevel(LogLevel.INFO);
+            /*
+             * If no log file is set, or the given log file is invalid, the reasoner
+             * logging is redirected to System output by default.
+             */
+            reasoner.setLogFile(reasonerInfoLogFilePath);
+
+            reasoner.reason();
+
+            reasoner.resetReasoner();
+
+            /*
+             * DEBUG level is the most informative, logging internal details useful for
+             * debugging: rule optimisations, rule application details, etc.
+             */
+            reasoner.setLogLevel(LogLevel.DEBUG);
+            /*
+             * If no log file is set, or the given log file is invalid, the reasoner
+             * logging is redirected to System output by default.
+             */
+            reasoner.setLogFile(reasonerDebugLogFilePath);
+            reasoner.reason();
+        }
+
+    }
+
+} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java
new file mode 100644
index 000000000..84b1291af
--- /dev/null
+++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/SkolemVsRestrictedChaseTermination.java
@@ -0,0 +1,136 @@
+package org.semanticweb.rulewerk.examples.core;
+
+/*-
+ * #%L
+ * Rulewerk Examples
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.reasoner.Algorithm;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;
+import org.semanticweb.rulewerk.examples.ExamplesUtils;
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.RuleParser;
+
+/**
+ * This example shows non-termination of the Skolem Chase, versus termination of
+ * the Restricted Chase on the same set of rules and facts. Note that the
+ * Restricted Chase is the default reasoning algorithm, as it terminates in most
+ * cases and generates a smaller number of facts.
+ *
+ * @author Irina Dragoste
+ *
+ */
+public class SkolemVsRestrictedChaseTermination {
+
+    public static void main(final String[] args) throws IOException, ParsingException {
+
+        ExamplesUtils.configureLogging();
+
+        final String facts = ""// define some facts:
+                + "bicycle(bicycle1) ." //
+                + "hasPart(bicycle1, wheel1) ." //
+                + "wheel(wheel1) ." //
+                + "bicycle(bicycle2) .";
+
+        final String rules = ""
+                // every bicycle has some part that is a wheel:
+                + "hasPart(?X, !Y), wheel(!Y) :- bicycle(?X) ." //
+                // every wheel is part of some bicycle:
+                + "isPartOf(?X, !Y), bicycle(!Y) :- wheel(?X) ." //
+                // hasPart and isPartOf are mutually inverse relations:
+                + "hasPart(?X, ?Y) :- isPartOf(?Y, ?X) ." //
+                + "isPartOf(?X, ?Y) :- hasPart(?Y, ?X) .";
+
+        /*
+         * 1. Load facts into a knowledge base
+         */
+        final KnowledgeBase kb = RuleParser.parse(facts);
+
+        /*
+         * 2. Load the knowledge base into the reasoner
+         */
+        try (VLogReasoner reasoner = new VLogReasoner(kb)) {
+            reasoner.reason();
+
+            /*
+             * 3. Query the reasoner before applying rules for fact materialisation
+             */
+            final PositiveLiteral queryHasPart = RuleParser.parsePositiveLiteral("hasPart(?X, ?Y)");
+
+            /* See that there are no hasPart facts before reasoning. */
+            System.out.println("Before reasoning is started, no inferences have been computed yet.");
+            ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner);
+
+            /*
+             * 4. Load rules into the knowledge base
+             */
+            RuleParser.parseInto(kb, rules);
+            /*
+             * 5. Materialise with the Skolem Chase. As the Skolem Chase is known not to
+             * terminate for this set of rules and facts, it is interrupted after one
+             * second.
+             */
+            reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE);
+            reasoner.setReasoningTimeout(1);
+            System.out.println("Starting Skolem Chase (a.k.a. semi-oblivious chase) with 1 second timeout ...");
+            final boolean skolemChaseFinished = reasoner.reason();
+
+            /* Verify that the Skolem Chase did not terminate before timeout. */
+            System.out.println("Has Skolem Chase algorithm finished before 1 second timeout? " + skolemChaseFinished);
+            /*
+             * See that the Skolem Chase generated a very large number of facts in 1 second,
+             * extensively introducing new unnamed individuals to satisfy existential
+             * restrictions.
+             */
+            System.out.println("Before the timeout, the Skolem chase had produced "
+                    + reasoner.countQueryAnswers(queryHasPart).getCount() + " results for hasPart(?X, ?Y).");
+
+            /*
+             * 6. We reset the reasoner to discard all inferences, and apply the Restricted
+             * Chase on the same set of rules and facts
+             */
+            System.out.println();
+            reasoner.resetReasoner();
+
+            /*
+             * 7. Materialise with the Restricted Chase. As the Restricted Chase is known to
+             * terminate for this set of rules and facts, we will not interrupt it.
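+             *
+             * (Illustrative aside, ours rather than the original authors': starting from
+             * wheel(wheel1), the Skolem chase derives isPartOf(wheel1, _:n1) and
+             * bicycle(_:n1), then hasPart(_:n1, _:n2) and wheel(_:n2), and so on,
+             * inventing a fresh null in every round. The Restricted Chase only introduces
+             * a null when no existing individual satisfies the existential restriction,
+             * so it terminates here.)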
+ */ + reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + reasoner.setReasoningTimeout(null); + final long restrictedChaseStartTime = System.currentTimeMillis(); + System.out.println("Starting Restricted Chase (a.k.a. Standard Chase) without any timeout ... "); + reasoner.reason(); + + /* The Restricted Chase terminates: */ + final long restrictedChaseDuration = System.currentTimeMillis() - restrictedChaseStartTime; + System.out.println("The Restricted Chase finished in " + restrictedChaseDuration + " ms."); + + /* + * See that the Restricted Chase generated a small number of facts, reusing + * individuals that satisfy existential restrictions. + */ + ExamplesUtils.printOutQueryAnswers(queryHasPart, reasoner); + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java new file mode 100644 index 000000000..14e0116ba --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromDlgpFile.java @@ -0,0 +1,139 @@ +package org.semanticweb.rulewerk.examples.graal; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.util.ArrayList; +import java.util.List; + +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; + +import fr.lirmm.graphik.graal.api.core.Atom; +import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; +import fr.lirmm.graphik.graal.api.core.Rule; +import fr.lirmm.graphik.graal.io.dlp.DlgpParser; + +/** + * This example shows how facts can be imported from files in the + * DLGP/DLP format. + * + * The Graal + * {@link DlgpParser} is used to parse the program. This step requires a + * {@link File}, {@link InputStream}, {@link Reader}, or {@link String} + * containing or pointing to the program. + * + * The {@link Atom Atoms}, {@link Rule Rules}, and {@link ConjunctiveQuery + * ConjunctiveQueries} are then converted for use by Rulewerk. Take care to add + * the rules resulting from the {@link ConjunctiveQuery ConjunctiveQueries} as + * well as the {@link Rule Rules} to the {@link Reasoner}; see + * {@link GraalConjunctiveQueryToRule} for details. 
+ * + * @author Adrian Bielefeldt + * + */ +public class AddDataFromDlgpFile { + + public static void main(final String[] args) throws IOException { + + final List graalAtoms = new ArrayList<>(); + final List graalRules = new ArrayList<>(); + final List graalConjunctiveQueries = new ArrayList<>(); + + /* + * 1. Parse the DLGP/DLP file using the DlgpParser. + * + * DlgpParser supports Files, InputStreams, Readers, and Strings. While other + * objects such as prefixes can also be part of the iterator, they are + * automatically resolved and do not need to be handled here. + */ + try (final DlgpParser parser = new DlgpParser(new File("src/main/data/input/graal/", "example.dlgp"))) { + while (parser.hasNext()) { + final Object object = parser.next(); + if (object instanceof Atom) { + graalAtoms.add((Atom) object); + } else if (object instanceof Rule) { + graalRules.add((Rule) object); + } else if (object instanceof ConjunctiveQuery) { + graalConjunctiveQueries.add((ConjunctiveQuery) object); + } + } + } + + /* + * 2. ConjunctiveQueries consist of a conjunction of literals and a set of + * answer variables. To query this with Rulewerk, an additional rule needs to be + * added for each ConjunctiveQuery. See GraalConjunctiveQueryToRule for details. + */ + final List convertedConjunctiveQueries = new ArrayList<>(); + + for (final ConjunctiveQuery conjunctiveQuery : graalConjunctiveQueries) { + final String queryUniqueId = "query" + convertedConjunctiveQueries.size(); + convertedConjunctiveQueries.add(GraalToRulewerkModelConverter.convertQuery(queryUniqueId, conjunctiveQuery)); + } + + /* + * 3. Loading, reasoning, and querying while using try-with-resources to close + * the reasoner automatically. + */ + + try (Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + final KnowledgeBase kb = reasoner.getKnowledgeBase(); + + /* + * Add facts to the reasoner knowledge base + */ + kb.addStatements(GraalToRulewerkModelConverter.convertAtomsToFacts(graalAtoms)); + /* + * Load the knowledge base into the reasoner + */ + reasoner.reason(); + System.out.println("Before materialisation:"); + for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { + ExamplesUtils.printOutQueryAnswers(graalConjunctiveQueryToRule.getQuery(), reasoner); + } + + /* + * Add rules to the reasoner knowledge base + */ + kb.addStatements(GraalToRulewerkModelConverter.convertRules(graalRules)); + for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { + kb.addStatement(graalConjunctiveQueryToRule.getRule()); + } + /* + * Materialise facts using rules + */ + reasoner.reason(); + System.out.println("After materialisation:"); + for (final GraalConjunctiveQueryToRule graalConjunctiveQueryToRule : convertedConjunctiveQueries) { + ExamplesUtils.printOutQueryAnswers(graalConjunctiveQueryToRule.getQuery(), reasoner); + } + } + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java new file mode 100644 index 000000000..b5c39441e --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/AddDataFromGraal.java @@ -0,0 +1,161 @@ +package org.semanticweb.rulewerk.examples.graal; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this 
file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalConjunctiveQueryToRule; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; + +import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; +import fr.lirmm.graphik.graal.io.dlp.DlgpParser; + +/** + * This example shows how facts and rules can be imported from objects of the + * Graal library. Special + * care must be taken with the import of Graal {@link ConjunctiveQuery}-objects, + * since unlike with Rulewerk, they represent both the query atom and the + * corresponding rule. + *
+ * In Rulewerk, the reasoner is queried by a query Atom and the results are all
+ * facts matching this query Atom.
+ * Answering a Graal {@link ConjunctiveQuery} over a certain knowledge base is
+ * equivalent to adding a {@link Rule} to the knowledge base, prior to
+ * reasoning. The rule consists of the query Atoms as the Rule body and a
+ * single Atom with a fresh predicate containing all the answer variables of
+ * the {@link ConjunctiveQuery} as the Rule head. After the reasoning process,
+ * in which the rule is materialised, has completed, this Rule head can then be
+ * used as a query Atom to obtain the results of the Graal
+ * {@link ConjunctiveQuery}.
+ *
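+ * For example (our illustration): the Graal query
+ * {@code ?(X) :- p(X), q(X)} becomes the rule
+ * {@code ans(?X) :- p(?X), q(?X)} together with the query atom
+ * {@code ans(?X)}, where {@code ans} is the fresh predicate.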
+ * + * @author Adrian Bielefeldt + * + */ +public class AddDataFromGraal { + + public static void main(final String[] args) throws IOException { + /* + * 1. Instantiating rules + */ + final List graalRules = new ArrayList<>(); + + /* + * 1.1 Rules to map external database (EDB) predicates to internal database + * predicates (IDB). Necessary because Rulewerk requires separation between input + * predicates and predicates for which additional facts can be derived. + */ + graalRules.add(DlgpParser.parseRule("bicycleIDB(X) :- bicycleEDB(X).")); + graalRules.add(DlgpParser.parseRule("wheelIDB(X) :- wheelEDB(X).")); + graalRules.add(DlgpParser.parseRule("hasPartIDB(X, Y) :- hasPartEDB(X, Y).")); + graalRules.add(DlgpParser.parseRule("isPartOfIDB(X, Y) :- isPartOfEDB(X, Y).")); + + /* + * 1.2 Rules modelling that every bicycle has wheels and that the has part + * relation is inverse to the is part of relation. + */ + graalRules.add(DlgpParser.parseRule("hasPartIDB(X, Y), wheelIDB(Y) :- bicycleIDB(X).")); + graalRules.add(DlgpParser.parseRule("isPartOfIDB(X, Y) :- wheelIDB(X).")); + graalRules.add(DlgpParser.parseRule("isPartOfIDB(X, Y) :- hasPartIDB(Y, X).")); + graalRules.add(DlgpParser.parseRule("hasPartIDB(X, Y) :- isPartOfIDB(Y, X).")); + + /** + * 2. Instantiating Atoms representing the data to reason on (EDB). + */ + final List graalAtoms = new ArrayList<>(); + + /* + * bicycleEDB + */ + graalAtoms.add(DlgpParser.parseAtom("bicycleEDB(redBike).")); + graalAtoms.add(DlgpParser.parseAtom("bicycleEDB(blueBike).")); + graalAtoms.add(DlgpParser.parseAtom("bicycleEDB(blackBike).")); + + /* + * wheelEDB + */ + graalAtoms.add(DlgpParser.parseAtom("wheelEDB(redWheel).")); + graalAtoms.add(DlgpParser.parseAtom("wheelEDB(blueWheel).")); + + /* + * hasPartEDB + */ + graalAtoms.add(DlgpParser.parseAtom("hasPartEDB(redBike, redWheel).")); + graalAtoms.add(DlgpParser.parseAtom("hasPartEDB(blueBike, blueWheel).")); + + /* + * 3. Instantiating a Graal conjunctive query. This is equivalent to adding the + * rule query(?b, ?w) :- bicycleIDB(?b), wheelIDB(?w), isPartOfIDB(?w, ?b) and + * then querying with query(?b, ?w) The rule from convertedGraalConjunctiveQuery + * needs to be added to the reasoner. + */ + final GraalConjunctiveQueryToRule convertedGraalConjunctiveQuery = GraalToRulewerkModelConverter.convertQuery( + "graalQuery", DlgpParser.parseQuery("?(B, W) :- bicycleIDB(B), wheelIDB(W), isPartOfIDB(W, B).")); + + /* + * 4. Loading, reasoning, and querying while using try-with-resources to close + * the reasoner automatically. 
+ */ + final KnowledgeBase kb = new KnowledgeBase(); + + try (Reasoner reasoner = new VLogReasoner(kb)) { + + /* + * Add facts to the reasoner knowledge base + */ + kb.addStatements(GraalToRulewerkModelConverter.convertAtomsToFacts(graalAtoms)); + /* + * Load the knowledge base into the reasoner + */ + reasoner.reason(); + + /* + * Query the loaded facts + */ + System.out.println("Before materialisation:"); + ExamplesUtils.printOutQueryAnswers(convertedGraalConjunctiveQuery.getQuery(), reasoner); + + /* + * Add rules to the reasoner knowledge base + */ + kb.addStatements(GraalToRulewerkModelConverter.convertRules(graalRules)); + kb.addStatements(convertedGraalConjunctiveQuery.getRule()); + + /* + * Materialise facts using rules + */ + reasoner.reason(); + + System.out.println("After materialisation:"); + ExamplesUtils.printOutQueryAnswers(convertedGraalConjunctiveQuery.getQuery(), reasoner); + + } + + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java new file mode 100644 index 000000000..9bfac8c74 --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/graal/DoidExampleGraal.java @@ -0,0 +1,142 @@ +package org.semanticweb.rulewerk.examples.graal; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.IOException; +import java.net.URL; + +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.examples.DoidExample; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.graal.GraalToRulewerkModelConverter; + +import fr.lirmm.graphik.graal.io.dlp.DlgpParser; + +/** + * This example is a variant of {@link DoidExample} using Graal. It reasons + * about human diseases, based on information from the Disease Ontology (DOID) + * and Wikidata. It illustrates how to load data from different sources (RDF + * file, SPARQL), and reason about these inputs using rules that are loaded from + * a file in DLGP syntax. 
Since DLGP does not support negation, additional
 * rules with stratified negation are added through custom Java code.
 *
 * @author Markus Kroetzsch
 * @author Larry Gonzalez
 */
+public class DoidExampleGraal {
+
+	public static void main(final String[] args) throws IOException {
+
+		ExamplesUtils.configureLogging();
+
+		final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql");
+
+		final KnowledgeBase kb = new KnowledgeBase();
+
+		try (final Reasoner reasoner = new VLogReasoner(kb)) {
+
+			/* Configure RDF data source */
+			final Predicate doidTriplePredicate = Expressions.makePredicate("doidTriple", 3);
+			final DataSource doidDataSource = new RdfFileDataSource(ExamplesUtils.INPUT_FOLDER + "doid.nt.gz");
+			kb.addStatement(new DataSourceDeclarationImpl(doidTriplePredicate, doidDataSource));
+
+			/* Configure SPARQL data sources */
+			final String sparqlHumansWithDisease = "?disease wdt:P699 ?doid .";
+			// (wdt:P699 = "Disease Ontology ID")
+			final DataSource diseasesDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint,
+					"disease,doid", sparqlHumansWithDisease);
+			final Predicate diseaseIdPredicate = Expressions.makePredicate("diseaseId", 2);
+			kb.addStatement(new DataSourceDeclarationImpl(diseaseIdPredicate, diseasesDataSource));
+
+			final String sparqlRecentDeaths = "?human wdt:P31 wd:Q5; wdt:P570 ?deathDate . FILTER (YEAR(?deathDate) = 2018)";
+			// (wdt:P31 = "instance of"; wd:Q5 = "human"; wdt:P570 = "date of death")
+			final DataSource recentDeathsDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, "human",
+					sparqlRecentDeaths);
+			final Predicate recentDeathsPredicate = Expressions.makePredicate("recentDeaths", 1);
+			kb.addStatement(new DataSourceDeclarationImpl(recentDeathsPredicate, recentDeathsDataSource));
+
+			final String sparqlRecentDeathsCause = sparqlRecentDeaths + "?human wdt:P509 ?causeOfDeath . 
"; + // (wdt:P509 = "cause of death") + final DataSource recentDeathsCauseDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint, + "human,causeOfDeath", sparqlRecentDeathsCause); + final Predicate recentDeathsCausePredicate = Expressions.makePredicate("recentDeathsCause", 2); + kb.addStatement(new DataSourceDeclarationImpl(recentDeathsCausePredicate, recentDeathsCauseDataSource)); + + /* Load rules from DLGP file */ + try (final DlgpParser parser = new DlgpParser( + new File(ExamplesUtils.INPUT_FOLDER + "/graal", "doid-example.dlgp"))) { + while (parser.hasNext()) { + final Object object = parser.next(); + if (object instanceof fr.lirmm.graphik.graal.api.core.Rule) { + kb.addStatement(GraalToRulewerkModelConverter + .convertRule((fr.lirmm.graphik.graal.api.core.Rule) object)); + } + } + } + + /* Create additional rules with negated literals */ + final Variable x = Expressions.makeUniversalVariable("X"); + final Variable y = Expressions.makeUniversalVariable("Y"); + final Variable z = Expressions.makeUniversalVariable("Z"); + // humansWhoDiedOfNoncancer(X):-deathCause(X,Y),diseaseId(Y,Z),~cancerDisease(Z) + final NegativeLiteral notCancerDisease = Expressions.makeNegativeLiteral("cancerDisease", z); + final PositiveLiteral diseaseId = Expressions.makePositiveLiteral("diseaseId", y, z); + final PositiveLiteral deathCause = Expressions.makePositiveLiteral("deathCause", x, y); + final PositiveLiteral humansWhoDiedOfNoncancer = Expressions.makePositiveLiteral("humansWhoDiedOfNoncancer", + x); + kb.addStatement(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + Expressions.makeConjunction(deathCause, diseaseId, notCancerDisease))); + // humansWhoDiedOfNoncancer(X) :- deathCause(X,Y), ~hasDoid(Y) + final NegativeLiteral hasNotDoid = Expressions.makeNegativeLiteral("hasDoid", y); + kb.addStatement(Expressions.makeRule(Expressions.makePositiveConjunction(humansWhoDiedOfNoncancer), + Expressions.makeConjunction(deathCause, hasNotDoid))); + + System.out.println("Rules configured:\n--"); + kb.getRules().forEach(System.out::println); + System.out.println("--"); + System.out.println("Starting reasoning (including SPARQL query answering) ..."); + reasoner.reason(); + System.out.println("... reasoning completed."); + + final PositiveLiteral humansWhoDiedOfCancer = Expressions.makePositiveLiteral("humansWhoDiedOfCancer", x); + System.out.println("Humans in Wikidata who died in 2018 due to cancer: " + + reasoner.countQueryAnswers(humansWhoDiedOfCancer).getCount()); + + System.out.println("Humans in Wikidata who died in 2018 due to some other cause: " + + reasoner.countQueryAnswers(humansWhoDiedOfNoncancer).getCount()); + System.out.println("Done."); + } + + } + +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java new file mode 100644 index 000000000..52815942e --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/owlapi/OwlOntologyToRulesAndFacts.java @@ -0,0 +1,141 @@ +package org.semanticweb.rulewerk.examples.owlapi; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Set;
+
+import org.semanticweb.owlapi.apibinding.OWLManager;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.model.OWLOntologyCreationException;
+import org.semanticweb.owlapi.model.OWLOntologyManager;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;
+import org.semanticweb.rulewerk.examples.ExamplesUtils;
+import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter;
+
+/**
+ * This example shows how the rulewerk-owlapi library (class
+ * {@link OwlToRulesConverter}) can be used to transform an OWL ontology into
+ * rulewerk-core {@link Rule}s and {@link Fact}s.
+ *
+ * @author Irina Dragoste
+ *
+ */
+public class OwlOntologyToRulesAndFacts {
+
+	public static void main(final String[] args) throws OWLOntologyCreationException, IOException {
+
+		/* The bike ontology is loaded from a file using the OWL API. */
+		final OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager();
+		final OWLOntology ontology = ontologyManager
+				.loadOntologyFromOntologyDocument(new File(ExamplesUtils.INPUT_FOLDER + "owl/bike.owl"));
+
+		/*
+		 * rulewerk.owlapi.OwlToRulesConverter can be used to convert the OWL axioms in
+		 * the source ontology to target Rule and Atom objects.
+		 */
+		final OwlToRulesConverter owlToRulesConverter = new OwlToRulesConverter();
+		owlToRulesConverter.addOntology(ontology);
+
+		/* Print out the Rules extracted from the bike ontology. */
+		System.out.println("Rules extracted from Bike ontology:");
+		final Set<Rule> rules = owlToRulesConverter.getRules();
+		for (final Rule rule : rules) {
+			System.out.println(" - rule: " + rule);
+		}
+		System.out.println();
+
+		/* Print out the Facts extracted from the bike ontology. */
+		System.out.println("Facts extracted from Bike ontology:");
+		final Set<Fact> facts = owlToRulesConverter.getFacts();
+		for (final PositiveLiteral fact : facts) {
+			System.out.println(" - fact: " + fact);
+		}
+		System.out.println();
+
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(new ArrayList<>(owlToRulesConverter.getRules()));
+		kb.addStatements(owlToRulesConverter.getFacts());
+
+		try (VLogReasoner reasoner = new VLogReasoner(kb)) {
+			/*
+			 * Load rules and facts obtained from the ontology, and reason over the loaded
+			 * ontology with the default algorithm (the restricted chase).
+			 */
+			System.out.println("Reasoning default algorithm: " + reasoner.getAlgorithm());
+			reasoner.reason();
+
+			/* Query for the parts of bike constant "b2". 
*/ + final Variable vx = Expressions.makeUniversalVariable("x"); + final Constant b2 = Expressions.makeAbstractConstant("http://www.bike-example.ontology#b2"); + + final PositiveLiteral b2HasPart = Expressions + .makePositiveLiteral("http://www.bike-example.ontology#hasPart", b2, vx); + System.out.println("Answers to query " + b2HasPart + " :"); + + /* + * See that an unnamed individual has been introduced to satisfy + * owl:someValuesFrom restriction: + * + * :Bike rdf:type owl:Class ; rdfs:subClassOf [ rdf:type owl:Restriction ; + * owl:onProperty :hasPart ; owl:someValuesFrom :Wheel ] . + */ + try (QueryResultIterator answers = reasoner.answerQuery(b2HasPart, true);) { + answers.forEachRemaining(answer -> { + final Term constantB2 = answer.getTerms().get(0); + final Term term = answer.getTerms().get(1); + System.out.println(" - " + constantB2 + " hasPart " + term); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); + } + + final PositiveLiteral isPartOfB2 = Expressions + .makePositiveLiteral("http://www.bike-example.ontology#isPartOf", vx, b2); + + System.out.println("Answers to query " + isPartOfB2 + " :"); + /* + * See that the same unnamed individual is part of Bike b2, satisfying + * restriction :Wheel rdf:type owl:Class ; rdfs:subClassOf [ rdf:type + * owl:Restriction ; owl:onProperty :isPartOf ; owl:someValuesFrom :Bike ] . + */ + try (QueryResultIterator answers = reasoner.answerQuery(isPartOfB2, true);) { + answers.forEachRemaining(answer -> { + final Term term = answer.getTerms().get(0); + final Term constantB2 = answer.getTerms().get(1); + System.out.println(" - " + term + " isPartOf " + constantB2); + System.out.println(" Term " + term + " is of type " + term.getType()); + }); + } + + } + } +} diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java new file mode 100644 index 000000000..8600edae6 --- /dev/null +++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/rdf/AddDataFromRdfModel.java @@ -0,0 +1,185 @@ +package org.semanticweb.rulewerk.examples.rdf; + +/*- + * #%L + * Rulewerk Examples + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Set; + +import org.openrdf.model.Model; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; +import org.semanticweb.rulewerk.examples.ExamplesUtils; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.rdf.RdfModelConverter; + +/** + * This example shows how rulewerk-rdf library's utility class + * {@link RdfModelConverter} can be used to convert RDF {@link Model}s from + * various types of RDF resources to rulewerk-core {@code Atom} sets. + * + * @author Irina Dragoste + * + */ +public class AddDataFromRdfModel { + + public static void main(final String[] args) + throws IOException, RDFParseException, RDFHandlerException, URISyntaxException { + + ExamplesUtils.configureLogging(); + RdfModelConverter rdfModelConverter = new RdfModelConverter(); + + /* + * Local file containing metadata of publications from ISWC'16 conference, in + * RDF/XML format. + */ + final File rdfXMLResourceFile = new File(ExamplesUtils.INPUT_FOLDER + "rdf/iswc-2016-complete-alignments.rdf"); + final FileInputStream inputStreamISWC2016 = new FileInputStream(rdfXMLResourceFile); + /* An RDF Model is obtained from parsing the RDF/XML resource. */ + final Model rdfModelISWC2016 = parseRdfResource(inputStreamISWC2016, rdfXMLResourceFile.toURI(), + RDFFormat.RDFXML); + + /* + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each + * having the ternary predicate "TRIPLE". + */ + final Set tripleFactsISWC2016 = rdfModelConverter.rdfModelToFacts(rdfModelISWC2016); + System.out.println("Example triple fact from iswc-2016 dataset:"); + System.out.println(" - " + tripleFactsISWC2016.iterator().next()); + + /* + * URL of online resource containing metadata of publications from ISWC'17 + * conference, in TURTLE format. + */ + final URL turtleResourceURL = new URL( + "http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2017-complete-alignments.ttl"); + final InputStream inputStreamISWC2017 = turtleResourceURL.openStream(); + /* An RDF Model is obtained from parsing the TURTLE resource. */ + final Model rdfModelISWC2017 = parseRdfResource(inputStreamISWC2017, turtleResourceURL.toURI(), + RDFFormat.TURTLE); + + /* + * Using rulewerk-rdf library, we convert RDF Model triples to facts, each + * having the ternary predicate "TRIPLE". 
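+		 * Each RDF triple (subject, predicate, object) thus becomes one fact of
+		 * the form TRIPLE(subject, predicate, object).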
+		 */
+		final Set<Fact> tripleFactsISWC2017 = rdfModelConverter.rdfModelToFacts(rdfModelISWC2017);
+		System.out.println("Example triple fact from iswc-2017 dataset:");
+		System.out.println(" - " + tripleFactsISWC2017.iterator().next());
+
+		/*
+		 * We wish to combine triples about a person's affiliation, an affiliation's
+		 * organization and an organization's name, to find a person's organization
+		 * name.
+		 */
+
+		/* Predicate names of the triples found in both RDF files. */
+		final Variable varPerson = Expressions.makeUniversalVariable("person");
+		final Predicate predicateHasOrganizationName = Expressions.makePredicate("hasOrganizationName", 2);
+
+		/*
+		 * Rule that retrieves pairs of persons and their organization name:
+		 */
+		final String rules = "%%%% We specify the rules syntactically for convenience %%%\n"
+				// assumption: the cnf: IRI below is the scholarlydata conference-ontology
+				// namespace; the original IRI was lost in the patch text
+				+ "@prefix cnf: <https://w3id.org/scholarlydata/ontology/conference-ontology.owl#> ."
+				+ "hasOrganizationName(?Person, ?OrgName) :- "
+				+ "	TRIPLE(?Person, cnf:hasAffiliation, ?Aff), TRIPLE(?Aff, cnf:withOrganisation, ?Org),"
+				+ "	TRIPLE(?Org, cnf:name, ?OrgName) .";
+		KnowledgeBase kb;
+		try {
+			kb = RuleParser.parse(rules);
+		} catch (final ParsingException e) {
+			System.out.println("Failed to parse rules: " + e.getMessage());
+			return;
+		}
+		kb.addStatements(tripleFactsISWC2016);
+		kb.addStatements(tripleFactsISWC2017);
+
+		try (VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.reason();
+
+			/* We query for persons whose organization name is "TU Dresden". */
+			final Constant constantTuDresden = Expressions.makeDatatypeConstant("TU Dresden",
+					"http://www.w3.org/2001/XMLSchema#string");
+			/* hasOrganizationName(?person, "TU Dresden") */
+			final PositiveLiteral queryTUDresdenParticipantsAtISWC = Expressions
+					.makePositiveLiteral(predicateHasOrganizationName, varPerson, constantTuDresden);
+
+			System.out.println("\nParticipants at ISWC'16 and '17 from Organization 'TU Dresden':");
+			System.out.println("(Answers to query " + queryTUDresdenParticipantsAtISWC + ")\n");
+			try (QueryResultIterator queryResultIterator = reasoner.answerQuery(queryTUDresdenParticipantsAtISWC,
+					false)) {
+				queryResultIterator.forEachRemaining(answer -> System.out
+						.println(" - " + answer.getTerms().get(0) + ", organization " + answer.getTerms().get(1)));
+			}
+
+		}
+
+	}
+
+	/**
+	 * Parses the data from the supplied InputStream, using the supplied baseURI to
+	 * resolve any relative URI references.
+	 *
+	 * @param inputStream The content to be parsed, expected to be in the given
+	 *                    {@code rdfFormat}.
+	 * @param baseURI     The URI associated with the data in the InputStream.
+	 * @param rdfFormat   The expected RDF format of the inputStream resource that
+	 *                    is to be parsed.
+	 * @return A Model containing the RDF triples. Blanks have unique ids across
+	 *         different models.
+	 * @throws IOException         If an I/O error occurred while data was read
+	 *                             from the InputStream.
+	 * @throws RDFParseException   If the parser has found an unrecoverable parse
+	 *                             error.
+	 * @throws RDFHandlerException If the configured statement handler has
+	 *                             encountered an unrecoverable error.
+ */ + private static Model parseRdfResource(final InputStream inputStream, final URI baseURI, final RDFFormat rdfFormat) + throws IOException, RDFParseException, RDFHandlerException { + final Model model = new LinkedHashModel(); + final RDFParser rdfParser = Rio.createParser(rdfFormat); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseURI.toString()); + + return model; + } + +} diff --git a/vlog4j-core/src/test/data/output/empty.csv b/rulewerk-examples/src/main/logs/.keep similarity index 100% rename from vlog4j-core/src/test/data/output/empty.csv rename to rulewerk-examples/src/main/logs/.keep diff --git a/rulewerk-graal/LICENSE.txt b/rulewerk-graal/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-graal/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/rulewerk-graal/pom.xml b/rulewerk-graal/pom.xml
new file mode 100644
index 000000000..682546386
--- /dev/null
+++ b/rulewerk-graal/pom.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>org.semanticweb.rulewerk</groupId>
+		<artifactId>rulewerk-parent</artifactId>
+		<version>0.10.0-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>rulewerk-graal</artifactId>
+	<packaging>jar</packaging>
+
+	<name>Rulewerk Graal Import Components</name>
+	<description>Components to import Graal data structures.</description>
+
+	<dependencies>
+		<dependency>
+			<groupId>fr.lirmm.graphik</groupId>
+			<artifactId>graal-api</artifactId>
+			<version>${graal.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>fr.lirmm.graphik</groupId>
+			<artifactId>graal-core</artifactId>
+			<version>${graal.version}</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-core</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+	</dependencies>
+</project>
diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java
new file mode 100644
index 000000000..ada714cbb
--- /dev/null
+++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConjunctiveQueryToRule.java
@@ -0,0 +1,129 @@
+package org.semanticweb.rulewerk.graal;
+
+/*-
+ * #%L
+ * Rulewerk Graal Import Components
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.model.api.Conjunction;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+
+import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery;
+
+/**
+ * A utility class containing a Graal {@link ConjunctiveQuery}. Answering a
+ * Graal {@link ConjunctiveQuery} over a certain knowledge base is equivalent
+ * to adding a {@link Rule} to the knowledge base, prior to reasoning.
+ * The rule consists of the query {@link Literal}s as the body and a single + * {@link PositiveLiteral} with a new predicate containing all the answer + * variables of the query as the head. After the reasoning process, in which the + * rule is materialised, is completed, this rule head can then be used as a + * query to obtain the results of the Graal {@link ConjunctiveQuery}. + * + * @author Adrian Bielefeldt + */ +public class GraalConjunctiveQueryToRule { + + private final Rule rule; + + private final PositiveLiteral query; + + /** + * Constructor for a GraalConjunctiveQueryToRule. + * + * @param ruleHeadPredicateName the query predicate name. Becomes the name of + * the rule head Predicate. + * @param answerVariables the query answer variables. They become the + * terms of the rule head PositiveLiteral. + * @param conjunction the query body. Becomes the rule body. + */ + protected GraalConjunctiveQueryToRule(final String ruleHeadPredicateName, final List answerVariables, + final Conjunction conjunction) { + this.query = Expressions.makePositiveLiteral(ruleHeadPredicateName, answerVariables); + this.rule = Expressions.makePositiveLiteralsRule(Expressions.makePositiveConjunction(this.query), conjunction); + } + + /** + * A rule that needs to be added to the program to answer the + * {@link ConjunctiveQuery Graal ConjunctiveQuery} represented by this object. + * It consists of all query literals from the original Graal ConjunctiveQuery as + * the body and a single PositiveLiteral containing all the answer variables of + * the query as the head. + * + * @return The rule equivalent to the Graal ConjunctiveQuery represented by this + * object. + */ + public Rule getRule() { + return this.rule; + } + + /** + * A query {@link PositiveLiteral} that returns the results of the + * {@link ConjunctiveQuery Graal ConjunctiveQuery} represented by this object, + * provided the corresponding rule ({@link #getRule()}) has been added to the + * program. It is equal to the head of the rule returned by {@link #getRule()}. + * + * @return The query {@link PositiveLiteral} to obtain the results of the Graal + * ConjunctiveQuery represented by this object. + */ + public PositiveLiteral getQuery() { + return this.query; + } + + @Override + public int hashCode() { + return this.rule.hashCode(); + } + + @Override + public boolean equals(final Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (this.getClass() != obj.getClass()) { + return false; + } + final GraalConjunctiveQueryToRule other = (GraalConjunctiveQueryToRule) obj; + + if (!this.rule.equals(other.rule)) { + return false; + } + return true; + } + + @Override + public String toString() { + return "GraalConjunctiveQueryToRule [rule=" + this.rule + "]"; + } + +} diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java new file mode 100644 index 000000000..836fa51e0 --- /dev/null +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalConvertException.java @@ -0,0 +1,46 @@ +package org.semanticweb.rulewerk.graal; + +/*- + * #%L + * Rulewerk Graal Import Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + +/** + * An exception to signify that a conversion from Graal data structures to Rulewerk data structures could not + * be made. + * + * @author Adrian Bielefeldt + * + */ +public class GraalConvertException extends RulewerkRuntimeException { + + /** + * generated serial version UID + */ + private static final long serialVersionUID = -3228005099627492816L; + + public GraalConvertException(final String message) { + super(message); + } + + public GraalConvertException(final String message, final Throwable exception) { + super(message, exception); + } +} diff --git a/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java new file mode 100644 index 000000000..94fa0cd7c --- /dev/null +++ b/rulewerk-graal/src/main/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverter.java @@ -0,0 +1,294 @@ +package org.semanticweb.rulewerk.graal; + +/*- + * #%L + * Rulewerk Graal Import Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +import org.apache.commons.lang3.StringUtils; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +import fr.lirmm.graphik.graal.api.core.AtomSet; +import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; +import fr.lirmm.graphik.util.stream.CloseableIterator; +import fr.lirmm.graphik.util.stream.IteratorException; + +/** + * Utility class to convert + * Graal data structures into + * Rulewerk data structures. Labels ({@link ConjunctiveQuery#getLabel()}, + * {@link fr.lirmm.graphik.graal.api.core.Rule#getLabel() Rule.getLabel()}, or + * {@link fr.lirmm.graphik.graal.api.core.Term#getLabel() Term.getLabel()}) are + * not converted since Rulewerk does not support them. 
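+ *
+ * A typical use (a sketch; the DLGP strings are arbitrary examples parsed with
+ * Graal's {@code DlgpParser}, as in the examples of this repository) converts
+ * parsed Graal objects before adding them to a Rulewerk knowledge base:
+ *
+ * <pre>
+ * Rule rule = GraalToRulewerkModelConverter
+ * 		.convertRule(DlgpParser.parseRule("mortal(X) :- human(X)."));
+ * Fact fact = GraalToRulewerkModelConverter
+ * 		.convertAtomToFact(DlgpParser.parseAtom("human(socrate)."));
+ * </pre>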
+ * + * @author Adrian Bielefeldt + * + */ +public final class GraalToRulewerkModelConverter { + + private GraalToRulewerkModelConverter() { + } + + /** + * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a + * {@link PositiveLiteral Rulewerk PositiveLiteral}. + * + * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom + * Graal Atom} + * @param existentialVariables set of variables that are existentially + * quantified + * @return A {@link PositiveLiteral Rulewerk PositiveLiteral} + */ + public static PositiveLiteral convertAtom(final fr.lirmm.graphik.graal.api.core.Atom atom, + final Set existentialVariables) { + final Predicate predicate = convertPredicate(atom.getPredicate()); + final List terms = convertTerms(atom.getTerms(), existentialVariables); + return Expressions.makePositiveLiteral(predicate, terms); + } + + /** + * Converts a {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} into a + * {@link Fact Rulewerk fact}. + * + * @param atom A {@link fr.lirmm.graphik.graal.api.core.Atom Graal Atom} + * @return A {@link Fact Rulewerk fact} + * @throws IllegalArgumentException if the converted atom contains terms that + * cannot occur in facts + */ + public static Fact convertAtomToFact(final fr.lirmm.graphik.graal.api.core.Atom atom) { + final Predicate predicate = convertPredicate(atom.getPredicate()); + final List terms = convertTerms(atom.getTerms(), Collections.emptySet()); + return Expressions.makeFact(predicate, terms); + } + + /** + * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms} into a {@link List} of {@link PositiveLiteral Rulewerk + * PositiveLiterals}. + * + * @param atoms list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms}. + * @return A {@link List} of {@link PositiveLiteral Rulewerk PositiveLiterals}. + */ + public static List convertAtoms(final List atoms) { + final List result = new ArrayList<>(); + for (final fr.lirmm.graphik.graal.api.core.Atom atom : atoms) { + result.add(convertAtom(atom, Collections.emptySet())); + } + return result; + } + + /** + * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms} into a {@link List} of {@link Fact Rulewerk facts}. + * + * @param atoms list of {@link fr.lirmm.graphik.graal.api.core.Atom Graal + * Atoms}. + * @return A {@link List} of {@link Fact Rulewerk facts}. + */ + public static List convertAtomsToFacts(final List atoms) { + final List result = new ArrayList<>(); + for (final fr.lirmm.graphik.graal.api.core.Atom atom : atoms) { + result.add(convertAtomToFact(atom)); + } + return result; + } + + /** + * Converts a {@link AtomSet Graal AtomSet} into a {@link Conjunction Rulewerk + * Conjunction} of {@link PositiveLiteral}s. 
+ * + * @param atomSet A {@link AtomSet Graal AtomSet} + * @param existentialVariables set of variables that are existentially + * quantified + * @return A {@link Conjunction Rulewerk Conjunction} + */ + private static Conjunction convertAtomSet(final AtomSet atomSet, + final Set existentialVariables) { + final List result = new ArrayList<>(); + try (CloseableIterator iterator = atomSet.iterator()) { + while (iterator.hasNext()) { + result.add(convertAtom(iterator.next(), existentialVariables)); + } + } catch (final IteratorException e) { + throw new GraalConvertException(MessageFormat + .format("Unexpected Iterator Exception when converting PositiveLiteralSet {0}}.", atomSet), e); + } + return Expressions.makeConjunction(result); + } + + /** + * Converts a {@link fr.lirmm.graphik.graal.api.core.Predicate Graal Predicate} + * into a {@link Predicate Rulewerk Predicate}. + * + * @param predicate A {@link fr.lirmm.graphik.graal.api.core.Predicate Graal + * Predicate} + * @return A {@link Predicate Rulewerk Predicate} + */ + private static Predicate convertPredicate(final fr.lirmm.graphik.graal.api.core.Predicate predicate) { + return Expressions.makePredicate(predicate.getIdentifier().toString(), predicate.getArity()); + } + + /** + * Converts a {@link ConjunctiveQuery Graal ConjunctiveQuery} into a + * {@link GraalConjunctiveQueryToRule}. Answering a Graal ConjunctiveQuery over + * a certain knowledge base is equivalent to adding a {@link Rule} to the + * knowledge base, prior to reasoning. The rule consists of the query + * literals as the body and a single {@link PositiveLiteral} with a new + * predicate containing all the query variables as the head. After the reasoning + * process, in which the rule is materialised, is completed, this rule head can + * then be used as a query PositiveLiteral to obtain the results of the Graal + * ConjunctiveQuery. + * + *
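+ * For example (sketch), converting the query {@code ?(X, Y) :- hasPart(X, Y)}
+ * under the name {@code q} yields the rule {@code q(X, Y) :- hasPart(X, Y)}
+ * ({@link GraalConjunctiveQueryToRule#getRule()}) and the query atom
+ * {@code q(X, Y)} ({@link GraalConjunctiveQueryToRule#getQuery()}).
+ *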

+ * WARNING: The supplied {@code ruleHeadPredicateName} will be used to + * create a {@link Predicate} containing all answer variables from the + * {@code conjunctiveQuery}. If a Predicate with the same name and arity is used + * elsewhere in the same program, the result will differ from the one expected + * from the Graal ConjunctiveQuery. + *
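+ * For example, if a unary predicate {@code q} already occurs in the program
+ * and {@code q} is also chosen as the head predicate name for a one-variable
+ * query, the facts already derived for {@code q} would be mixed into the
+ * query answers; choosing a fresh name avoids this.
+ *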

+ * + * @param ruleHeadPredicateName A name to create a program-unique predicate for + * the query PositiveLiteral. + * @param conjunctiveQuery A {@link ConjunctiveQuery Graal Query}. + * @return A {@link GraalConjunctiveQueryToRule} equivalent to the + * {@code conjunctiveQuery} input. + */ + public static GraalConjunctiveQueryToRule convertQuery(final String ruleHeadPredicateName, + final ConjunctiveQuery conjunctiveQuery) { + if (StringUtils.isBlank(ruleHeadPredicateName)) { + throw new GraalConvertException(MessageFormat.format( + "Rule head predicate for Graal ConjunctiveQuery {0} cannot be a blank string.", conjunctiveQuery)); + } + + if (conjunctiveQuery.getAtomSet().isEmpty()) { + throw new GraalConvertException(MessageFormat.format( + "Graal ConjunctiveQuery {0} with empty body is not supported in Rulewerk.", conjunctiveQuery)); + } + + if (conjunctiveQuery.getAnswerVariables().isEmpty()) { + throw new GraalConvertException(MessageFormat.format( + "Graal ConjunctiveQuery {0} with no answer variables is not supported in Rulewerk.", + conjunctiveQuery)); + } + + final Conjunction conjunction = convertAtomSet(conjunctiveQuery.getAtomSet(), + Collections.emptySet()); + final List answerVariables = convertTerms(conjunctiveQuery.getAnswerVariables(), Collections.emptySet()); + + return new GraalConjunctiveQueryToRule(ruleHeadPredicateName, answerVariables, conjunction); + } + + /** + * Converts a {@link fr.lirmm.graphik.graal.api.core.Rule Graal Rule} into a + * {@link Rule Rulewerk Rule}. + * + * @param rule A {@link fr.lirmm.graphik.graal.api.core.Rule Graal Rule}. + * @return A {@link Rule Rulewerk Rule}. + */ + public static Rule convertRule(final fr.lirmm.graphik.graal.api.core.Rule rule) { + final Conjunction head = convertAtomSet(rule.getHead(), rule.getExistentials()); + final Conjunction body = convertAtomSet(rule.getBody(), Collections.emptySet()); + return Expressions.makePositiveLiteralsRule(head, body); + } + + /** + * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Rule Graal + * Rules} into a {@link List} of {@link Rule Rulewerk Rules}. + * + * @param rules A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Rule + * Graal Rules}. + * @return A {@link List} of {@link Rule Rulewerk Rules}. + */ + public static List convertRules(final List rules) { + final List result = new ArrayList<>(); + for (final fr.lirmm.graphik.graal.api.core.Rule rule : rules) { + result.add(convertRule(rule)); + } + return result; + } + + /** + * Converts a {@link fr.lirmm.graphik.graal.api.core.Term Graal Term} into a + * {@link Term Rulewerk Term}. If the {@code term} is neither + * {@link fr.lirmm.graphik.graal.api.core.Term#isVariable() Variable} nor + * {@link fr.lirmm.graphik.graal.api.core.Term#isConstant() Constant}, a + * {@link GraalConvertException} is thrown. + * + * @param term A {@link fr.lirmm.graphik.graal.api.core.Term} + * @param existentialVariables set of variables that are existentially + * quantified + * @return A {@link Term Rulewerk Term}, with {@link Term#getName()} equal to + * {@link fr.lirmm.graphik.graal.api.core.Term#getIdentifier()}, if it + * is a Variable, and {@link Term#getName()} equal to + * <{@link fr.lirmm.graphik.graal.api.core.Term#getIdentifier()}>, if it + * is a Constant.
+ * Graal Variable with identifier "a" will be transformed to + * rulewerk Variable with name "a". Graal Constant with identifier + * "c" will be transformed to rulewerk Constant with name + * "<c>". + * + * @throws GraalConvertException If the term is neither variable nor constant. + */ + private static Term convertTerm(final fr.lirmm.graphik.graal.api.core.Term term, + final Set existentialVariables) { + final String id = term.getIdentifier().toString(); + if (term.isConstant()) { + return Expressions.makeAbstractConstant(id); + } else if (term.isVariable()) { + if (existentialVariables.contains(term)) { + return Expressions.makeExistentialVariable(id); + } else { + return Expressions.makeUniversalVariable(id); + } + } else { + throw new GraalConvertException(MessageFormat.format( + "Term {0} with identifier {1} and label {2} could not be converted because it is neither constant nor variable.", + term, id, term.getLabel())); + } + } + + /** + * Converts a {@link List} of {@link fr.lirmm.graphik.graal.api.core.Term Graal + * Terms} into a {@link List} of {@link Term Rulewerk Terms}. + * + * @param terms A {@link List} of {@link fr.lirmm.graphik.graal.api.core.Term + * Graal Terms} + * @return A {@link List} of {@link Term Rulewerk Terms} + */ + private static List convertTerms(final List terms, + final Set existentialVariables) { + final List result = new ArrayList<>(); + for (final fr.lirmm.graphik.graal.api.core.Term term : terms) { + result.add(convertTerm(term, existentialVariables)); + } + return result; + } +} diff --git a/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java new file mode 100644 index 000000000..fb8fcc40f --- /dev/null +++ b/rulewerk-graal/src/test/java/org/semanticweb/rulewerk/graal/GraalToRulewerkModelConverterTest.java @@ -0,0 +1,257 @@ +package org.semanticweb.rulewerk.graal; + +/*- + * #%L + * Rulewerk Graal Import Components + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +import fr.lirmm.graphik.graal.api.core.ConjunctiveQuery; +import fr.lirmm.graphik.graal.api.io.ParseException; +import fr.lirmm.graphik.graal.core.DefaultAtom; +import fr.lirmm.graphik.graal.core.DefaultConjunctiveQuery; +import fr.lirmm.graphik.graal.core.DefaultRule; +import fr.lirmm.graphik.graal.core.atomset.LinkedListAtomSet; +import fr.lirmm.graphik.graal.core.term.DefaultTermFactory; + +/** + * @author Adrian Bielefeldt + */ +public class GraalToRulewerkModelConverterTest { + + private final String socrate = "socrate"; + private final String redsBike = "redsBike"; + + private final String bicycle = "bicycle"; + private final String hasPart = "hasPart"; + private final String human = "human"; + private final String mortal = "mortal"; + private final String wheel = "wheel"; + + private final String x = "X"; + private final String y = "Y"; + private final String z = "Z"; + + private final Constant rulewerk_socrate = Expressions.makeAbstractConstant(this.socrate); + + private final Predicate rulewerk_bicycle = Expressions.makePredicate(this.bicycle, 1); + private final Predicate rulewerk_hasPart = Expressions.makePredicate(this.hasPart, 2); + private final Predicate rulewerk_human = Expressions.makePredicate(this.human, 1); + private final Predicate rulewerk_mortal = Expressions.makePredicate(this.mortal, 1); + private final Predicate rulewerk_wheel = Expressions.makePredicate(this.wheel, 1); + + private final Variable rulewerk_x = Expressions.makeUniversalVariable(this.x); + private final Variable rulewerk_y = Expressions.makeUniversalVariable(this.y); + private final Variable rulewerk_z = Expressions.makeUniversalVariable(this.z); + private final Variable rulewerk_ex_y = Expressions.makeExistentialVariable(this.y); + + private final DefaultTermFactory termFactory = new DefaultTermFactory(); + + private final fr.lirmm.graphik.graal.api.core.Constant graal_socrate = this.termFactory + .createConstant(this.socrate); + private final fr.lirmm.graphik.graal.api.core.Constant graal_redsBike = this.termFactory + .createConstant(this.redsBike); + + private final fr.lirmm.graphik.graal.api.core.Predicate graal_bicycle = new fr.lirmm.graphik.graal.api.core.Predicate( + this.bicycle, 1); + private final fr.lirmm.graphik.graal.api.core.Predicate graal_hasPart = new fr.lirmm.graphik.graal.api.core.Predicate( + this.hasPart, 2); + private final fr.lirmm.graphik.graal.api.core.Predicate graal_human = new fr.lirmm.graphik.graal.api.core.Predicate( + this.human, 1); + private final fr.lirmm.graphik.graal.api.core.Predicate graal_mortal = new fr.lirmm.graphik.graal.api.core.Predicate( + this.mortal, 1); + private final fr.lirmm.graphik.graal.api.core.Predicate graal_wheel = new fr.lirmm.graphik.graal.api.core.Predicate( + this.wheel, 1); + + private final fr.lirmm.graphik.graal.api.core.Variable graal_x = this.termFactory.createVariable(this.x); + private final fr.lirmm.graphik.graal.api.core.Variable graal_y = 
this.termFactory.createVariable(this.y);
+	private final fr.lirmm.graphik.graal.api.core.Variable graal_z = this.termFactory.createVariable(this.z);
+
+	@Test
+	public void testConvertAtom() throws ParseException {
+		final PositiveLiteral rulewerk_atom = Expressions.makePositiveLiteral(this.rulewerk_human, this.rulewerk_socrate);
+		final fr.lirmm.graphik.graal.api.core.Atom graal_atom = new DefaultAtom(this.graal_human, this.graal_socrate);
+		assertEquals(rulewerk_atom, GraalToRulewerkModelConverter.convertAtom(graal_atom, Collections.emptySet()));
+
+		final PositiveLiteral rulewerk_atom_2 = Expressions.makePositiveLiteral(this.rulewerk_hasPart, this.rulewerk_x,
+				this.rulewerk_socrate);
+		final fr.lirmm.graphik.graal.api.core.Atom graal_atom_2 = new DefaultAtom(this.graal_hasPart, this.graal_x,
+				this.graal_socrate);
+		assertEquals(rulewerk_atom_2, GraalToRulewerkModelConverter.convertAtom(graal_atom_2, Collections.emptySet()));
+	}
+
+	@Test
+	public void testConvertFact() throws ParseException {
+		final Fact rulewerk_atom = Expressions.makeFact(this.rulewerk_human, Arrays.asList(this.rulewerk_socrate));
+		final fr.lirmm.graphik.graal.api.core.Atom graal_atom = new DefaultAtom(this.graal_human, this.graal_socrate);
+		assertEquals(rulewerk_atom, GraalToRulewerkModelConverter.convertAtomToFact(graal_atom));
+	}
+
+	@Test
+	public void testConvertRule() throws ParseException {
+		// mortal(X) :- human(X)
+		final PositiveLiteral rulewerk_mortal_atom = Expressions.makePositiveLiteral(this.rulewerk_mortal, this.rulewerk_x);
+		final PositiveLiteral rulewerk_human_atom = Expressions.makePositiveLiteral(this.rulewerk_human, this.rulewerk_x);
+		final Rule rulewerk_rule = Expressions.makeRule(rulewerk_mortal_atom, rulewerk_human_atom);
+
+		final fr.lirmm.graphik.graal.api.core.Atom graal_mortal_atom = new DefaultAtom(this.graal_mortal, this.graal_x);
+		final fr.lirmm.graphik.graal.api.core.Atom graal_human_atom = new DefaultAtom(this.graal_human, this.graal_x);
+		final fr.lirmm.graphik.graal.api.core.Rule graal_rule = new DefaultRule(new LinkedListAtomSet(graal_human_atom),
+				new LinkedListAtomSet(graal_mortal_atom));
+
+		assertEquals(rulewerk_rule, GraalToRulewerkModelConverter.convertRule(graal_rule));
+	}
+
+	@Test
+	public void testConvertExistentialRule() throws ParseException {
+		// hasPart(X, Y), wheel(Y) :- bicycle(X)
+
+		final PositiveLiteral rulewerk_hasPart_atom = Expressions.makePositiveLiteral(this.rulewerk_hasPart, this.rulewerk_x,
+				this.rulewerk_ex_y);
+		final PositiveLiteral rulewerk_wheel_atom = Expressions.makePositiveLiteral(this.rulewerk_wheel, this.rulewerk_ex_y);
+		final PositiveLiteral rulewerk_bicycle_atom = Expressions.makePositiveLiteral(this.rulewerk_bicycle, this.rulewerk_x);
+		final Rule rulewerk_rule = Expressions.makeRule(
+				Expressions.makePositiveConjunction(rulewerk_hasPart_atom, rulewerk_wheel_atom),
+				Expressions.makeConjunction(rulewerk_bicycle_atom));
+
+		final fr.lirmm.graphik.graal.api.core.Atom graal_hasPart_atom = new DefaultAtom(this.graal_hasPart,
+				this.graal_x, this.graal_y);
+		final fr.lirmm.graphik.graal.api.core.Atom graal_wheel_atom = new DefaultAtom(this.graal_wheel, this.graal_y);
+		final fr.lirmm.graphik.graal.api.core.Atom graal_bicycle_atom = new DefaultAtom(this.graal_bicycle,
+				this.graal_x);
+		final fr.lirmm.graphik.graal.api.core.Rule graal_rule = new DefaultRule(
+				new LinkedListAtomSet(graal_bicycle_atom), new LinkedListAtomSet(graal_hasPart_atom, graal_wheel_atom));
+
+		assertEquals(rulewerk_rule, 
GraalToRulewerkModelConverter.convertRule(graal_rule)); + } + + @Test + public void testConvertQuery() throws ParseException { + // ?(X) :- mortal(X) + final String mortalQuery = "mortalQuery"; + final PositiveLiteral query = Expressions.makePositiveLiteral(Expressions.makePredicate(mortalQuery, 1), + this.rulewerk_x); + final Rule queryRule = Expressions.makeRule(query, + Expressions.makePositiveLiteral(this.rulewerk_mortal, this.rulewerk_x)); + + final fr.lirmm.graphik.graal.api.core.Atom graal_query_atom = new DefaultAtom(this.graal_mortal, this.graal_x); + + final ConjunctiveQuery graal_query = new DefaultConjunctiveQuery(new LinkedListAtomSet(graal_query_atom), + Arrays.asList(this.graal_x)); + + final GraalConjunctiveQueryToRule importedQuery = GraalToRulewerkModelConverter.convertQuery(mortalQuery, + graal_query); + assertEquals(query, importedQuery.getQuery()); + assertEquals(queryRule, importedQuery.getRule()); + + final String complexQuery = "complexQuery"; + final String predicate1 = "predicate1"; + final String predicate2 = "predicate2"; + final String predicate3 = "predicate3"; + final String predicate4 = "predicate4"; + final String stockholm = "stockholm"; + + final fr.lirmm.graphik.graal.api.core.Predicate graal_predicate1 = new fr.lirmm.graphik.graal.api.core.Predicate( + predicate1, 1); + final fr.lirmm.graphik.graal.api.core.Predicate graal_predicate2 = new fr.lirmm.graphik.graal.api.core.Predicate( + predicate2, 2); + final fr.lirmm.graphik.graal.api.core.Predicate graal_predicate3 = new fr.lirmm.graphik.graal.api.core.Predicate( + predicate3, 2); + final fr.lirmm.graphik.graal.api.core.Predicate graal_predicate4 = new fr.lirmm.graphik.graal.api.core.Predicate( + predicate4, 3); + + final fr.lirmm.graphik.graal.api.core.Atom graal_predicate1_atom = new DefaultAtom(graal_predicate1, + this.graal_x); + final fr.lirmm.graphik.graal.api.core.Atom graal_predicate2_atom = new DefaultAtom(graal_predicate2, + this.graal_y, this.graal_x); + final fr.lirmm.graphik.graal.api.core.Atom graal_predicate3_atom = new DefaultAtom(graal_predicate3, + this.graal_y, this.termFactory.createConstant(stockholm)); + + final fr.lirmm.graphik.graal.api.core.Atom graal_predicate4_atom = new DefaultAtom(graal_predicate4, + this.graal_x, this.graal_y, this.graal_z); + + final ConjunctiveQuery graal_complex_query = new DefaultConjunctiveQuery( + new LinkedListAtomSet(graal_predicate1_atom, graal_predicate2_atom, graal_predicate3_atom, + graal_predicate4_atom), + Arrays.asList(this.graal_x, this.graal_x, this.graal_y)); + + final GraalConjunctiveQueryToRule importedComplexQuery = GraalToRulewerkModelConverter.convertQuery(complexQuery, + graal_complex_query); + + final PositiveLiteral expectedComplexQueryAtom = Expressions.makePositiveLiteral( + Expressions.makePredicate(complexQuery, 3), this.rulewerk_x, this.rulewerk_x, this.rulewerk_y); + final PositiveLiteral rulewerk_predicate1_atom = Expressions + .makePositiveLiteral(Expressions.makePredicate(predicate1, 1), this.rulewerk_x); + final PositiveLiteral rulewerk_predicate2_atom = Expressions + .makePositiveLiteral(Expressions.makePredicate(predicate2, 2), this.rulewerk_y, this.rulewerk_x); + final PositiveLiteral rulewerk_predicate3_atom = Expressions.makePositiveLiteral( + Expressions.makePredicate(predicate3, 2), this.rulewerk_y, Expressions.makeAbstractConstant(stockholm)); + final PositiveLiteral rulewerk_predicate4_atom = Expressions.makePositiveLiteral( + Expressions.makePredicate(predicate4, 3), this.rulewerk_x, this.rulewerk_y, 
this.rulewerk_z); + final Rule expectedComplexQueryRule = Expressions.makeRule(expectedComplexQueryAtom, rulewerk_predicate1_atom, + rulewerk_predicate2_atom, rulewerk_predicate3_atom, rulewerk_predicate4_atom); + + assertEquals(expectedComplexQueryAtom, importedComplexQuery.getQuery()); + + assertEquals(expectedComplexQueryRule, importedComplexQuery.getRule()); + } + + @Test(expected = GraalConvertException.class) + public void testConvertQueryExceptionNoVariables() { + + final fr.lirmm.graphik.graal.api.core.Atom graal_atom = new DefaultAtom(this.graal_hasPart, this.graal_x, + this.graal_socrate); + final ConjunctiveQuery graal_query_without_answer_variables = new DefaultConjunctiveQuery( + new LinkedListAtomSet(graal_atom), new ArrayList<>()); + GraalToRulewerkModelConverter.convertQuery("name", graal_query_without_answer_variables); + } + + @Test(expected = GraalConvertException.class) + public void testConvertQueryExceptionEmptyBody() { + + final ConjunctiveQuery graal_query_without_body = new DefaultConjunctiveQuery(new LinkedListAtomSet(), + Arrays.asList(this.graal_y)); + GraalToRulewerkModelConverter.convertQuery("name", graal_query_without_body); + } + + @Test(expected = GraalConvertException.class) + public void testConvertQueryExceptionBlankPredicate() { + + final fr.lirmm.graphik.graal.api.core.Atom graal_atom_1 = new DefaultAtom(this.graal_hasPart, + this.graal_redsBike, this.graal_z); + final fr.lirmm.graphik.graal.api.core.Atom graal_atom_2 = new DefaultAtom(this.graal_human, this.graal_z); + final ConjunctiveQuery graal_query = new DefaultConjunctiveQuery( + new LinkedListAtomSet(graal_atom_1, graal_atom_2), Arrays.asList(this.graal_z)); + + GraalToRulewerkModelConverter.convertQuery(" ", graal_query); + } +} diff --git a/rulewerk-integrationtests/LICENSE.txt b/rulewerk-integrationtests/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-integrationtests/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/rulewerk-integrationtests/pom.xml b/rulewerk-integrationtests/pom.xml
new file mode 100644
index 000000000..e0fdbedec
--- /dev/null
+++ b/rulewerk-integrationtests/pom.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>org.semanticweb.rulewerk</groupId>
+		<artifactId>rulewerk-parent</artifactId>
+		<version>0.10.0-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>rulewerk-integrationtests</artifactId>
+	<packaging>jar</packaging>
+
+	<name>Rulewerk Integration Tests</name>
+	<description>Contains blackbox tests for VLog</description>
+
+	<dependencies>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-core</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-parser</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-vlog</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>3.0.0-M5</version>
+				<configuration>
+					<skipTests>true</skipTests>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-failsafe-plugin</artifactId>
+				<version>3.0.0-M5</version>
+				<executions>
+					<execution>
+						<id>rulewerk-vlog-integration-tests</id>
+						<goals>
+							<goal>integration-test</goal>
+							<goal>verify</goal>
+						</goals>
+						<configuration>
+							<includes>
+								<include>org.semanticweb.rulewerk.integrationtests.vlogissues.*IT</include>
+							</includes>
+							<summaryFile>${project.build.directory}/failsafe-reports/failsafe-vlog-summary.xml</summaryFile>
+							<skipITs>true</skipITs>
+						</configuration>
+					</execution>
+					<execution>
+						<id>default</id>
+						<goals>
+							<goal>integration-test</goal>
+							<goal>verify</goal>
+						</goals>
+						<configuration>
+							<excludes>
+								<exclude>**/vlogissues/*.java</exclude>
+							</excludes>
+							<skipITs>false</skipITs>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+
+</project>
diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java
new file mode 100644
index 000000000..c0ff802fe
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/IntegrationTest.java
@@ -0,0 +1,93 @@
+package org.semanticweb.rulewerk.integrationtests;
+
+/*-
+ * #%L
+ * Rulewerk Integration Tests
+ * %%
+ * Copyright (C) 2018 - 2021 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.InputStream;
+
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.RuleParser;
+import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;
+
+public abstract class IntegrationTest {
+	/**
+	 * Returns the prefix to use for resource names.
+	 *
+	 * @return the prefix to use when turning resource names into paths
+	 *
+	 *         This needs to be overridden in subpackages for loading to work
+	 *         correctly.
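+	 *
+	 *         For example, a test class keeping its resources under a
+	 *         hypothetical {@code /myfeature/} classpath directory (an
+	 *         illustrative name, not one of the suites in this module) would
+	 *         override it as:
+	 *
+	 *         <pre>
+	 *         &#64;Override
+	 *         protected String getResourcePrefix() {
+	 *         	return "/myfeature/";
+	 *         }
+	 *         </pre>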
+	 */
+	protected String getResourcePrefix() {
+		return "/";
+	}
+
+	/**
+	 * Obtain an input stream for a resource name.
+	 *
+	 * @param resourceName the resource name to load
+	 * @return an {@link InputStream} pointing to the resource
+	 */
+	protected InputStream getResourceAsStream(final String resourceName) {
+		String prefix = this.getResourcePrefix();
+
+		// avoid duplicating the prefix when the caller already supplied it,
+		// and avoid a double slash where prefix and resource name meet
+		if (resourceName.startsWith(prefix)) {
+			prefix = "";
+		} else if (resourceName.startsWith("/") && prefix.endsWith("/")) {
+			prefix = prefix.substring(0, prefix.length() - 1);
+		}
+
+		return this.getClass().getResourceAsStream(prefix + resourceName);
+	}
+
+	/**
+	 * Load a Knowledge Base from a resource name.
+	 *
+	 * @param resourceName the name of the resource to parse into a Knowledge Base
+	 *
+	 * @throws ParsingException when there is an error during parsing
+	 *
+	 * @return a {@link KnowledgeBase} containing the parsed contents of the named
+	 *         resource
+	 */
+	protected KnowledgeBase parseKbFromResource(final String resourceName) throws ParsingException {
+		final KnowledgeBase kb = new KnowledgeBase();
+
+		RuleParser.parseInto(kb, this.getResourceAsStream(resourceName));
+
+		return kb;
+	}
+
+	/**
+	 * Obtain a reasoner loaded with the Knowledge Base read from the resource
+	 * name.
+	 *
+	 * @param resourceName the name of the resource to load into the Reasoner
+	 *
+	 * @throws ParsingException when there is an error during parsing
+	 *
+	 * @return a {@link VLogReasoner} containing the parsed contents of the named
+	 *         resource
+	 */
+	protected Reasoner getReasonerWithKbFromResource(final String resourceName) throws ParsingException {
+		return new VLogReasoner(this.parseKbFromResource(resourceName));
+	}
+}
diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/AcyclicityTest.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/AcyclicityTest.java
new file mode 100644
index 000000000..414a91867
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/AcyclicityTest.java
@@ -0,0 +1,33 @@
+package org.semanticweb.rulewerk.integrationtests.acyclicity;
+
+/*-
+ * #%L
+ * Rulewerk Integration Tests
+ * %%
+ * Copyright (C) 2018 - 2021 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + + +import org.semanticweb.rulewerk.integrationtests.IntegrationTest; + +public abstract class AcyclicityTest extends IntegrationTest { + + @Override + protected String getResourcePrefix() { + return "/acyclicity/"; + } + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/JAIT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/JAIT.java new file mode 100644 index 000000000..773bf33f2 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/JAIT.java @@ -0,0 +1,101 @@ +package org.semanticweb.rulewerk.integrationtests.acyclicity; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class JAIT extends AcyclicityTest { + + @Test + public void isJA_datalog() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("datalog.rls")) { + assertTrue(r.isJA()); + } + } + + @Test + public void isJA_nonRecursive() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("non-recursive.rls")) { + assertTrue(r.isJA()); + } + } + + @Test + public void isJA_JA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("JA-1.rls")) { + assertTrue(r.isJA()); + } + } + + @Test + public void isNotJA_RJA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-1.rls")) { + assertFalse(r.isJA()); + } + } + + @Test + public void isNotJA_RJA_2() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-2.rls")) { + assertFalse(r.isJA()); + } + } + + @Test + public void isNotJA_RJA_3() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-3.rls")) { + assertFalse(r.isJA()); + } + } + + @Test + public void isNotJA_MFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFA-1.rls")) { + assertFalse(r.isJA()); + } + } + + @Test + public void isNotJA_RMFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFA-1.rls")) { + assertFalse(r.isJA()); + } + } + + @Test + public void isNotJA_MFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFC-1.rls")) { + assertFalse(r.isJA()); + } + } + + @Test + public void isNotJA_RMFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFC-1.rls")) { + assertFalse(r.isJA()); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/MFAIT.java 
b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/MFAIT.java new file mode 100644 index 000000000..48d69f72c --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/MFAIT.java @@ -0,0 +1,102 @@ +package org.semanticweb.rulewerk.integrationtests.acyclicity; + +import static org.junit.Assert.assertFalse; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertTrue; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class MFAIT extends AcyclicityTest { + + @Test + public void isMFA_datalog() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("datalog.rls")) { + assertTrue(r.isMFA()); + } + } + + @Test + public void isMFA_nonRecursive() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("non-recursive.rls")) { + assertTrue(r.isMFA()); + } + } + + @Test + public void isMFA_JA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("JA-1.rls")) { + assertTrue(r.isMFA()); + } + } + + @Test + public void isMFA_MFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFA-1.rls")) { + assertTrue(r.isMFA()); + } + } + + @Test + public void isNotMFA_RJA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-1.rls")) { + assertFalse(r.isMFA()); + } + } + + @Test + public void isNotMFA_RJA_2() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-2.rls")) { + assertFalse(r.isMFA()); + } + } + + @Test + public void isNotMFA_RJA_3() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-3.rls")) { + assertFalse(r.isMFA()); + } + } + + @Test + public void isNotMFA_RMFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFA-1.rls")) { + assertFalse(r.isMFA()); + } + } + + @Test + public void isNotMFA_MFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFC-1.rls")) { + assertFalse(r.isMFA()); + } + } + + @Test + public void isNotMFA_RMFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFC-1.rls")) { + assertFalse(r.isMFA()); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/MFCIT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/MFCIT.java new file mode 100644 index 000000000..66dca87d9 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/MFCIT.java @@ -0,0 +1,102 @@ +package org.semanticweb.rulewerk.integrationtests.acyclicity; + +/*- + * #%L + * 
Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class MFCIT extends AcyclicityTest { + + @Test + public void isNotMFC_datalog() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("datalog.rls")) { + assertFalse(r.isMFC()); + } + } + + @Test + public void isNotMFC_nonRecursive() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("non-recursive.rls")) { + assertFalse(r.isMFC()); + } + } + + @Test + public void isNotMFC_JA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("JA-1.rls")) { + assertFalse(r.isMFC()); + } + } + + @Test + public void isNotMFC_MFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFA-1.rls")) { + assertFalse(r.isMFC()); + } + } + + @Test + public void isMFC_RMFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFA-1.rls")) { + assertTrue(r.isMFC()); + } + } + + @Test + public void isMFC_RJA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-1.rls")) { + assertTrue(r.isMFC()); + } + } + + @Test + public void isMFC_RJA_2() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-2.rls")) { + assertTrue(r.isMFC()); + } + } + + @Test + public void isMFC_RJA_3() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-3.rls")) { + assertTrue(r.isMFC()); + } + } + + @Test + public void isMFC_MFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFC-1.rls")) { + assertTrue(r.isMFC()); + } + } + + @Test + public void isMFC_RMFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFC-1.rls")) { + assertTrue(r.isMFC()); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/RJAIT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/RJAIT.java new file mode 100644 index 000000000..7baedf54c --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/RJAIT.java @@ -0,0 +1,102 @@ +package org.semanticweb.rulewerk.integrationtests.acyclicity; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+public class RJAIT extends AcyclicityTest {
+
+	@Test
+	public void isRJA_datalog() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("datalog.rls")) {
+			assertTrue(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isRJA_nonRecursive() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("non-recursive.rls")) {
+			assertTrue(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isRJA_JA_1() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("JA-1.rls")) {
+			assertTrue(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isRJA_RJA_1() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("RJA-1.rls")) {
+			assertTrue(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isRJA_RJA_2() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("RJA-2.rls")) {
+			assertTrue(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isRJA_RJA_3() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("RJA-3.rls")) {
+			assertTrue(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isNotRJA_MFA_1() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("MFA-1.rls")) {
+			assertFalse(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isNotRJA_RMFA_1() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("RMFA-1.rls")) {
+			assertFalse(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isNotRJA_MFC_1() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("MFC-1.rls")) {
+			assertFalse(r.isRJA());
+		}
+	}
+
+	@Test
+	public void isNotRJA_RMFC_1() throws ParsingException {
+		try (Reasoner r = this.getReasonerWithKbFromResource("RMFC-1.rls")) {
+			assertFalse(r.isRJA());
+		}
+	}
+}
diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/RMFAIT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/RMFAIT.java
new file mode 100644
index 000000000..8a1800d53
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/acyclicity/RMFAIT.java
@@ -0,0 +1,105 @@
+package org.semanticweb.rulewerk.integrationtests.acyclicity;
+
+import static org.junit.Assert.assertFalse;
+
+/*-
+ * #%L
+ * Rulewerk Integration Tests
+ * %%
+ * Copyright (C) 2018 - 2021 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertTrue; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RMFAIT extends AcyclicityTest { + + @Test + public void isRMFA_datalog() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("datalog.rls")) { + assertTrue(r.isRMFA()); + } + } + + @Test + public void isRMFA_nonRecursive() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("non-recursive.rls")) { + assertTrue(r.isRMFA()); + } + } + + @Test + public void isRMFA_JA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("JA-1.rls")) { + assertTrue(r.isRMFA()); + } + } + + @Test + public void isRMFA_RJA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-1.rls")) { + assertTrue(r.isRMFA()); + } + } + + @Test + public void isRMFA_RJA_2() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-2.rls")) { + assertTrue(r.isRMFA()); + } + } + + @Test + public void isRMFA_RJA_3() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RJA-3.rls")) { + assertTrue(r.isRMFA()); + } + } + + @Test + public void isRMFA_MFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFA-1.rls")) { + assertTrue(r.isRMFA()); + } + } + + //FIXME should be RMFA + @Ignore + @Test + public void isRMFA_RMFA_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFA-1.rls")) { + assertTrue(r.isRMFA()); + } + } + + @Test + public void isNotRMFA_MFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("MFC-1.rls")) { + assertFalse(r.isRMFA()); + } + } + + @Test + public void isNotRMFA_RMFC_1() throws ParsingException { + try (Reasoner r = this.getReasonerWithKbFromResource("RMFC-1.rls")) { + assertFalse(r.isRMFA()); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java new file mode 100644 index 000000000..cda83279e --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/RulewerkIssue175IT.java @@ -0,0 +1,55 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class RulewerkIssue175IT extends VLogIssue { + @Test + public void issue175_full_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("rulewerk/175.rls")) { + reasoner.reason(); + try (QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral("VANDALISMRESERVEDENTITIESSUPPREL0", + Expressions.makeAbstractConstant("VANDALISMRESERVEDENTITIESSUPPRULE50")), false)) { + assertTrue(result.hasNext()); + } + } + } + + @Test + public void issue175_minimal_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("rulewerk/175-minimal.rls")) { + reasoner.reason(); + try (QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral("VANDALISMRESERVEDENTITIESSUPPREL0", + Expressions.makeAbstractConstant("VANDALISMRESERVEDENTITIESSUPPRULE50")), false)) { + assertTrue(result.hasNext()); + } + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java new file mode 100644 index 000000000..9d32e931c --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue.java @@ -0,0 +1,31 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.integrationtests.IntegrationTest; + +abstract class VLogIssue extends IntegrationTest { + @Override + protected String getResourcePrefix() { + return "/vlogissues/"; + } + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue49IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue49IT.java new file mode 100644 index 000000000..85ff37367 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue49IT.java @@ -0,0 +1,46 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * https://github.com/karmaresearch/vlog/issues/49 + * + * @author Irina Dragoste + * + */ +public class VLogIssue49IT extends VLogIssue { + + @Test + public void ruleset_succeeds_noCrash() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/49.rls")) { + final boolean finished = reasoner.reason(); + assertTrue(finished); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue50IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue50IT.java new file mode 100644 index 000000000..5b61b1167 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue50IT.java @@ -0,0 +1,46 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * https://github.com/karmaresearch/vlog/issues/50 + * + * @author Irina Dragoste + * + */ +public class VLogIssue50IT extends VLogIssue { + + @Test + public void ruleset1_succeeds_noCrash() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/50.rls")) { + final boolean finished = reasoner.reason(); + assertTrue(finished); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue51IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue51IT.java new file mode 100644 index 000000000..e5533bc42 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue51IT.java @@ -0,0 +1,54 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * https://github.com/karmaresearch/vlog/issues/51 + * + * @author Irina Dragoste + * + */ +public class VLogIssue51IT extends VLogIssue { + + @Test + public void ruleset1_succeeds_noCrash() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/51-1.rls")) { + final boolean finished = reasoner.reason(); + assertTrue(finished); + } + } + + @Test + public void ruleset2_succeeds_noCrash() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/51-2.rls")) { + final boolean finished = reasoner.reason(); + assertTrue(finished); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java new file mode 100644 index 000000000..1cf093861 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue61IT.java @@ -0,0 +1,71 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.RuleParser;
+
+public class VLogIssue61IT extends VLogIssue {
+
+	boolean hasCorrectAnswers(final QueryResultIterator answers) {
+		int numAnswers = 0;
+		boolean hasEqualNullsAnswer = false;
+
+		// expect at most two answers, at least one of which binds the second
+		// and third query variables to the same (null) term
+		while (answers.hasNext()) {
+			++numAnswers;
+
+			final List<Term> terms = answers.next().getTerms();
+			hasEqualNullsAnswer = hasEqualNullsAnswer || (terms.get(1).equals(terms.get(2)));
+		}
+
+		return hasEqualNullsAnswer && numAnswers <= 2;
+	}
+
+	@Test
+	public void ruleset01_succeeds() throws ParsingException, IOException {
+		try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/61-1.rls")) {
+			reasoner.reason();
+
+			final PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)");
+			assertTrue(this.hasCorrectAnswers(reasoner.answerQuery(query, true)));
+		}
+	}
+
+	@Test
+	public void ruleset02_succeeds() throws ParsingException, IOException {
+		try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/61-2.rls")) {
+			reasoner.reason();
+
+			final PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)");
+			assertTrue(this.hasCorrectAnswers(reasoner.answerQuery(query, true)));
+		}
+	}
+}
diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java
new file mode 100644
index 000000000..f289764b4
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue63IT.java
@@ -0,0 +1,38 @@
+package org.semanticweb.rulewerk.integrationtests.vlogissues;
+
+/*-
+ * #%L
+ * Rulewerk Integration Tests
+ * %%
+ * Copyright (C) 2018 - 2021 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class VLogIssue63IT extends VLogIssue { + + @Test(expected = RulewerkRuntimeException.class) + public void test() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/63.rls")) { + reasoner.reason(); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue65IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue65IT.java new file mode 100644 index 000000000..c6babea94 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue65IT.java @@ -0,0 +1,73 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * https://github.com/karmaresearch/vlog/issues/65 + * + * @author Irina Dragoste + * + */ +public class VLogIssue65IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws IOException, ParsingException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/65.rls")) { + + reasoner.reason(); + + this.testCorrectness(reasoner); + } + } + + @Test + public void ruleset_succeeds_within1s() throws IOException, ParsingException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/65.rls")) { + + reasoner.setReasoningTimeout(1); + reasoner.reason(); + + this.testCorrectness(reasoner); + } + } + + private void testCorrectness(final Reasoner reasoner) throws ParsingException { + final PositiveLiteral query = RuleParser.parsePositiveLiteral("Goal(?x)"); + final QueryResultIterator answerQuery = reasoner.answerQuery(query, true); + assertFalse(answerQuery.hasNext()); + assertEquals(Correctness.SOUND_AND_COMPLETE, answerQuery.getCorrectness()); + + assertEquals(0, reasoner.getInferences().count()); + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue66IT.java 
b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue66IT.java new file mode 100644 index 000000000..4a106ed13 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue66IT.java @@ -0,0 +1,70 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * https://github.com/karmaresearch/vlog/issues/66 + * + * @author Irina Dragoste + * + */ +public class VLogIssue66IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws IOException, ParsingException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/66.rls")) { + + reasoner.reason(); + + this.testCorrectness(reasoner); + } + } + + + + private void testCorrectness(final Reasoner reasoner) throws ParsingException { + final PositiveLiteral query = RuleParser.parsePositiveLiteral(" R(a)"); + final QueryResultIterator answerQuery = reasoner.answerQuery(query, true); + assertEquals(Correctness.SOUND_AND_COMPLETE, answerQuery.getCorrectness()); + assertTrue(answerQuery.hasNext()); + final QueryResult result = answerQuery.next(); + assertEquals(Arrays.asList(new AbstractConstantImpl("a")), result.getTerms()); + + assertFalse(answerQuery.hasNext()); + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java new file mode 100644 index 000000000..ed9daa56f --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue67IT.java @@ -0,0 +1,44 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public class VLogIssue67IT extends VLogIssue { + + @Test + public void test() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/67.rls")) { + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("true(a)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue68IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue68IT.java new file mode 100644 index 000000000..b2b195592 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue68IT.java @@ -0,0 +1,54 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * https://github.com/karmaresearch/vlog/issues/68 + * + * @author Irina Dragoste + * + */ +public class VLogIssue68IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/68.rls")) { + reasoner.reason(); + + final Set<Fact> inferences = reasoner.getInferences().collect(Collectors.toSet()); + final Set<Fact> expectedInferences = new HashSet<>( + this.parseKbFromResource("vlog/68-expected.rls").getFacts()); + assertEquals(expectedInferences, inferences); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java new file mode 100644 index 000000000..1ada7cc7e --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue69IT.java @@ -0,0 +1,47 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +public class VLogIssue69IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = getReasonerWithKbFromResource("vlog/69.rls")) { + reasoner.reason(); + + PositiveLiteral query1 = RuleParser.parsePositiveLiteral("prec(?X,?Y)"); + assertEquals(2, reasoner.countQueryAnswers(query1, true).getCount()); + + PositiveLiteral query2 = RuleParser.parsePositiveLiteral("conc(?X,?Y)"); + assertEquals(4, reasoner.countQueryAnswers(query2, true).getCount()); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue70IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue70IT.java new file mode 100644 index 000000000..71dd5d4bf --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue70IT.java @@ -0,0 +1,54 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * https://github.com/karmaresearch/vlog/issues/72 + * + * @author Irina Dragoste + * + */ +public class VLogIssue70IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/72.rls")) { + reasoner.reason(); + + final Set<Fact> inferences = reasoner.getInferences().collect(Collectors.toSet()); + final Set<Fact> expectedInferences = new HashSet<>( + this.parseKbFromResource("vlog/72-expected.rls").getFacts()); + assertEquals(expectedInferences, inferences); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue71IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue71IT.java new file mode 100644 index 000000000..c498a3f45 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue71IT.java @@ -0,0 +1,65 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * https://github.com/karmaresearch/vlog/issues/71 + * + * @author Irina Dragoste + * + */ +public class VLogIssue71IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/71.rls")) { + reasoner.reason(); + this.testCorrectness(reasoner); + } + } + + private void testCorrectness(final Reasoner reasoner) throws ParsingException { + + try (final QueryResultIterator answerQuery = reasoner.answerQuery(RuleParser.parsePositiveLiteral("Goal(?x)"), + true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, answerQuery.getCorrectness()); + assertFalse(answerQuery.hasNext()); + } + + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(RuleParser.parsePositiveLiteral("ResultDeg(i8580)"), true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, answerQuery.getCorrectness()); + assertFalse(answerQuery.hasNext()); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue72IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue72IT.java new file mode 100644 index 000000000..94ec92006 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue72IT.java @@ -0,0 +1,54 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * https://github.com/karmaresearch/vlog/issues/70 + * + * @author Irina Dragoste + * + */ +public class VLogIssue72IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/70.rls")) { + reasoner.reason(); + + final Set<Fact> inferences = reasoner.getInferences().collect(Collectors.toSet()); + final Set<Fact> expectedInferences = new HashSet<>( + this.parseKbFromResource("vlog/70-expected.rls").getFacts()); + assertEquals(expectedInferences, inferences); + } + } +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue73IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue73IT.java new file mode 100644 index 000000000..4e3f23fb3 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue73IT.java @@ -0,0 +1,113 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2022 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * https://github.com/karmaresearch/vlog/issues/55 + * + * https://github.com/karmaresearch/vlog/issues/73 + * + * RDF literals (constants) "foo"^^<http://www.w3.org/2001/XMLSchema#string> and + * "foo" should be interpreted the same in VLog.
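+ * + * Illustrative sketch (not part of the test fixture; the rule in 73.rls is + * assumed here to be the obvious join): given the facts + * long("foo"^^<http://www.w3.org/2001/XMLSchema#string>) and short("foo"), + * a rule join(?x) :- long(?x), short(?x) . should derive join("foo"), since + * RDF 1.1 identifies a plain string literal with the corresponding + * xsd:string-typed literal.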
+ * + * @author Irina Dragoste + * + */ +public class VLogIssue73IT extends VLogIssue { + + // TODO join data from RDF with data from Rulewerk, for example + + // TODO add unit test for SPARQL data source + + @Test + public void rule_rulewerk_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/73/73.rls")) { + final Statement fact1 = RuleParser.parseFact("long(\"foo\"^^<http://www.w3.org/2001/XMLSchema#string>) ."); + final Statement fact2 = RuleParser.parseFact("short(\"foo\") ."); + reasoner.getKnowledgeBase().addStatement(fact1); + reasoner.getKnowledgeBase().addStatement(fact2); + reasoner.reason(); + + testJoin(reasoner); + } + } + + private void testJoin(final Reasoner reasoner) throws ParsingException { + try (final QueryResultIterator answerQuery = reasoner.answerQuery(RuleParser.parsePositiveLiteral("join(?x)"), + true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, answerQuery.getCorrectness()); + assertTrue(answerQuery.hasNext()); + } + } + + @Test + public void rule_csv_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/73/73.rls")) { + final DataSourceDeclaration declaration1 = RuleParser.parseDataSourceDeclaration( + "@source long[1]: load-csv(\"src/test/resources/vlogissues/vlog/73/long.csv\") ."); + final DataSourceDeclaration declaration2 = RuleParser.parseDataSourceDeclaration( + "@source short[1]: load-csv(\"src/test/resources/vlogissues/vlog/73/short.csv\") ."); + + reasoner.getKnowledgeBase().addStatement(declaration1); + reasoner.getKnowledgeBase().addStatement(declaration2); + reasoner.reason(); + + testJoin(reasoner); + } + } + + @Test + public void rule_nt_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/73/73-nt.rls")) { + final DataSourceDeclaration declaration = RuleParser.parseDataSourceDeclaration( + "@source triple[3]: load-rdf(\"src/test/resources/vlogissues/vlog/73/73.nt\") ."); + reasoner.getKnowledgeBase().addStatement(declaration); + reasoner.reason(); + + testJoin(reasoner); + } + } + +} diff --git a/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue98IT.java b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue98IT.java new file mode 100644 index 000000000..92b37c014 --- /dev/null +++ b/rulewerk-integrationtests/src/test/java/org/semanticweb/rulewerk/integrationtests/vlogissues/VLogIssue98IT.java @@ -0,0 +1,75 @@ +package org.semanticweb.rulewerk.integrationtests.vlogissues; + +/*- + * #%L + * Rulewerk Integration Tests + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * https://github.com/karmaresearch/vlog/issues/98 + * + * @author Irina Dragoste + * + */ +public class VLogIssue98IT extends VLogIssue { + + @Test + public void ruleset_succeeds() throws ParsingException, IOException { + try (final Reasoner reasoner = this.getReasonerWithKbFromResource("vlog/98.rls")) { + reasoner.reason(); + + testCorrectness(reasoner); + } + } + + private void testCorrectness(final Reasoner reasoner) throws ParsingException { + // part_of_molar_crown(mc1) . + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(RuleParser.parsePositiveLiteral("part_of_molar_crown(?x)"), true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, answerQuery.getCorrectness()); + assertTrue(answerQuery.hasNext()); + final List<Term> terms = answerQuery.next().getTerms(); + final List<Term> expectedTerms = new ArrayList<>(); + expectedTerms.add(new AbstractConstantImpl("mc1")); + assertEquals(expectedTerms, terms); + assertFalse(answerQuery.hasNext()); + } + } + +} diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/JA-1.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/JA-1.rls new file mode 100644 index 000000000..145eda1db --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/JA-1.rls @@ -0,0 +1 @@ +R(?y,!z) :- R(?x,?y), C(?y) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/MFA-1.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/MFA-1.rls new file mode 100644 index 000000000..88e95da38 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/MFA-1.rls @@ -0,0 +1,2 @@ +S(?y, !z) :- C(?x), R(?x, ?y). +R(?z, !t) :- D(?y), S(?y, ?z). \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/MFC-1.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/MFC-1.rls new file mode 100644 index 000000000..30d23d0a1 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/MFC-1.rls @@ -0,0 +1 @@ +P(?y, !z) :- P(?x,?y) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-1.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-1.rls new file mode 100644 index 000000000..08ae9bb90 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-1.rls @@ -0,0 +1,4 @@ +hP(?x,!u), W(!u) :- B(?x) . +pO(?x, !v), B(!v) :- W(?x) . +hP(?x, ?y) :- pO(?y, ?x) . +pO(?x, ?y) :- hP(?y, ?x) .
\ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-2.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-2.rls new file mode 100644 index 000000000..8abf83524 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-2.rls @@ -0,0 +1,2 @@ +R(?y, !z) :- R(?x,?y) . +R(?y, ?x) :- R(?x, ?y) . diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-3.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-3.rls new file mode 100644 index 000000000..677551968 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/RJA-3.rls @@ -0,0 +1,3 @@ +R(?x,!y), C(!y) :- C(?x) . +R(?y, ?x) :- R(?x, ?y) . + \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/RMFA-1.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/RMFA-1.rls new file mode 100644 index 000000000..fc6019a90 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/RMFA-1.rls @@ -0,0 +1,4 @@ +R(?x,!y),D(!y) :- C(?x) . +S(?x,!y), E(!y) :- D(?x) . +V(?x,!y), C(!y) :- E(?x) . +R(?z,?x) :- S(?x,?y), V(?y,?z) . diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/RMFC-1.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/RMFC-1.rls new file mode 100644 index 000000000..5cf888dd6 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/RMFC-1.rls @@ -0,0 +1,4 @@ +R(?x,!y), D(!y) :- C(?x) . +S(?x,!y), E(!y) :- D(?x) . +V(?x,!y), C(!y) :- E(?x) . + \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/datalog.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/datalog.rls new file mode 100644 index 000000000..6c61f4ed5 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/datalog.rls @@ -0,0 +1,2 @@ +A ( ?x , ?x , ?y , c ) :- A ( ?x , ?y , ?z , ?t ) . +B ( c , d ) :- B ( d , ?y ) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/acyclicity/non-recursive.rls b/rulewerk-integrationtests/src/test/resources/acyclicity/non-recursive.rls new file mode 100644 index 000000000..d31f8022e --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/acyclicity/non-recursive.rls @@ -0,0 +1,2 @@ +S(?y, !z) :- R(?x, ?y). +V(?z, !t) :- S(?y, ?z). \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls new file mode 100644 index 000000000..0013de9db --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175-minimal.rls @@ -0,0 +1,4 @@ +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE50) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . 
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls new file mode 100644 index 000000000..a78e9911e --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/rulewerk/175.rls @@ -0,0 +1,464 @@ +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) :- VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) . 
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . 
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) :- VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . 
+VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) :- VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE155) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS164) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE6) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS135) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE189) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS175) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE50) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS70) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE58) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS107) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE2) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE35) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS159) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE44) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS165) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE176) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS128) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE53) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE54) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE55) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS45) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE56) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE159) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS95) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE14) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS170) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE110) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS188) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE113) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE21) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE22) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE89) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS120) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE23) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS8) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE26) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE28) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE67) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS73) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE64) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS117) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE70) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE170) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE127) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS112) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE154) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS143) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE174) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE38) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS15) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS153) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE74) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS102) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE133) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE82) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS111) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE85) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS160) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE150) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS168) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE181) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS156) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE175) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS178) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE81) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS150) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE1) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE90) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS3) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE184) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE43) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS132) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE92) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS138) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE19) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS167) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE0) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE188) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS141) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE129) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS100) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE9) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS115) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE96) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS130) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE46) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS123) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE190) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS166) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE97) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE3) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE191) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS78) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE99) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS14) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE100) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS162) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE49) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS146) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE4) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0, VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE5) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE193) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS350), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS351, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS350) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE194) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS196), VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS196) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE421) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS578) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS580), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS578, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS580, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS579) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE423) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS310), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE424) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS311, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS310, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS311) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE347) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS504), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS505, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS504) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000010200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE348) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS273), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00001020(?VANDALISMRESERVEDENTITIESUNIVERSALVARS273) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE353) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS510), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS511, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS510) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE354) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS276), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS276) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE287) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS243), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS243) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE288) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS444), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS445, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS444) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE359) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS279), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS279) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE360) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS230(?VANDALISMRESERVEDENTITIESUNIVERSALVARS516), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS517, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS516) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE361) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS518), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS519, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS518) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE362) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS280), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS60(?VANDALISMRESERVEDENTITIESUNIVERSALVARS280) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE363) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS281), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS281) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE364) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS110(?VANDALISMRESERVEDENTITIESUNIVERSALVARS520), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS521, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS520) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE439) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS319), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS319) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE440) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS595), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS596, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS595) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE445) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS322), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS322) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE446) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS40(?VANDALISMRESERVEDENTITIESUNIVERSALVARS601), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS602, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS601) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE443) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS321), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS321) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE444) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS130(?VANDALISMRESERVEDENTITIESUNIVERSALVARS599), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS600, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS599) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE211) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS368), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS369, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS368) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE212) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS205), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS205) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE215) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS207), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS207) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE216) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS150(?VANDALISMRESERVEDENTITIESUNIVERSALVARS372), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS373, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS372) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE217) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS208), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS208) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE218) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS20(?VANDALISMRESERVEDENTITIESUNIVERSALVARS374), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS375, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS374) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE297) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS454), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS455, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS454) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE298) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS248), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS170(?VANDALISMRESERVEDENTITIESUNIVERSALVARS248) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE295) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS247), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS247) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE296) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS452), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS453, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS452) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE455) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS327), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS327) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE456) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000003100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000310(?VANDALISMRESERVEDENTITIESUNIVERSALVARS611), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS612, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS611) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE377) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS534), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS535, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS534) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE378) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS288), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS190(?VANDALISMRESERVEDENTITIESUNIVERSALVARS288) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE305) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS252), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS252) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE306) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS80(?VANDALISMRESERVEDENTITIESUNIVERSALVARS462), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS463, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS462) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE231) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS215), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS215) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE232) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000007800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(?VANDALISMRESERVEDENTITIESUNIVERSALVARS388), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS389, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS388) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE233) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS390), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS391, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS390) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE234) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000003000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS216), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS216) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE237) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS218), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS218) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE238) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(?VANDALISMRESERVEDENTITIESUNIVERSALVARS394), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS395, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS394) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE313) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS256), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS256) .
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE314) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS210(?VANDALISMRESERVEDENTITIESUNIVERSALVARS470), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS471, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS470) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE321) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS260), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS260) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE322) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS70(?VANDALISMRESERVEDENTITIESUNIVERSALVARS478), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS479, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS478) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE391) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS548), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS549, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS548) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE392) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS295), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS120(?VANDALISMRESERVEDENTITIESUNIVERSALVARS295) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE323) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS480), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS481, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS480) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE324) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS261), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS220(?VANDALISMRESERVEDENTITIESUNIVERSALVARS261) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE397) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS554), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS555, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS554) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE398) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS298), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS50(?VANDALISMRESERVEDENTITIESUNIVERSALVARS298) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE477) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS633), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS634, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS633) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE478) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS338), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS140(?VANDALISMRESERVEDENTITIESUNIVERSALVARS338) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE401) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS300), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS300) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE402) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2400(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS240(?VANDALISMRESERVEDENTITIESUNIVERSALVARS558), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS559, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS558) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE255) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS227), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS227) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE256) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000002700(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000270(?VANDALISMRESERVEDENTITIESUNIVERSALVARS412), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS413, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS412) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE253) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS410), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS411, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS410) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE254) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS226), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS160(?VANDALISMRESERVEDENTITIESUNIVERSALVARS226) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE483) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS341), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS341) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE484) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS300(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS30(?VANDALISMRESERVEDENTITIESUNIVERSALVARS639), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS640, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS639) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE409) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS304), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS304) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE410) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000022500(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(?VANDALISMRESERVEDENTITIESUNIVERSALVARS566), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS567, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS566) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE489) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS344), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS344) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS2600(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS260(?VANDALISMRESERVEDENTITIESUNIVERSALVARS645), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS646, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS645) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE333) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS266), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS266) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE334) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS1800(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS180(?VANDALISMRESERVEDENTITIESUNIVERSALVARS490), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS491, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS490) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE261) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS418), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS419, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS418) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE262) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS100(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS230), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS10(?VANDALISMRESERVEDENTITIESUNIVERSALVARS230) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE265) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS232), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS232) . 
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE266) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(?VANDALISMRESERVEDENTITIESUNIVERSALVARS422), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS423, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS422) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE335) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS492), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS493, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS492) . 
+VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE336) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO000040900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS267), VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(?VANDALISMRESERVEDENTITIESUNIVERSALVARS267) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE273) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS430), VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(?VANDALISMRESERVEDENTITIESUNIVERSALVARS431, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS430) . +VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS900(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESSUPPREL0(VANDALISMRESERVEDENTITIESSUPPRULE274) :- VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs00(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236, ?VANDALISMRESERVEDENTITIESUNIVERSALVARS236), VANDALISMRESERVEDENTITIESDATALOGVANDALISMNFRESERVEDENTITIESFORREPLACEMENTNEWCLASS90(?VANDALISMRESERVEDENTITIESUNIVERSALVARS236) . +VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00004090(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) . 
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00002250(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboIAO00000780(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
+VANDALISMRESERVEDENTITIESDATALOGhttpwwgeneontologorgformatsoboInOwlSubset0(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0, VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) .
+VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEsameAs0(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0, VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000040(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
+VANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO00000200(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000000400(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottom00(VANDALISMRESERVEDENTITIESFORTOPBOTTOMSAMEbottomconstant0) .
+VANDALISMRESERVEDENTITIESSUPPPREDICATEVANDALISMRESERVEDENTITIESDATALOGhttppurobolibrarorgoboBFO000002000(VANDALISMRESERVEDENTITIESCRITICALCONSTANT0) .
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/49.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/49.rls
new file mode 100644
index 000000000..fdfd3ff9f
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/49.rls
@@ -0,0 +1,72 @@
+CON1(!c1, ?x, ?u) :- Rpm(?t, ?x, ?V1), KPa(?t, ?y, ?V2), InjTable(?u, ?v, ?u1, ?v1, ?z, ?V3) .
+CON2(!c2, ?x, ?v) :- Rpm(?t, ?x, ?V1), KPa(?t, ?y, ?V2), InjTable(?u, ?v, ?u1, ?v1, ?z, ?V3) .
+CON3(!c3, ?y, ?u1) :- Rpm(?t, ?x, ?V1), KPa(?t, ?y, ?V2), InjTable(?u, ?v, ?u1, ?v1, ?z, ?V3) .
+CON4(!c4, ?y, ?v1) :- Rpm(?t, ?x, ?V1), KPa(?t, ?y, ?V2), InjTable(?u, ?v, ?u1, ?v1, ?z, ?V3) .
+getU3(?V1, ?V2, ?V3) :- Rpm(?t, ?x, ?V1), KPa(?t, ?y, ?V2), InjTable(?u, ?v, ?u1, ?v1, ?z, ?V3) .
+getSU4(?V, ?c1, ?c2, ?c3, ?c4) :- Rpm(?t, ?x, ?V1), KPa(?t, ?y, ?V2), InjTable(?u, ?v, ?u1, ?v1, ?z, ?V3), CON1(?c1, ?x, ?u), CON2(?c2, ?x, ?v), CON3(?c3, ?y, ?u1), CON4(?c4, ?y, ?v1), U3(?V1, ?V2, ?V3, ?V) .
+InjTime1(?t, ?z, ?W) :- Rpm(?t, ?x, ?V1), KPa(?t, ?y, ?V2), InjTable(?u, ?v, ?u1, ?v1, ?z, ?V3), CON1(?c1, ?x, ?u), CON2(?c2, ?x, ?v), CON3(?c3, ?y, ?u1), CON4(?c4, ?y, ?v1), U3(?V1, ?V2, ?V3, ?V), SU4(?V, ?c1, ?c2, ?c3, ?c4, ?W) .
+EQ1(!z, ?y, ?x) :- Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2) .
+CON5(!c1, ?x) :- Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2) .
+getU(?V1, ?V2) :- Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2) .
+getSU(?V, ?c1) :- Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2), CON5(?c1, ?x), U(?V1, ?V2, ?V) .
+InjTime2(?t, ?z, ?W) :- EQ1(?z, ?y, ?x), Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2), CON5(?c1, ?x), U(?V1, ?V2, ?V), SU(?V, ?c1, ?W) .
+CON6(!c1, ?x) :- Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2) .
+getSU(?V, ?c1) :- Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2), CON6(?c1, ?x), U(?V1, ?V2, ?V) .
+InjTime2(?t, ?y, ?W) :- Rpm(?t, ?x, ?V1), InjTime1(?t, ?y, ?V2), CON6(?c1, ?x), U(?V1, ?V2, ?V), SU(?V, ?c1, ?W) .
+EQ2(!u, ?y, ?z) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3) .
+CON7(!c1, ?x) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3) .
+CON8(!c2, ?z) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3) .
+getU3(?V1, ?V2, ?V3) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3) .
+getSU2(?V, ?c1, ?c2) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3), CON7(?c1, ?x), CON8(?c2, ?z), U3(?V1, ?V2, ?V3, ?V) .
+InjTimeFinal(?t, ?u, ?W) :- EQ2(?u, ?y, ?z), TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3), CON7(?c1, ?x), CON8(?c2, ?z), U3(?V1, ?V2, ?V3, ?V), SU2(?V, ?c1, ?c2, ?W) .
+CON9(!c1, ?x) :- TpD(?t, ?x, ?V1), InjTime2(?t, ?y, ?V2) .
+getU(?V1, ?V2) :- TpD(?t, ?x, ?V1), InjTime2(?t, ?y, ?V2) .
+getSU(?V, ?c1) :- TpD(?t, ?x, ?V1), InjTime2(?t, ?y, ?V2), CON9(?c1, ?x), U(?V1, ?V2, ?V) .
+InjTimeFinal(?t, ?y, ?W) :- TpD(?t, ?x, ?V1), InjTime2(?t, ?y, ?V2), CON9(?c1, ?x), U(?V1, ?V2, ?V), SU(?V, ?c1, ?W) .
+CON10(!c2, ?z) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3) .
+getSU2(?V, ?c1, ?c2) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3), CON7(?c1, ?x), CON10(?c2, ?z), U3(?V1, ?V2, ?V3, ?V) .
+InjTimeFinal(?t, ?y, ?W) :- TpD(?t, ?x, ?V1), Lmbd(?t, ?z, ?V2), InjTime2(?t, ?y, ?V3), CON7(?c1, ?x), CON10(?c2, ?z), U3(?V1, ?V2, ?V3, ?V), SU2(?V, ?c1, ?c2, ?W) .
+getSU3(?V, ?c1, ?c2, ?c3) :- getSU4(?V, ?c1, ?c2, ?c3, ?c4) .
+getSU(?V1, ?c4) :- getSU4(?V, ?c1, ?c2, ?c3, ?c4), SU3(?V, ?c1, ?c2, ?c3, ?V1) .
+SU4(?V, ?c1, ?c2, ?c3, ?c4, ?V2) :- getSU4(?V, ?c1, ?c2, ?c3, ?c4), SU3(?V, ?c1, ?c2, ?c3, ?V1), SU(?V1, ?c4, ?V2) .
+getSU2(?V, ?c1, ?c2) :- getSU3(?V, ?c1, ?c2, ?c3) .
+getSU(?V1, ?c3) :- getSU3(?V, ?c1, ?c2, ?c3), SU2(?V, ?c1, ?c2, ?V1) .
+SU3(?V, ?c1, ?c2, ?c3, ?V2) :- getSU3(?V, ?c1, ?c2, ?c3), SU2(?V, ?c1, ?c2, ?V1), SU(?V1, ?c3, ?V2) .
+getSU(?V, ?c1) :- getSU2(?V, ?c1, ?c2) .
+getSU(?V1, ?c2) :- getSU2(?V, ?c1, ?c2), SU(?V, ?c1, ?V1) .
+SU2(?V, ?c1, ?c2, ?V2) :- getSU2(?V, ?c1, ?c2), SU(?V, ?c1, ?V1), SU(?V1, ?c2, ?V2) .
+getU(?V1, ?V2) :- getU3(?V1, ?V2, ?V3) .
+getU(?V, ?V3) :- getU3(?V1, ?V2, ?V3), U(?V1, ?V2, ?V) .
+U3(?V1, ?V2, ?V3, ?W) :- getU3(?V1, ?V2, ?V3), U(?V1, ?V2, ?V), U(?V, ?V3, ?W) .
+empty(emptySet) .
+SU(?U, ?x, !V), SU(!V, ?x, !V) :- getSU(?U, ?x) .
+SU(?V, ?y, ?V) :- SU(?U, ?x, ?V), SU(?U, ?y, ?U) .
+U(?V, ?W, ?W) :- getU(?V, ?W), empty(?V) .
+getSU(?W, ?x) :- getU(?V, ?W), SU(?Vmin, ?x, ?V) .
+getU(?Vmin, ?Wplus) :- getU(?V, ?W), SU(?Vmin, ?x, ?V), SU(?W, ?x, ?Wplus) .
+U(?V, ?W, ?T) :- getU(?V, ?W), SU(?Vmin, ?x, ?V), SU(?W, ?x, ?Wplus), U(?Vmin, ?Wplus, ?T) .
+Rpm(t, roundsPerMinute, emptySet) .
+KPa(t, airPressure, emptySet) .
+Lmbd(t, lambda, emptySet) .
+TpD(t, throttle, emptySet) .
+InjTable(a5, b12, c20, d30, e42, emptySet) .
+InjTable(a7, b17, c29, d43, e61, emptySet) .
+InjTable(a9, b22, c38, d56, e80, emptySet) .
+InjTable(a11, b27, c47, d69, e99, emptySet) .
+InjTable(a13, b32, c56, d82, e118, emptySet) .
+InjTable(a15, b37, c65, d95, e137, emptySet) .
+InjTable(a17, b42, c74, d108, e156, emptySet) .
+InjTable(a19, b47, c83, d121, e175, emptySet) .
+InjTable(a21, b52, c92, d134, e194, emptySet) .
+InjTable(a23, b57, c101, d147, e213, emptySet) .
+InjTable(a25, b62, c110, d160, e232, emptySet) .
+InjTable(a27, b67, c119, d173, e251, emptySet) .
+InjTable(a29, b72, c128, d186, e270, emptySet) .
+InjTable(a31, b77, c137, d199, e289, emptySet) .
+InjTable(a33, b82, c146, d212, e308, emptySet) .
+InjTable(a35, b87, c155, d225, e327, emptySet) .
+InjTable(a37, b92, c164, d238, e346, emptySet) .
+InjTable(a39, b97, c173, d251, e365, emptySet) .
+InjTable(a41, b102, c182, d264, e384, emptySet) .
+InjTable(a43, b107, c191, d277, e403, emptySet) .
+InjTable(a45, b112, c200, d290, e422, emptySet) .
\ No newline at end of file
diff --git a/vlog4j-core/src/test/data/output/exclude_blanks.csv b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/50.rls
similarity index 100%
rename from vlog4j-core/src/test/data/output/exclude_blanks.csv
rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/50.rls
diff --git a/vlog4j-core/src/test/data/output/output.csv b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/51-1.rls
similarity index 100%
rename from vlog4j-core/src/test/data/output/output.csv
rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/51-1.rls
diff --git a/vlog4j-core/src/test/data/output/outputXXX.csv b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/51-2.rls
similarity index 100%
rename from vlog4j-core/src/test/data/output/outputXXX.csv
rename to rulewerk-integrationtests/src/test/resources/vlogissues/vlog/51-2.rls
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-1.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-1.rls
new file mode 100644
index 000000000..98e034606
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-1.rls
@@ -0,0 +1,3 @@
+p(a) .
+q(?X,!Y,!Z) :- p(?X) .
+q(?X,!Y,!Y) :- p(?X) .
\ No newline at end of file
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-2.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-2.rls
new file mode 100644
index 000000000..b722059d7
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/61-2.rls
@@ -0,0 +1,3 @@
+p(a) .
+q(?X,!Y,!Y) :- p(?X) .
+q(?X,!Y,!Z) :- p(?X) .
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/63.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/63.rls
new file mode 100644
index 000000000..81832fc8a
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/63.rls
@@ -0,0 +1,3 @@
+%https://github.com/karmaresearch/vlog/issues/61
+p(a).
+q(?X):-~p(?X).
\ No newline at end of file
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/65.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/65.rls
new file mode 100644
index 000000000..fdb522561
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/65.rls
@@ -0,0 +1,2 @@
+
+Goal(cg) :- ResultDeg(cr624_0),ResultDeg(cr623_0),ResultDeg(cr622_0),ResultDeg(cr621_0),ResultDeg(cr620_0),ResultDeg(cr619_0),ResultDeg(cr618_0),ResultDeg(cr617_0),ResultDeg(cr616_0),ResultDeg(cr615_0),ResultDeg(cr614_0),ResultDeg(cr613_0),ResultDeg(cr612_0),ResultDeg(cr611_0),ResultDeg(cr610_0),ResultDeg(cr609_0),ResultDeg(cr608_0),ResultDeg(cr607_0),ResultDeg(cr606_0),ResultDeg(cr605_0),ResultDeg(cr604_0),ResultDeg(cr603_0),ResultDeg(cr602_0),ResultDeg(cr601_0),ResultDeg(cr600_0),ResultDeg(cr599_0),ResultDeg(cr598_0),ResultDeg(cr597_0),ResultDeg(cr596_0),ResultDeg(cr595_0),ResultDeg(cr594_0),ResultDeg(cr593_0),ResultDeg(cr592_0),ResultDeg(cr591_0),ResultDeg(cr590_0),ResultDeg(cr589_0),ResultDeg(cr588_0),ResultDeg(cr587_0),ResultDeg(cr586_0),ResultDeg(cr585_0),ResultDeg(cr584_0),ResultDeg(cr583_0),ResultDeg(cr582_0),ResultDeg(cr581_0),ResultDeg(cr580_0),ResultDeg(cr579_0),ResultDeg(cr578_0),ResultDeg(cr577_0),ResultDeg(cr576_0),ResultDeg(cr575_0),ResultDeg(cr574_0),ResultDeg(cr573_0),ResultDeg(cr572_0),ResultDeg(cr571_0),ResultDeg(cr570_0),ResultDeg(cr569_0),ResultDeg(cr568_0),ResultDeg(cr567_0),ResultDeg(cr566_0),ResultDeg(cr565_0),ResultDeg(cr564_0),ResultDeg(cr563_0),ResultDeg(cr562_0),ResultDeg(cr561_0),ResultDeg(cr560_0),ResultDeg(cr559_0),ResultDeg(cr558_0),ResultDeg(cr557_0),ResultDeg(cr556_0),ResultDeg(cr555_0),ResultDeg(cr554_0),ResultDeg(cr553_0),ResultDeg(cr552_0),ResultDeg(cr551_0),ResultDeg(cr550_0),ResultDeg(cr549_0),ResultDeg(cr548_0),ResultDeg(cr547_0),ResultDeg(cr546_0),ResultDeg(cr545_0),ResultDeg(cr544_0),ResultDeg(cr543_0),ResultDeg(cr542_0),ResultDeg(cr541_0),ResultDeg(cr540_0),ResultDeg(cr539_0),ResultDeg(cr538_0),ResultDeg(cr537_0),ResultDeg(cr536_0),ResultDeg(cr535_0),ResultDeg(cr534_0),ResultDeg(cr533_0),ResultDeg(cr532_0),ResultDeg(cr531_0),ResultDeg(cr530_0),ResultDeg(cr529_0),ResultDeg(cr528_0),ResultDeg(cr527_0),ResultDeg(cr526_0),ResultDeg(cr525_0),ResultDeg(cr524_0),ResultDeg(cr523_0),ResultDeg(cr522_0),ResultDeg(cr521_0),ResultDeg(cr520_0),ResultDeg(cr519_0),ResultDeg(cr518_0),ResultDeg(cr517_0),ResultDeg(cr516_0),ResultDeg(cr515_0),ResultDeg(cr514_0),ResultDeg(cr513_0),ResultDeg(cr512_0),ResultDeg(cr511_0),ResultDeg(cr510_0),ResultDeg(cr509_0),ResultDeg(cr508_0),ResultDeg(cr507_0),ResultDeg(cr506_0),ResultDeg(cr505_0),ResultDeg(cr504_0),ResultDeg(cr503_0),ResultDeg(cr502_0),ResultDeg(cr501_0),ResultDeg(cr500_0),ResultDeg(cr499_0),ResultDeg(cr498_0),ResultDeg(cr497_0),ResultDeg(cr496_0),ResultDeg(cr495_0),ResultDeg(cr494_0),ResultDeg(cr493_0),ResultDeg(cr492_0),ResultDeg(cr491_0),ResultDeg(cr490_0),ResultDeg(cr489_0),ResultDeg(i1700),ResultDeg(i1810),ResultDeg(i1730),ResultDeg(i2160),ResultDeg(i2120),ResultDeg(i1840),ResultDeg(i1770),ResultDeg(i2480),ResultDeg(i2510),ResultDeg(i2410),ResultDeg(i2300),ResultDeg(i2340),ResultDeg(i1950),ResultDeg(i1660),ResultDeg(i2730),ResultDeg(i2620),ResultDeg(i2370),ResultDeg(i2270),ResultDeg(i3290),ResultDeg(i3260),ResultDeg(i3150),ResultDeg(i3120),ResultDeg(i3010),ResultDeg(i2970),ResultDeg(i2830),ResultDeg(i2550),ResultDeg(i2190),ResultDeg(i1910),ResultDeg(i3720),ResultDeg(i3500),ResultDeg(i3400),ResultDeg(i3330),ResultDeg(i2870),ResultDeg(i2800),ResultDeg(i2660),ResultDeg(i2580),ResultDeg(i1880),ResultDeg(i1590),ResultDeg(i4140),ResultDeg(i4040),ResultDeg(i3860),ResultDeg(i3820),ResultDeg(i3790),ResultDeg(i3680),ResultDeg(i3650),ResultDeg(i3610),ResultDeg(i3580),ResultDeg(i3430),ResultDeg(i3220),ResultDeg(i3040),ResultDeg(i2940),ResultDeg(i2760),ResultDeg(i2440),ResultDeg(i2230),ResultDeg(i2090),ResultDeg(i2050),ResultDeg(i2020),ResultDeg(i1583),ResultDeg(i1595),ResultDeg(i1599),ResultDeg(i1608),ResultDeg(i1624),ResultDeg(i1614),ResultDeg(i1587),ResultDeg(i1593),ResultDeg(i1675),ResultDeg(i1737),ResultDeg(i1791),ResultDeg(i1836),ResultDeg(i1885),ResultDeg(i1916),ResultDeg(i1935),ResultDeg(i1937),ResultDeg(i1933),ResultDeg(i1927),ResultDeg(i1920),ResultDeg(i1908),ResultDeg(i1678),ResultDeg(i1723),ResultDeg(i1778),ResultDeg(i1826),ResultDeg(i1865),ResultDeg(i1883),ResultDeg(i1887),ResultDeg(i1881),ResultDeg(i1875),ResultDeg(i1869),ResultDeg(i1859),ResultDeg(i1589),ResultDeg(i1642),ResultDeg(i1706),ResultDeg(i1750),ResultDeg(i1776),ResultDeg(i1774),ResultDeg(i1766),ResultDeg(i1752),ResultDeg(i1591),ResultDeg(i1622),ResultDeg(i1630),ResultDeg(i1628),ResultDeg(i1620),ResultDeg(i1612),ResultDeg(i1605),ResultDeg(i8580),ResultDeg(i8360),ResultDeg(i8140),ResultDeg(i7920),ResultDeg(i7700),ResultDeg(i7480),ResultDeg(i7260),ResultDeg(i7040),ResultDeg(i6820),ResultDeg(i6600),ResultDeg(i6380),ResultDeg(i6160),ResultDeg(i5940),ResultDeg(i5720),ResultDeg(i5500),ResultDeg(i5280),ResultDeg(i5060),ResultDeg(i4840),ResultDeg(i4620),ResultDeg(i4400),ResultDeg(i4180),ResultDeg(i3960),ResultDeg(i3740),ResultDeg(i3520),ResultDeg(i3300),ResultDeg(i3080),ResultDeg(i2860),ResultDeg(i2640),ResultDeg(i2420),ResultDeg(i2200),ResultDeg(i1980),ResultDeg(i1760).
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/66.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/66.rls
new file mode 100644
index 000000000..fc7d5de49
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/66.rls
@@ -0,0 +1,7 @@
+P(a,b,c) .
+Q(a,a,c) .
+
+R(?x1):- P(?x1, ?x2, ?x3), Q(?x1,?x1,?x3) .
+
+% expected new inferences:
+% R(a) .
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/67.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/67.rls
new file mode 100644
index 000000000..a0854c7c1
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/67.rls
@@ -0,0 +1,5 @@
+B1_(a, b, c, d, prov1) .
+B2_(a, a, c, prov2).
+H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) .
+H2_(n1_3_0, n1_5_0, n1_6_0) .
+true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) .
\ No newline at end of file
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/68-expected.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/68-expected.rls
new file mode 100644
index 000000000..b1bd64556
--- /dev/null
+++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/68-expected.rls
@@ -0,0 +1,13 @@
+B1_(a, b, c, d, provb1) .
+B2_(a, a, c, provb2) .
+H1_(a, n1_2_0, n1_2_0, n1_3_0, provH1) .
+H2_(n1_3_0, n1_5_0, provH2) .
+H3_(b, provH3) .
+H4_(n1_2_0, provH4) .
+inst(a, b, c, d, i, rule0) .
+conc(i, provH1) .
+conc(i, provH2) .
+conc(i, provH3) .
+conc(i, provH4) .
+prec(provb1, i) .
+prec(provb2, i) .
diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/68.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/68.rls new file mode 100644 index 000000000..0d16968c8 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/68.rls @@ -0,0 +1,17 @@ +B1_(a, b, c, d, provb1) . +B2_(a, a, c, provb2) . +H1_(a, n1_2_0, n1_2_0, n1_3_0, provH1) . +H2_(n1_3_0, n1_5_0, provH2) . +H3_(b, provH3) . +H4_(n1_2_0, provH4) . +inst(a, b, c, d, i, rule0) . + +prec(?b1, ?i), prec(?b2, ?i), conc(?i, ?h1), conc(?i, ?h2), conc(?i, ?h3), conc(?i, ?h4) :- B1_(?x1, ?x2, ?y1, ?y2, ?b1), B2_(?x1, ?x1, ?y1, ?b2), H1_(?x1, ?z1, ?z1, ?z2, ?h1), H2_(?z2, ?z3, ?h2), H3_(?x2, ?h3), H4_(?z1, ?h4), inst(?x1, ?x2, ?y1, ?y2, ?i, rule0) . + +% Expected new inferences: +% conc(i, provH1) . +% conc(i, provH2) . +% conc(i, provH3) . +% conc(i, provH4) . +% prec(provb1, i) . +% prec(provb2, i) . diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/69.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/69.rls new file mode 100644 index 000000000..9bb347613 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/69.rls @@ -0,0 +1,6 @@ +B1_(a, b, c, d, prov1) . +B2_(a, a, c, prov2) . + +H1_(?x1, !z1, !z1, !z2, !F_2), H2_(!z2, !z3, !F_3), H3_(?x2, !F_4), H4_(!z1, !F_5) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1) . +inst(?x1, ?x2, ?y1, ?y2, !F_6, rule0) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1) . +prec(?F_0, ?F_6), prec(?F_1, ?F_6), conc(?F_6, ?F_2), conc(?F_6, ?F_3), conc(?F_6, ?F_4), conc(?F_6, ?F_5) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_0), B2_(?x1, ?x1, ?y1, ?F_1), H1_(?x1, ?z1, ?z1, ?z2, ?F_2), H2_(?z2, ?z3, ?F_3), H3_(?x2, ?F_4), H4_(?z1, ?F_5), inst(?x1, ?x2, ?y1, ?y2, ?F_6, rule0) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/70-expected.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/70-expected.rls new file mode 100644 index 000000000..aa1de5438 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/70-expected.rls @@ -0,0 +1,12 @@ + Leq(i0, i260) . + Leq(i0, i13) . + Leq(i13, i260) . + Leq(i0, i0) . + Le(i0, i260) . + Le(i0, i13) . + Le(i13, i260) . + Eq(i0, i0) . + KPa(i0, i0) . + KPa(i13, i13) . + + diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/70.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/70.rls new file mode 100644 index 000000000..e2d11695c --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/70.rls @@ -0,0 +1,8 @@ +Le(i0,i13). +Le(i13,i260). +Leq(?x0,?x1) :- Le(?x0,?x1). +KPa(?x0,?x0) :- Leq(i0,?x0),Leq(?x0,i260). + +Eq(i0,i0). +Le(i0,i260). +Leq(?x0,?x1) :- Eq(?x0,?x1). diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/71.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/71.rls new file mode 100644 index 000000000..ac85dd8b9 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/71.rls @@ -0,0 +1,36874 @@ +IgnTable(i0,i13,i0,i660,i2020). +IgnTable(i0,i13,i660,i880,i2050). +IgnTable(i0,i13,i880,i1100,i2020). +IgnTable(i0,i13,i1100,i1320,i2090). +IgnTable(i0,i13,i1320,i1540,i2230). +IgnTable(i0,i13,i1540,i1760,i2440). +IgnTable(i0,i13,i1760,i1980,i2760). +IgnTable(i0,i13,i1980,i2200,i2940). +IgnTable(i0,i13,i2200,i2420,i3040). +IgnTable(i0,i13,i2420,i2640,i3220). +IgnTable(i0,i13,i2640,i2860,i3430). +IgnTable(i0,i13,i2860,i3080,i3580). 
+IgnTable(i0,i13,i3080,i3300,i3580). +IgnTable(i0,i13,i3300,i3520,i3610). +IgnTable(i0,i13,i3520,i3740,i3650). +IgnTable(i0,i13,i3740,i3960,i3680). +IgnTable(i0,i13,i3960,i4180,i3680). +IgnTable(i0,i13,i4180,i4400,i3680). +IgnTable(i0,i13,i4400,i4620,i3680). +IgnTable(i0,i13,i4620,i4840,i3790). +IgnTable(i0,i13,i4840,i5060,i3820). +IgnTable(i0,i13,i5060,i5280,i3790). +IgnTable(i0,i13,i5280,i5500,i3790). +IgnTable(i0,i13,i5500,i5720,i3860). +IgnTable(i0,i13,i5720,i5940,i3860). +IgnTable(i0,i13,i5940,i6160,i3860). +IgnTable(i0,i13,i6160,i6380,i4040). +IgnTable(i0,i13,i6380,i6600,i4040). +IgnTable(i0,i13,i6600,i6820,i4140). +IgnTable(i0,i13,i6820,i7040,i4140). +IgnTable(i0,i13,i7040,i7260,i4140). +IgnTable(i0,i13,i7260,i7480,i4140). +IgnTable(i0,i13,i7480,i7700,i4140). +IgnTable(i0,i13,i7700,i7920,i4140). +IgnTable(i0,i13,i7920,i8140,i4140). +IgnTable(i0,i13,i8140,i8360,i4140). +IgnTable(i0,i13,i8360,i8580,i4140). +IgnTable(i13,i26,i0,i660,i1520). +IgnTable(i13,i26,i660,i880,i1560). +IgnTable(i13,i26,i880,i1100,i1590). +IgnTable(i13,i26,i1100,i1320,i1880). +IgnTable(i13,i26,i1320,i1540,i2090). +IgnTable(i13,i26,i1540,i1760,i2440). +IgnTable(i13,i26,i1760,i1980,i2580). +IgnTable(i13,i26,i1980,i2200,i2660). +IgnTable(i13,i26,i2200,i2420,i2800). +IgnTable(i13,i26,i2420,i2640,i2870). +IgnTable(i13,i26,i2640,i2860,i3080). +IgnTable(i13,i26,i2860,i3080,i3330). +IgnTable(i13,i26,i3080,i3300,i3400). +IgnTable(i13,i26,i3300,i3520,i3330). +IgnTable(i13,i26,i3520,i3740,i3330). +IgnTable(i13,i26,i3740,i3960,i3400). +IgnTable(i13,i26,i3960,i4180,i3400). +IgnTable(i13,i26,i4180,i4400,i3400). +IgnTable(i13,i26,i4400,i4620,i3500). +IgnTable(i13,i26,i4620,i4840,i3500). +IgnTable(i13,i26,i4840,i5060,i3580). +IgnTable(i13,i26,i5060,i5280,i3610). +IgnTable(i13,i26,i5280,i5500,i3650). +IgnTable(i13,i26,i5500,i5720,i3720). +IgnTable(i13,i26,i5720,i5940,i3720). +IgnTable(i13,i26,i5940,i6160,i3680). +IgnTable(i13,i26,i6160,i6380,i3820). +IgnTable(i13,i26,i6380,i6600,i3860). +IgnTable(i13,i26,i6600,i6820,i4040). +IgnTable(i13,i26,i6820,i7040,i4040). +IgnTable(i13,i26,i7040,i7260,i4040). +IgnTable(i13,i26,i7260,i7480,i4040). +IgnTable(i13,i26,i7480,i7700,i4040). +IgnTable(i13,i26,i7700,i7920,i4040). +IgnTable(i13,i26,i7920,i8140,i4040). +IgnTable(i13,i26,i8140,i8360,i4040). +IgnTable(i13,i26,i8360,i8580,i4040). +IgnTable(i26,i39,i0,i660,i1030). +IgnTable(i26,i39,i660,i880,i1240). +IgnTable(i26,i39,i880,i1100,i1450). +IgnTable(i26,i39,i1100,i1320,i1910). +IgnTable(i26,i39,i1320,i1540,i2190). +IgnTable(i26,i39,i1540,i1760,i2550). +IgnTable(i26,i39,i1760,i1980,i2660). +IgnTable(i26,i39,i1980,i2200,i2830). +IgnTable(i26,i39,i2200,i2420,i2940). +IgnTable(i26,i39,i2420,i2640,i2970). +IgnTable(i26,i39,i2640,i2860,i3010). +IgnTable(i26,i39,i2860,i3080,i3010). +IgnTable(i26,i39,i3080,i3300,i3040). +IgnTable(i26,i39,i3300,i3520,i3010). +IgnTable(i26,i39,i3520,i3740,i3120). +IgnTable(i26,i39,i3740,i3960,i3150). +IgnTable(i26,i39,i3960,i4180,i3150). +IgnTable(i26,i39,i4180,i4400,i3150). +IgnTable(i26,i39,i4400,i4620,i3260). +IgnTable(i26,i39,i4620,i4840,i3150). +IgnTable(i26,i39,i4840,i5060,i3220). +IgnTable(i26,i39,i5060,i5280,i3290). +IgnTable(i26,i39,i5280,i5500,i3400). +IgnTable(i26,i39,i5500,i5720,i3430). +IgnTable(i26,i39,i5720,i5940,i3500). +IgnTable(i26,i39,i5940,i6160,i3400). +IgnTable(i26,i39,i6160,i6380,i3580). +IgnTable(i26,i39,i6380,i6600,i3610). +IgnTable(i26,i39,i6600,i6820,i3790). +IgnTable(i26,i39,i6820,i7040,i3790). +IgnTable(i26,i39,i7040,i7260,i3790). +IgnTable(i26,i39,i7260,i7480,i3790). 
+IgnTable(i26,i39,i7480,i7700,i3790). +IgnTable(i26,i39,i7700,i7920,i3790). +IgnTable(i26,i39,i7920,i8140,i3790). +IgnTable(i26,i39,i8140,i8360,i3790). +IgnTable(i26,i39,i8360,i8580,i3790). +IgnTable(i39,i52,i0,i660,i960). +IgnTable(i39,i52,i660,i880,i1240). +IgnTable(i39,i52,i880,i1100,i1630). +IgnTable(i39,i52,i1100,i1320,i2050). +IgnTable(i39,i52,i1320,i1540,i2270). +IgnTable(i39,i52,i1540,i1760,i2370). +IgnTable(i39,i52,i1760,i1980,i2620). +IgnTable(i39,i52,i1980,i2200,i2800). +IgnTable(i39,i52,i2200,i2420,i2800). +IgnTable(i39,i52,i2420,i2640,i2730). +IgnTable(i39,i52,i2640,i2860,i2730). +IgnTable(i39,i52,i2860,i3080,i2730). +IgnTable(i39,i52,i3080,i3300,i2730). +IgnTable(i39,i52,i3300,i3520,i2730). +IgnTable(i39,i52,i3520,i3740,i2730). +IgnTable(i39,i52,i3740,i3960,i2800). +IgnTable(i39,i52,i3960,i4180,i2800). +IgnTable(i39,i52,i4180,i4400,i2800). +IgnTable(i39,i52,i4400,i4620,i2800). +IgnTable(i39,i52,i4620,i4840,i2830). +IgnTable(i39,i52,i4840,i5060,i2870). +IgnTable(i39,i52,i5060,i5280,i2870). +IgnTable(i39,i52,i5280,i5500,i2940). +IgnTable(i39,i52,i5500,i5720,i2970). +IgnTable(i39,i52,i5720,i5940,i3040). +IgnTable(i39,i52,i5940,i6160,i3010). +IgnTable(i39,i52,i6160,i6380,i3220). +IgnTable(i39,i52,i6380,i6600,i3260). +IgnTable(i39,i52,i6600,i6820,i3400). +IgnTable(i39,i52,i6820,i7040,i3400). +IgnTable(i39,i52,i7040,i7260,i3400). +IgnTable(i39,i52,i7260,i7480,i3400). +IgnTable(i39,i52,i7480,i7700,i3400). +IgnTable(i39,i52,i7700,i7920,i3400). +IgnTable(i39,i52,i7920,i8140,i3400). +IgnTable(i39,i52,i8140,i8360,i3400). +IgnTable(i39,i52,i8360,i8580,i3400). +IgnTable(i52,i65,i0,i660,i670). +IgnTable(i52,i65,i660,i880,i1350). +IgnTable(i52,i65,i880,i1100,i1660). +IgnTable(i52,i65,i1100,i1320,i1950). +IgnTable(i52,i65,i1320,i1540,i2190). +IgnTable(i52,i65,i1540,i1760,i2340). +IgnTable(i52,i65,i1760,i1980,i2340). +IgnTable(i52,i65,i1980,i2200,i2340). +IgnTable(i52,i65,i2200,i2420,i2270). +IgnTable(i52,i65,i2420,i2640,i2300). +IgnTable(i52,i65,i2640,i2860,i2410). +IgnTable(i52,i65,i2860,i3080,i2510). +IgnTable(i52,i65,i3080,i3300,i2510). +IgnTable(i52,i65,i3300,i3520,i2510). +IgnTable(i52,i65,i3520,i3740,i2480). +IgnTable(i52,i65,i3740,i3960,i2410). +IgnTable(i52,i65,i3960,i4180,i2370). +IgnTable(i52,i65,i4180,i4400,i2270). +IgnTable(i52,i65,i4400,i4620,i2340). +IgnTable(i52,i65,i4620,i4840,i2340). +IgnTable(i52,i65,i4840,i5060,i2370). +IgnTable(i52,i65,i5060,i5280,i2440). +IgnTable(i52,i65,i5280,i5500,i2510). +IgnTable(i52,i65,i5500,i5720,i2550). +IgnTable(i52,i65,i5720,i5940,i2620). +IgnTable(i52,i65,i5940,i6160,i2620). +IgnTable(i52,i65,i6160,i6380,i2830). +IgnTable(i52,i65,i6380,i6600,i2970). +IgnTable(i52,i65,i6600,i6820,i3040). +IgnTable(i52,i65,i6820,i7040,i3040). +IgnTable(i52,i65,i7040,i7260,i3040). +IgnTable(i52,i65,i7260,i7480,i3040). +IgnTable(i52,i65,i7480,i7700,i3040). +IgnTable(i52,i65,i7700,i7920,i3040). +IgnTable(i52,i65,i7920,i8140,i3040). +IgnTable(i52,i65,i8140,i8360,i3040). +IgnTable(i52,i65,i8360,i8580,i3040). +IgnTable(i65,i78,i0,i660,i530). +IgnTable(i65,i78,i660,i880,i990). +IgnTable(i65,i78,i880,i1100,i1380). +IgnTable(i65,i78,i1100,i1320,i1490). +IgnTable(i65,i78,i1320,i1540,i1660). +IgnTable(i65,i78,i1540,i1760,i1770). +IgnTable(i65,i78,i1760,i1980,i1770). +IgnTable(i65,i78,i1980,i2200,i1840). +IgnTable(i65,i78,i2200,i2420,i1880). +IgnTable(i65,i78,i2420,i2640,i1880). +IgnTable(i65,i78,i2640,i2860,i1980). +IgnTable(i65,i78,i2860,i3080,i2090). +IgnTable(i65,i78,i3080,i3300,i2090). +IgnTable(i65,i78,i3300,i3520,i2120). +IgnTable(i65,i78,i3520,i3740,i2020). 
+IgnTable(i65,i78,i3740,i3960,i1880). +IgnTable(i65,i78,i3960,i4180,i1880). +IgnTable(i65,i78,i4180,i4400,i1880). +IgnTable(i65,i78,i4400,i4620,i1980). +IgnTable(i65,i78,i4620,i4840,i1980). +IgnTable(i65,i78,i4840,i5060,i2050). +IgnTable(i65,i78,i5060,i5280,i2120). +IgnTable(i65,i78,i5280,i5500,i2160). +IgnTable(i65,i78,i5500,i5720,i2190). +IgnTable(i65,i78,i5720,i5940,i2230). +IgnTable(i65,i78,i5940,i6160,i2340). +IgnTable(i65,i78,i6160,i6380,i2580). +IgnTable(i65,i78,i6380,i6600,i2660). +IgnTable(i65,i78,i6600,i6820,i2730). +IgnTable(i65,i78,i6820,i7040,i2730). +IgnTable(i65,i78,i7040,i7260,i2730). +IgnTable(i65,i78,i7260,i7480,i2730). +IgnTable(i65,i78,i7480,i7700,i2730). +IgnTable(i65,i78,i7700,i7920,i2730). +IgnTable(i65,i78,i7920,i8140,i2730). +IgnTable(i65,i78,i8140,i8360,i2730). +IgnTable(i65,i78,i8360,i8580,i2730). +IgnTable(i78,i91,i0,i660,i530). +IgnTable(i78,i91,i660,i880,i810). +IgnTable(i78,i91,i880,i1100,i850). +IgnTable(i78,i91,i1100,i1320,i990). +IgnTable(i78,i91,i1320,i1540,i1100). +IgnTable(i78,i91,i1540,i1760,i1420). +IgnTable(i78,i91,i1760,i1980,i1630). +IgnTable(i78,i91,i1980,i2200,i1560). +IgnTable(i78,i91,i2200,i2420,i1560). +IgnTable(i78,i91,i2420,i2640,i1660). +IgnTable(i78,i91,i2640,i2860,i1770). +IgnTable(i78,i91,i2860,i3080,i1770). +IgnTable(i78,i91,i3080,i3300,i1660). +IgnTable(i78,i91,i3300,i3520,i1590). +IgnTable(i78,i91,i3520,i3740,i1630). +IgnTable(i78,i91,i3740,i3960,i1770). +IgnTable(i78,i91,i3960,i4180,i1770). +IgnTable(i78,i91,i4180,i4400,i1770). +IgnTable(i78,i91,i4400,i4620,i1770). +IgnTable(i78,i91,i4620,i4840,i1770). +IgnTable(i78,i91,i4840,i5060,i1880). +IgnTable(i78,i91,i5060,i5280,i1840). +IgnTable(i78,i91,i5280,i5500,i1840). +IgnTable(i78,i91,i5500,i5720,i1980). +IgnTable(i78,i91,i5720,i5940,i2050). +IgnTable(i78,i91,i5940,i6160,i2050). +IgnTable(i78,i91,i6160,i6380,i2230). +IgnTable(i78,i91,i6380,i6600,i2270). +IgnTable(i78,i91,i6600,i6820,i2370). +IgnTable(i78,i91,i6820,i7040,i2370). +IgnTable(i78,i91,i7040,i7260,i2370). +IgnTable(i78,i91,i7260,i7480,i2370). +IgnTable(i78,i91,i7480,i7700,i2370). +IgnTable(i78,i91,i7700,i7920,i2370). +IgnTable(i78,i91,i7920,i8140,i2370). +IgnTable(i78,i91,i8140,i8360,i2370). +IgnTable(i78,i91,i8360,i8580,i2370). +IgnTable(i91,i104,i0,i660,i460). +IgnTable(i91,i104,i660,i880,i600). +IgnTable(i91,i104,i880,i1100,i600). +IgnTable(i91,i104,i1100,i1320,i740). +IgnTable(i91,i104,i1320,i1540,i1030). +IgnTable(i91,i104,i1540,i1760,i1170). +IgnTable(i91,i104,i1760,i1980,i1350). +IgnTable(i91,i104,i1980,i2200,i1350). +IgnTable(i91,i104,i2200,i2420,i1420). +IgnTable(i91,i104,i2420,i2640,i1450). +IgnTable(i91,i104,i2640,i2860,i1490). +IgnTable(i91,i104,i2860,i3080,i1560). +IgnTable(i91,i104,i3080,i3300,i1560). +IgnTable(i91,i104,i3300,i3520,i1660). +IgnTable(i91,i104,i3520,i3740,i1730). +IgnTable(i91,i104,i3740,i3960,i1810). +IgnTable(i91,i104,i3960,i4180,i1910). +IgnTable(i91,i104,i4180,i4400,i2050). +IgnTable(i91,i104,i4400,i4620,i1880). +IgnTable(i91,i104,i4620,i4840,i1770). +IgnTable(i91,i104,i4840,i5060,i1840). +IgnTable(i91,i104,i5060,i5280,i1840). +IgnTable(i91,i104,i5280,i5500,i1980). +IgnTable(i91,i104,i5500,i5720,i1980). +IgnTable(i91,i104,i5720,i5940,i1980). +IgnTable(i91,i104,i5940,i6160,i2050). +IgnTable(i91,i104,i6160,i6380,i2190). +IgnTable(i91,i104,i6380,i6600,i2090). +IgnTable(i91,i104,i6600,i6820,i2160). +IgnTable(i91,i104,i6820,i7040,i2090). +IgnTable(i91,i104,i7040,i7260,i2020). +IgnTable(i91,i104,i7260,i7480,i1980). +IgnTable(i91,i104,i7480,i7700,i1980). +IgnTable(i91,i104,i7700,i7920,i1980). 
+IgnTable(i91,i104,i7920,i8140,i1980). +IgnTable(i91,i104,i8140,i8360,i1980). +IgnTable(i91,i104,i8360,i8580,i1980). +IgnTable(i104,i117,i0,i660,i460). +IgnTable(i104,i117,i660,i880,i600). +IgnTable(i104,i117,i880,i1100,i600). +IgnTable(i104,i117,i1100,i1320,i710). +IgnTable(i104,i117,i1320,i1540,i960). +IgnTable(i104,i117,i1540,i1760,i1100). +IgnTable(i104,i117,i1760,i1980,i1240). +IgnTable(i104,i117,i1980,i2200,i1240). +IgnTable(i104,i117,i2200,i2420,i1310). +IgnTable(i104,i117,i2420,i2640,i1420). +IgnTable(i104,i117,i2640,i2860,i1450). +IgnTable(i104,i117,i2860,i3080,i1520). +IgnTable(i104,i117,i3080,i3300,i1560). +IgnTable(i104,i117,i3300,i3520,i1630). +IgnTable(i104,i117,i3520,i3740,i1630). +IgnTable(i104,i117,i3740,i3960,i1630). +IgnTable(i104,i117,i3960,i4180,i1770). +IgnTable(i104,i117,i4180,i4400,i1880). +IgnTable(i104,i117,i4400,i4620,i1810). +IgnTable(i104,i117,i4620,i4840,i1700). +IgnTable(i104,i117,i4840,i5060,i1730). +IgnTable(i104,i117,i5060,i5280,i1770). +IgnTable(i104,i117,i5280,i5500,i1880). +IgnTable(i104,i117,i5500,i5720,i1910). +IgnTable(i104,i117,i5720,i5940,i1910). +IgnTable(i104,i117,i5940,i6160,i2020). +IgnTable(i104,i117,i6160,i6380,i2160). +IgnTable(i104,i117,i6380,i6600,i2090). +IgnTable(i104,i117,i6600,i6820,i2090). +IgnTable(i104,i117,i6820,i7040,i2050). +IgnTable(i104,i117,i7040,i7260,i2020). +IgnTable(i104,i117,i7260,i7480,i1980). +IgnTable(i104,i117,i7480,i7700,i1950). +IgnTable(i104,i117,i7700,i7920,i1950). +IgnTable(i104,i117,i7920,i8140,i1950). +IgnTable(i104,i117,i8140,i8360,i1950). +IgnTable(i104,i117,i8360,i8580,i1950). +IgnTable(i117,i130,i0,i660,i460). +IgnTable(i117,i130,i660,i880,i600). +IgnTable(i117,i130,i880,i1100,i600). +IgnTable(i117,i130,i1100,i1320,i710). +IgnTable(i117,i130,i1320,i1540,i920). +IgnTable(i117,i130,i1540,i1760,i1100). +IgnTable(i117,i130,i1760,i1980,i1240). +IgnTable(i117,i130,i1980,i2200,i1240). +IgnTable(i117,i130,i2200,i2420,i1310). +IgnTable(i117,i130,i2420,i2640,i1420). +IgnTable(i117,i130,i2640,i2860,i1450). +IgnTable(i117,i130,i2860,i3080,i1520). +IgnTable(i117,i130,i3080,i3300,i1560). +IgnTable(i117,i130,i3300,i3520,i1630). +IgnTable(i117,i130,i3520,i3740,i1630). +IgnTable(i117,i130,i3740,i3960,i1630). +IgnTable(i117,i130,i3960,i4180,i1770). +IgnTable(i117,i130,i4180,i4400,i1880). +IgnTable(i117,i130,i4400,i4620,i1810). +IgnTable(i117,i130,i4620,i4840,i1730). +IgnTable(i117,i130,i4840,i5060,i1730). +IgnTable(i117,i130,i5060,i5280,i1770). +IgnTable(i117,i130,i5280,i5500,i1880). +IgnTable(i117,i130,i5500,i5720,i1910). +IgnTable(i117,i130,i5720,i5940,i1910). +IgnTable(i117,i130,i5940,i6160,i2020). +IgnTable(i117,i130,i6160,i6380,i2160). +IgnTable(i117,i130,i6380,i6600,i2090). +IgnTable(i117,i130,i6600,i6820,i2090). +IgnTable(i117,i130,i6820,i7040,i2050). +IgnTable(i117,i130,i7040,i7260,i2020). +IgnTable(i117,i130,i7260,i7480,i1980). +IgnTable(i117,i130,i7480,i7700,i1950). +IgnTable(i117,i130,i7700,i7920,i1950). +IgnTable(i117,i130,i7920,i8140,i1950). +IgnTable(i117,i130,i8140,i8360,i1950). +IgnTable(i117,i130,i8360,i8580,i1950). +IgnTable(i130,i143,i0,i660,i460). +IgnTable(i130,i143,i660,i880,i600). +IgnTable(i130,i143,i880,i1100,i600). +IgnTable(i130,i143,i1100,i1320,i710). +IgnTable(i130,i143,i1320,i1540,i920). +IgnTable(i130,i143,i1540,i1760,i1100). +IgnTable(i130,i143,i1760,i1980,i1240). +IgnTable(i130,i143,i1980,i2200,i1240). +IgnTable(i130,i143,i2200,i2420,i1310). +IgnTable(i130,i143,i2420,i2640,i1420). +IgnTable(i130,i143,i2640,i2860,i1450). +IgnTable(i130,i143,i2860,i3080,i1520). 
+IgnTable(i130,i143,i3080,i3300,i1560). +IgnTable(i130,i143,i3300,i3520,i1630). +IgnTable(i130,i143,i3520,i3740,i1630). +IgnTable(i130,i143,i3740,i3960,i1630). +IgnTable(i130,i143,i3960,i4180,i1770). +IgnTable(i130,i143,i4180,i4400,i1880). +IgnTable(i130,i143,i4400,i4620,i1810). +IgnTable(i130,i143,i4620,i4840,i1730). +IgnTable(i130,i143,i4840,i5060,i1730). +IgnTable(i130,i143,i5060,i5280,i1770). +IgnTable(i130,i143,i5280,i5500,i1880). +IgnTable(i130,i143,i5500,i5720,i1910). +IgnTable(i130,i143,i5720,i5940,i1910). +IgnTable(i130,i143,i5940,i6160,i2020). +IgnTable(i130,i143,i6160,i6380,i2160). +IgnTable(i130,i143,i6380,i6600,i2090). +IgnTable(i130,i143,i6600,i6820,i2090). +IgnTable(i130,i143,i6820,i7040,i2050). +IgnTable(i130,i143,i7040,i7260,i2020). +IgnTable(i130,i143,i7260,i7480,i1980). +IgnTable(i130,i143,i7480,i7700,i1950). +IgnTable(i130,i143,i7700,i7920,i1950). +IgnTable(i130,i143,i7920,i8140,i1950). +IgnTable(i130,i143,i8140,i8360,i1950). +IgnTable(i130,i143,i8360,i8580,i1950). +IgnTable(i143,i156,i0,i660,i460). +IgnTable(i143,i156,i660,i880,i600). +IgnTable(i143,i156,i880,i1100,i600). +IgnTable(i143,i156,i1100,i1320,i710). +IgnTable(i143,i156,i1320,i1540,i920). +IgnTable(i143,i156,i1540,i1760,i1100). +IgnTable(i143,i156,i1760,i1980,i1240). +IgnTable(i143,i156,i1980,i2200,i1240). +IgnTable(i143,i156,i2200,i2420,i1270). +IgnTable(i143,i156,i2420,i2640,i1420). +IgnTable(i143,i156,i2640,i2860,i1450). +IgnTable(i143,i156,i2860,i3080,i1490). +IgnTable(i143,i156,i3080,i3300,i1560). +IgnTable(i143,i156,i3300,i3520,i1630). +IgnTable(i143,i156,i3520,i3740,i1630). +IgnTable(i143,i156,i3740,i3960,i1630). +IgnTable(i143,i156,i3960,i4180,i1770). +IgnTable(i143,i156,i4180,i4400,i1880). +IgnTable(i143,i156,i4400,i4620,i1810). +IgnTable(i143,i156,i4620,i4840,i1730). +IgnTable(i143,i156,i4840,i5060,i1730). +IgnTable(i143,i156,i5060,i5280,i1770). +IgnTable(i143,i156,i5280,i5500,i1880). +IgnTable(i143,i156,i5500,i5720,i1910). +IgnTable(i143,i156,i5720,i5940,i1910). +IgnTable(i143,i156,i5940,i6160,i2020). +IgnTable(i143,i156,i6160,i6380,i2160). +IgnTable(i143,i156,i6380,i6600,i2090). +IgnTable(i143,i156,i6600,i6820,i2090). +IgnTable(i143,i156,i6820,i7040,i2050). +IgnTable(i143,i156,i7040,i7260,i2020). +IgnTable(i143,i156,i7260,i7480,i1980). +IgnTable(i143,i156,i7480,i7700,i1950). +IgnTable(i143,i156,i7700,i7920,i1950). +IgnTable(i143,i156,i7920,i8140,i1950). +IgnTable(i143,i156,i8140,i8360,i1950). +IgnTable(i143,i156,i8360,i8580,i1950). +IgnTable(i156,i169,i0,i660,i460). +IgnTable(i156,i169,i660,i880,i600). +IgnTable(i156,i169,i880,i1100,i600). +IgnTable(i156,i169,i1100,i1320,i710). +IgnTable(i156,i169,i1320,i1540,i920). +IgnTable(i156,i169,i1540,i1760,i1100). +IgnTable(i156,i169,i1760,i1980,i1240). +IgnTable(i156,i169,i1980,i2200,i1240). +IgnTable(i156,i169,i2200,i2420,i1310). +IgnTable(i156,i169,i2420,i2640,i1420). +IgnTable(i156,i169,i2640,i2860,i1450). +IgnTable(i156,i169,i2860,i3080,i1490). +IgnTable(i156,i169,i3080,i3300,i1560). +IgnTable(i156,i169,i3300,i3520,i1630). +IgnTable(i156,i169,i3520,i3740,i1630). +IgnTable(i156,i169,i3740,i3960,i1630). +IgnTable(i156,i169,i3960,i4180,i1700). +IgnTable(i156,i169,i4180,i4400,i1770). +IgnTable(i156,i169,i4400,i4620,i1810). +IgnTable(i156,i169,i4620,i4840,i1700). +IgnTable(i156,i169,i4840,i5060,i1730). +IgnTable(i156,i169,i5060,i5280,i1770). +IgnTable(i156,i169,i5280,i5500,i1880). +IgnTable(i156,i169,i5500,i5720,i1910). +IgnTable(i156,i169,i5720,i5940,i1910). +IgnTable(i156,i169,i5940,i6160,i2020). +IgnTable(i156,i169,i6160,i6380,i2050). 
+IgnTable(i156,i169,i6380,i6600,i2050). +IgnTable(i156,i169,i6600,i6820,i2050). +IgnTable(i156,i169,i6820,i7040,i2050). +IgnTable(i156,i169,i7040,i7260,i2020). +IgnTable(i156,i169,i7260,i7480,i1980). +IgnTable(i156,i169,i7480,i7700,i1950). +IgnTable(i156,i169,i7700,i7920,i1950). +IgnTable(i156,i169,i7920,i8140,i1950). +IgnTable(i156,i169,i8140,i8360,i1950). +IgnTable(i156,i169,i8360,i8580,i1950). +IgnTable(i169,i182,i0,i660,i460). +IgnTable(i169,i182,i660,i880,i600). +IgnTable(i169,i182,i880,i1100,i600). +IgnTable(i169,i182,i1100,i1320,i710). +IgnTable(i169,i182,i1320,i1540,i920). +IgnTable(i169,i182,i1540,i1760,i1060). +IgnTable(i169,i182,i1760,i1980,i1200). +IgnTable(i169,i182,i1980,i2200,i1200). +IgnTable(i169,i182,i2200,i2420,i1270). +IgnTable(i169,i182,i2420,i2640,i1380). +IgnTable(i169,i182,i2640,i2860,i1450). +IgnTable(i169,i182,i2860,i3080,i1490). +IgnTable(i169,i182,i3080,i3300,i1520). +IgnTable(i169,i182,i3300,i3520,i1590). +IgnTable(i169,i182,i3520,i3740,i1590). +IgnTable(i169,i182,i3740,i3960,i1590). +IgnTable(i169,i182,i3960,i4180,i1700). +IgnTable(i169,i182,i4180,i4400,i1660). +IgnTable(i169,i182,i4400,i4620,i1560). +IgnTable(i169,i182,i4620,i4840,i1560). +IgnTable(i169,i182,i4840,i5060,i1630). +IgnTable(i169,i182,i5060,i5280,i1810). +IgnTable(i169,i182,i5280,i5500,i2090). +IgnTable(i169,i182,i5500,i5720,i2120). +IgnTable(i169,i182,i5720,i5940,i1950). +IgnTable(i169,i182,i5940,i6160,i1840). +IgnTable(i169,i182,i6160,i6380,i2300). +IgnTable(i169,i182,i6380,i6600,i2340). +IgnTable(i169,i182,i6600,i6820,i2300). +IgnTable(i169,i182,i6820,i7040,i1910). +IgnTable(i169,i182,i7040,i7260,i2300). +IgnTable(i169,i182,i7260,i7480,i2230). +IgnTable(i169,i182,i7480,i7700,i2190). +IgnTable(i169,i182,i7700,i7920,i2190). +IgnTable(i169,i182,i7920,i8140,i2190). +IgnTable(i169,i182,i8140,i8360,i2190). +IgnTable(i169,i182,i8360,i8580,i2190). +IgnTable(i182,i195,i0,i660,i460). +IgnTable(i182,i195,i660,i880,i600). +IgnTable(i182,i195,i880,i1100,i600). +IgnTable(i182,i195,i1100,i1320,i710). +IgnTable(i182,i195,i1320,i1540,i920). +IgnTable(i182,i195,i1540,i1760,i1030). +IgnTable(i182,i195,i1760,i1980,i1170). +IgnTable(i182,i195,i1980,i2200,i1170). +IgnTable(i182,i195,i2200,i2420,i1240). +IgnTable(i182,i195,i2420,i2640,i1350). +IgnTable(i182,i195,i2640,i2860,i1420). +IgnTable(i182,i195,i2860,i3080,i1450). +IgnTable(i182,i195,i3080,i3300,i1490). +IgnTable(i182,i195,i3300,i3520,i1560). +IgnTable(i182,i195,i3520,i3740,i1560). +IgnTable(i182,i195,i3740,i3960,i1560). +IgnTable(i182,i195,i3960,i4180,i1700). +IgnTable(i182,i195,i4180,i4400,i1810). +IgnTable(i182,i195,i4400,i4620,i1770). +IgnTable(i182,i195,i4620,i4840,i1660). +IgnTable(i182,i195,i4840,i5060,i1660). +IgnTable(i182,i195,i5060,i5280,i1810). +IgnTable(i182,i195,i5280,i5500,i2120). +IgnTable(i182,i195,i5500,i5720,i2120). +IgnTable(i182,i195,i5720,i5940,i2090). +IgnTable(i182,i195,i5940,i6160,i2160). +IgnTable(i182,i195,i6160,i6380,i2050). +IgnTable(i182,i195,i6380,i6600,i1700). +IgnTable(i182,i195,i6600,i6820,i1730). +IgnTable(i182,i195,i6820,i7040,i1880). +IgnTable(i182,i195,i7040,i7260,i2230). +IgnTable(i182,i195,i7260,i7480,i2190). +IgnTable(i182,i195,i7480,i7700,i2160). +IgnTable(i182,i195,i7700,i7920,i2160). +IgnTable(i182,i195,i7920,i8140,i2160). +IgnTable(i182,i195,i8140,i8360,i2160). +IgnTable(i182,i195,i8360,i8580,i2160). +IgnTable(i195,i208,i0,i660,i460). +IgnTable(i195,i208,i660,i880,i600). +IgnTable(i195,i208,i880,i1100,i600). +IgnTable(i195,i208,i1100,i1320,i710). +IgnTable(i195,i208,i1320,i1540,i920). 
+IgnTable(i195,i208,i1540,i1760,i990). +IgnTable(i195,i208,i1760,i1980,i1100). +IgnTable(i195,i208,i1980,i2200,i1100). +IgnTable(i195,i208,i2200,i2420,i1130). +IgnTable(i195,i208,i2420,i2640,i1240). +IgnTable(i195,i208,i2640,i2860,i1310). +IgnTable(i195,i208,i2860,i3080,i1380). +IgnTable(i195,i208,i3080,i3300,i1420). +IgnTable(i195,i208,i3300,i3520,i1450). +IgnTable(i195,i208,i3520,i3740,i1450). +IgnTable(i195,i208,i3740,i3960,i1450). +IgnTable(i195,i208,i3960,i4180,i1560). +IgnTable(i195,i208,i4180,i4400,i1700). +IgnTable(i195,i208,i4400,i4620,i1630). +IgnTable(i195,i208,i4620,i4840,i1520). +IgnTable(i195,i208,i4840,i5060,i1520). +IgnTable(i195,i208,i5060,i5280,i1660). +IgnTable(i195,i208,i5280,i5500,i1980). +IgnTable(i195,i208,i5500,i5720,i2020). +IgnTable(i195,i208,i5720,i5940,i2020). +IgnTable(i195,i208,i5940,i6160,i2090). +IgnTable(i195,i208,i6160,i6380,i2120). +IgnTable(i195,i208,i6380,i6600,i2120). +IgnTable(i195,i208,i6600,i6820,i2120). +IgnTable(i195,i208,i6820,i7040,i2120). +IgnTable(i195,i208,i7040,i7260,i2090). +IgnTable(i195,i208,i7260,i7480,i2050). +IgnTable(i195,i208,i7480,i7700,i2020). +IgnTable(i195,i208,i7700,i7920,i2020). +IgnTable(i195,i208,i7920,i8140,i2020). +IgnTable(i195,i208,i8140,i8360,i2020). +IgnTable(i195,i208,i8360,i8580,i2020). +IgnTable(i208,i221,i0,i660,i460). +IgnTable(i208,i221,i660,i880,i600). +IgnTable(i208,i221,i880,i1100,i600). +IgnTable(i208,i221,i1100,i1320,i710). +IgnTable(i208,i221,i1320,i1540,i920). +IgnTable(i208,i221,i1540,i1760,i960). +IgnTable(i208,i221,i1760,i1980,i1060). +IgnTable(i208,i221,i1980,i2200,i1030). +IgnTable(i208,i221,i2200,i2420,i1100). +IgnTable(i208,i221,i2420,i2640,i1170). +IgnTable(i208,i221,i2640,i2860,i1240). +IgnTable(i208,i221,i2860,i3080,i1310). +IgnTable(i208,i221,i3080,i3300,i1350). +IgnTable(i208,i221,i3300,i3520,i1420). +IgnTable(i208,i221,i3520,i3740,i1420). +IgnTable(i208,i221,i3740,i3960,i1420). +IgnTable(i208,i221,i3960,i4180,i1490). +IgnTable(i208,i221,i4180,i4400,i1590). +IgnTable(i208,i221,i4400,i4620,i1520). +IgnTable(i208,i221,i4620,i4840,i1450). +IgnTable(i208,i221,i4840,i5060,i1450). +IgnTable(i208,i221,i5060,i5280,i1490). +IgnTable(i208,i221,i5280,i5500,i1590). +IgnTable(i208,i221,i5500,i5720,i1630). +IgnTable(i208,i221,i5720,i5940,i1630). +IgnTable(i208,i221,i5940,i6160,i1730). +IgnTable(i208,i221,i6160,i6380,i1730). +IgnTable(i208,i221,i6380,i6600,i1730). +IgnTable(i208,i221,i6600,i6820,i1730). +IgnTable(i208,i221,i6820,i7040,i1730). +IgnTable(i208,i221,i7040,i7260,i1730). +IgnTable(i208,i221,i7260,i7480,i1700). +IgnTable(i208,i221,i7480,i7700,i1660). +IgnTable(i208,i221,i7700,i7920,i1660). +IgnTable(i208,i221,i7920,i8140,i1660). +IgnTable(i208,i221,i8140,i8360,i1660). +IgnTable(i208,i221,i8360,i8580,i1660). +IgnTable(i221,i234,i0,i660,i460). +IgnTable(i221,i234,i660,i880,i600). +IgnTable(i221,i234,i880,i1100,i600). +IgnTable(i221,i234,i1100,i1320,i710). +IgnTable(i221,i234,i1320,i1540,i920). +IgnTable(i221,i234,i1540,i1760,i920). +IgnTable(i221,i234,i1760,i1980,i990). +IgnTable(i221,i234,i1980,i2200,i960). +IgnTable(i221,i234,i2200,i2420,i1030). +IgnTable(i221,i234,i2420,i2640,i1130). +IgnTable(i221,i234,i2640,i2860,i1170). +IgnTable(i221,i234,i2860,i3080,i1240). +IgnTable(i221,i234,i3080,i3300,i1270). +IgnTable(i221,i234,i3300,i3520,i1350). +IgnTable(i221,i234,i3520,i3740,i1350). +IgnTable(i221,i234,i3740,i3960,i1350). +IgnTable(i221,i234,i3960,i4180,i1450). +IgnTable(i221,i234,i4180,i4400,i1560). +IgnTable(i221,i234,i4400,i4620,i1490). +IgnTable(i221,i234,i4620,i4840,i1420). 
+IgnTable(i221,i234,i4840,i5060,i1420). +IgnTable(i221,i234,i5060,i5280,i1450). +IgnTable(i221,i234,i5280,i5500,i1560). +IgnTable(i221,i234,i5500,i5720,i1590). +IgnTable(i221,i234,i5720,i5940,i1590). +IgnTable(i221,i234,i5940,i6160,i1700). +IgnTable(i221,i234,i6160,i6380,i1700). +IgnTable(i221,i234,i6380,i6600,i1700). +IgnTable(i221,i234,i6600,i6820,i1700). +IgnTable(i221,i234,i6820,i7040,i1700). +IgnTable(i221,i234,i7040,i7260,i1700). +IgnTable(i221,i234,i7260,i7480,i1660). +IgnTable(i221,i234,i7480,i7700,i1630). +IgnTable(i221,i234,i7700,i7920,i1630). +IgnTable(i221,i234,i7920,i8140,i1630). +IgnTable(i221,i234,i8140,i8360,i1630). +IgnTable(i221,i234,i8360,i8580,i1630). +IgnTable(i234,i247,i0,i660,i460). +IgnTable(i234,i247,i660,i880,i600). +IgnTable(i234,i247,i880,i1100,i600). +IgnTable(i234,i247,i1100,i1320,i710). +IgnTable(i234,i247,i1320,i1540,i920). +IgnTable(i234,i247,i1540,i1760,i890). +IgnTable(i234,i247,i1760,i1980,i960). +IgnTable(i234,i247,i1980,i2200,i920). +IgnTable(i234,i247,i2200,i2420,i990). +IgnTable(i234,i247,i2420,i2640,i1100). +IgnTable(i234,i247,i2640,i2860,i1130). +IgnTable(i234,i247,i2860,i3080,i1200). +IgnTable(i234,i247,i3080,i3300,i1240). +IgnTable(i234,i247,i3300,i3520,i1310). +IgnTable(i234,i247,i3520,i3740,i1310). +IgnTable(i234,i247,i3740,i3960,i1310). +IgnTable(i234,i247,i3960,i4180,i1420). +IgnTable(i234,i247,i4180,i4400,i1520). +IgnTable(i234,i247,i4400,i4620,i1450). +IgnTable(i234,i247,i4620,i4840,i1380). +IgnTable(i234,i247,i4840,i5060,i1380). +IgnTable(i234,i247,i5060,i5280,i1420). +IgnTable(i234,i247,i5280,i5500,i1520). +IgnTable(i234,i247,i5500,i5720,i1560). +IgnTable(i234,i247,i5720,i5940,i1560). +IgnTable(i234,i247,i5940,i6160,i1660). +IgnTable(i234,i247,i6160,i6380,i1660). +IgnTable(i234,i247,i6380,i6600,i1660). +IgnTable(i234,i247,i6600,i6820,i1660). +IgnTable(i234,i247,i6820,i7040,i1660). +IgnTable(i234,i247,i7040,i7260,i1660). +IgnTable(i234,i247,i7260,i7480,i1630). +IgnTable(i234,i247,i7480,i7700,i1590). +IgnTable(i234,i247,i7700,i7920,i1590). +IgnTable(i234,i247,i7920,i8140,i1590). +IgnTable(i234,i247,i8140,i8360,i1590). +IgnTable(i234,i247,i8360,i8580,i1590). +IgnTable(i247,i260,i0,i660,i460). +IgnTable(i247,i260,i660,i880,i600). +IgnTable(i247,i260,i880,i1100,i600). +IgnTable(i247,i260,i1100,i1320,i710). +IgnTable(i247,i260,i1320,i1540,i920). +IgnTable(i247,i260,i1540,i1760,i890). +IgnTable(i247,i260,i1760,i1980,i920). +IgnTable(i247,i260,i1980,i2200,i890). +IgnTable(i247,i260,i2200,i2420,i960). +IgnTable(i247,i260,i2420,i2640,i1060). +IgnTable(i247,i260,i2640,i2860,i1100). +IgnTable(i247,i260,i2860,i3080,i1170). +IgnTable(i247,i260,i3080,i3300,i1200). +IgnTable(i247,i260,i3300,i3520,i1270). +IgnTable(i247,i260,i3520,i3740,i1270). +IgnTable(i247,i260,i3740,i3960,i1270). +IgnTable(i247,i260,i3960,i4180,i1380). +IgnTable(i247,i260,i4180,i4400,i1490). +IgnTable(i247,i260,i4400,i4620,i1420). +IgnTable(i247,i260,i4620,i4840,i1350). +IgnTable(i247,i260,i4840,i5060,i1350). +IgnTable(i247,i260,i5060,i5280,i1380). +IgnTable(i247,i260,i5280,i5500,i1490). +IgnTable(i247,i260,i5500,i5720,i1520). +IgnTable(i247,i260,i5720,i5940,i1520). +IgnTable(i247,i260,i5940,i6160,i1630). +IgnTable(i247,i260,i6160,i6380,i1630). +IgnTable(i247,i260,i6380,i6600,i1630). +IgnTable(i247,i260,i6600,i6820,i1630). +IgnTable(i247,i260,i6820,i7040,i1630). +IgnTable(i247,i260,i7040,i7260,i1630). +IgnTable(i247,i260,i7260,i7480,i1590). +IgnTable(i247,i260,i7480,i7700,i1560). +IgnTable(i247,i260,i7700,i7920,i1560). +IgnTable(i247,i260,i7920,i8140,i1560). 
+IgnTable(i247,i260,i8140,i8360,i1560). +IgnTable(i247,i260,i8360,i8580,i1560). +TempIgnTable(i60,i70,i1630,i1530). +TempIgnTable(i60,i70,i1560,i1460). +TempIgnTable(i60,i70,i1590,i1490). +TempIgnTable(i70,i90,i1630,i1430). +TempIgnTable(i70,i90,i1560,i1360). +TempIgnTable(i70,i90,i1590,i1390). +TempIgnTable(i90,i130,i1630,i1330). +TempIgnTable(i90,i130,i1560,i1260). +TempIgnTable(i90,i130,i1590,i1290). +Tmp(?x0,?x0) :- Leq(i-30,?x0),Leq(?x0,i130). +KPa(?x0,?x0) :- Leq(i0,?x0),Leq(?x0,i260). +Rpm(?x0,?x0) :- Leq(i0,?x0),Leq(?x0,i8580). +IgnDeg1(?x6,?x7,?x1,?x4,?x8) :- Leq(?x0,?x1),Le(?x1,?x2),Leq(?x3,?x4),Le(?x4,?x5),KPa(?x6,?x1),Rpm(?x7,?x4),IgnTable(?x0,?x2,?x3,?x5,?x8). +IndnDeg2(?x1,?x2,?x0,?x3,?x4,?x0,?x5) :- Le(?x0,i60),IgnDeg1(?x1,?x2,?x3,?x4,?x5),Tmp(?x0,?x0). +IndnDeg2(?x5,?x6,?x3,?x7,?x8,?x3,?x9) :- Leq(?x1,?x0),Leq(?x2,?x3),Le(?x3,?x4),IgnDeg1(?x5,?x6,?x7,?x8,?x0),Tmp(?x3,?x3),TempIgnTable(?x2,?x4,?x1,?x9). +ResultDeg(?x3) :- Leq(?x0,?x1),Leq(i-30,?x2),Leq(?x2,i130),Leq(i0,?x3),Leq(?x3,i8580),IndnDeg2(?x4,?x5,?x2,?x6,?x3,?x2,?x0),IgnDeg1(?x4,?x5,?x6,?x3,?x1). +Le(i-30,i0). +Le(i-30,i13). +Le(i0,i13). +Le(i-30,i26). +Le(i0,i26). +Le(i13,i26). +Le(i-30,i39). +Le(i0,i39). +Le(i13,i39). +Le(i26,i39). +Le(i-30,i52). +Le(i0,i52). +Le(i13,i52). +Le(i26,i52). +Le(i39,i52). +Le(i-30,i60). +Le(i0,i60). +Le(i13,i60). +Le(i26,i60). +Le(i39,i60). +Le(i52,i60). +Le(i-30,i65). +Le(i0,i65). +Le(i13,i65). +Le(i26,i65). +Le(i39,i65). +Le(i52,i65). +Le(i60,i65). +Le(i-30,i70). +Le(i0,i70). +Le(i13,i70). +Le(i26,i70). +Le(i39,i70). +Le(i52,i70). +Le(i60,i70). +Le(i65,i70). +Le(i-30,i78). +Le(i0,i78). +Le(i13,i78). +Le(i26,i78). +Le(i39,i78). +Le(i52,i78). +Le(i60,i78). +Le(i65,i78). +Le(i70,i78). +Le(i-30,i90). +Le(i0,i90). +Le(i13,i90). +Le(i26,i90). +Le(i39,i90). +Le(i52,i90). +Le(i60,i90). +Le(i65,i90). +Le(i70,i90). +Le(i78,i90). +Le(i-30,i91). +Le(i0,i91). +Le(i13,i91). +Le(i26,i91). +Le(i39,i91). +Le(i52,i91). +Le(i60,i91). +Le(i65,i91). +Le(i70,i91). +Le(i78,i91). +Le(i90,i91). +Le(i-30,i104). +Le(i0,i104). +Le(i13,i104). +Le(i26,i104). +Le(i39,i104). +Le(i52,i104). +Le(i60,i104). +Le(i65,i104). +Le(i70,i104). +Le(i78,i104). +Le(i90,i104). +Le(i91,i104). +Le(i-30,i117). +Le(i0,i117). +Le(i13,i117). +Le(i26,i117). +Le(i39,i117). +Le(i52,i117). +Le(i60,i117). +Le(i65,i117). +Le(i70,i117). +Le(i78,i117). +Le(i90,i117). +Le(i91,i117). +Le(i104,i117). +Le(i-30,i130). +Le(i0,i130). +Le(i13,i130). +Le(i26,i130). +Le(i39,i130). +Le(i52,i130). +Le(i60,i130). +Le(i65,i130). +Le(i70,i130). +Le(i78,i130). +Le(i90,i130). +Le(i91,i130). +Le(i104,i130). +Le(i117,i130). +Le(i-30,i143). +Le(i0,i143). +Le(i13,i143). +Le(i26,i143). +Le(i39,i143). +Le(i52,i143). +Le(i60,i143). +Le(i65,i143). +Le(i70,i143). +Le(i78,i143). +Le(i90,i143). +Le(i91,i143). +Le(i104,i143). +Le(i117,i143). +Le(i130,i143). +Le(i-30,i156). +Le(i0,i156). +Le(i13,i156). +Le(i26,i156). +Le(i39,i156). +Le(i52,i156). +Le(i60,i156). +Le(i65,i156). +Le(i70,i156). +Le(i78,i156). +Le(i90,i156). +Le(i91,i156). +Le(i104,i156). +Le(i117,i156). +Le(i130,i156). +Le(i143,i156). +Le(i-30,i169). +Le(i0,i169). +Le(i13,i169). +Le(i26,i169). +Le(i39,i169). +Le(i52,i169). +Le(i60,i169). +Le(i65,i169). +Le(i70,i169). +Le(i78,i169). +Le(i90,i169). +Le(i91,i169). +Le(i104,i169). +Le(i117,i169). +Le(i130,i169). +Le(i143,i169). +Le(i156,i169). +Le(i-30,i182). +Le(i0,i182). +Le(i13,i182). +Le(i26,i182). +Le(i39,i182). +Le(i52,i182). +Le(i60,i182). +Le(i65,i182). +Le(i70,i182). +Le(i78,i182). +Le(i90,i182). +Le(i91,i182). +Le(i104,i182). +Le(i117,i182). 
+Le(i130,i182). +Le(i143,i182). +Le(i156,i182). +Le(i169,i182). +Le(i-30,i195). +Le(i0,i195). +Le(i13,i195). +Le(i26,i195). +Le(i39,i195). +Le(i52,i195). +Le(i60,i195). +Le(i65,i195). +Le(i70,i195). +Le(i78,i195). +Le(i90,i195). +Le(i91,i195). +Le(i104,i195). +Le(i117,i195). +Le(i130,i195). +Le(i143,i195). +Le(i156,i195). +Le(i169,i195). +Le(i182,i195). +Le(i-30,i208). +Le(i0,i208). +Le(i13,i208). +Le(i26,i208). +Le(i39,i208). +Le(i52,i208). +Le(i60,i208). +Le(i65,i208). +Le(i70,i208). +Le(i78,i208). +Le(i90,i208). +Le(i91,i208). +Le(i104,i208). +Le(i117,i208). +Le(i130,i208). +Le(i143,i208). +Le(i156,i208). +Le(i169,i208). +Le(i182,i208). +Le(i195,i208). +Le(i-30,i221). +Le(i0,i221). +Le(i13,i221). +Le(i26,i221). +Le(i39,i221). +Le(i52,i221). +Le(i60,i221). +Le(i65,i221). +Le(i70,i221). +Le(i78,i221). +Le(i90,i221). +Le(i91,i221). +Le(i104,i221). +Le(i117,i221). +Le(i130,i221). +Le(i143,i221). +Le(i156,i221). +Le(i169,i221). +Le(i182,i221). +Le(i195,i221). +Le(i208,i221). +Le(i-30,i234). +Le(i0,i234). +Le(i13,i234). +Le(i26,i234). +Le(i39,i234). +Le(i52,i234). +Le(i60,i234). +Le(i65,i234). +Le(i70,i234). +Le(i78,i234). +Le(i90,i234). +Le(i91,i234). +Le(i104,i234). +Le(i117,i234). +Le(i130,i234). +Le(i143,i234). +Le(i156,i234). +Le(i169,i234). +Le(i182,i234). +Le(i195,i234). +Le(i208,i234). +Le(i221,i234). +Le(i-30,i247). +Le(i0,i247). +Le(i13,i247). +Le(i26,i247). +Le(i39,i247). +Le(i52,i247). +Le(i60,i247). +Le(i65,i247). +Le(i70,i247). +Le(i78,i247). +Le(i90,i247). +Le(i91,i247). +Le(i104,i247). +Le(i117,i247). +Le(i130,i247). +Le(i143,i247). +Le(i156,i247). +Le(i169,i247). +Le(i182,i247). +Le(i195,i247). +Le(i208,i247). +Le(i221,i247). +Le(i234,i247). +Le(i-30,i260). +Le(i0,i260). +Le(i13,i260). +Le(i26,i260). +Le(i39,i260). +Le(i52,i260). +Le(i60,i260). +Le(i65,i260). +Le(i70,i260). +Le(i78,i260). +Le(i90,i260). +Le(i91,i260). +Le(i104,i260). +Le(i117,i260). +Le(i130,i260). +Le(i143,i260). +Le(i156,i260). +Le(i169,i260). +Le(i182,i260). +Le(i195,i260). +Le(i208,i260). +Le(i221,i260). +Le(i234,i260). +Le(i247,i260). +Le(i-30,i460). +Le(i0,i460). +Le(i13,i460). +Le(i26,i460). +Le(i39,i460). +Le(i52,i460). +Le(i60,i460). +Le(i65,i460). +Le(i70,i460). +Le(i78,i460). +Le(i90,i460). +Le(i91,i460). +Le(i104,i460). +Le(i117,i460). +Le(i130,i460). +Le(i143,i460). +Le(i156,i460). +Le(i169,i460). +Le(i182,i460). +Le(i195,i460). +Le(i208,i460). +Le(i221,i460). +Le(i234,i460). +Le(i247,i460). +Le(i260,i460). +Le(i-30,i530). +Le(i0,i530). +Le(i13,i530). +Le(i26,i530). +Le(i39,i530). +Le(i52,i530). +Le(i60,i530). +Le(i65,i530). +Le(i70,i530). +Le(i78,i530). +Le(i90,i530). +Le(i91,i530). +Le(i104,i530). +Le(i117,i530). +Le(i130,i530). +Le(i143,i530). +Le(i156,i530). +Le(i169,i530). +Le(i182,i530). +Le(i195,i530). +Le(i208,i530). +Le(i221,i530). +Le(i234,i530). +Le(i247,i530). +Le(i260,i530). +Le(i460,i530). +Le(i-30,i600). +Le(i0,i600). +Le(i13,i600). +Le(i26,i600). +Le(i39,i600). +Le(i52,i600). +Le(i60,i600). +Le(i65,i600). +Le(i70,i600). +Le(i78,i600). +Le(i90,i600). +Le(i91,i600). +Le(i104,i600). +Le(i117,i600). +Le(i130,i600). +Le(i143,i600). +Le(i156,i600). +Le(i169,i600). +Le(i182,i600). +Le(i195,i600). +Le(i208,i600). +Le(i221,i600). +Le(i234,i600). +Le(i247,i600). +Le(i260,i600). +Le(i460,i600). +Le(i530,i600). +Le(i-30,i660). +Le(i0,i660). +Le(i13,i660). +Le(i26,i660). +Le(i39,i660). +Le(i52,i660). +Le(i60,i660). +Le(i65,i660). +Le(i70,i660). +Le(i78,i660). +Le(i90,i660). +Le(i91,i660). +Le(i104,i660). +Le(i117,i660). +Le(i130,i660). +Le(i143,i660). +Le(i156,i660). +Le(i169,i660). 
+Le(i182,i660). +Le(i195,i660). +Le(i208,i660). +Le(i221,i660). +Le(i234,i660). +Le(i247,i660). +Le(i260,i660). +Le(i460,i660). +Le(i530,i660). +Le(i600,i660). +Le(i-30,i670). +Le(i0,i670). +Le(i13,i670). +Le(i26,i670). +Le(i39,i670). +Le(i52,i670). +Le(i60,i670). +Le(i65,i670). +Le(i70,i670). +Le(i78,i670). +Le(i90,i670). +Le(i91,i670). +Le(i104,i670). +Le(i117,i670). +Le(i130,i670). +Le(i143,i670). +Le(i156,i670). +Le(i169,i670). +Le(i182,i670). +Le(i195,i670). +Le(i208,i670). +Le(i221,i670). +Le(i234,i670). +Le(i247,i670). +Le(i260,i670). +Le(i460,i670). +Le(i530,i670). +Le(i600,i670). +Le(i660,i670). +Le(i-30,i710). +Le(i0,i710). +Le(i13,i710). +Le(i26,i710). +Le(i39,i710). +Le(i52,i710). +Le(i60,i710). +Le(i65,i710). +Le(i70,i710). +Le(i78,i710). +Le(i90,i710). +Le(i91,i710). +Le(i104,i710). +Le(i117,i710). +Le(i130,i710). +Le(i143,i710). +Le(i156,i710). +Le(i169,i710). +Le(i182,i710). +Le(i195,i710). +Le(i208,i710). +Le(i221,i710). +Le(i234,i710). +Le(i247,i710). +Le(i260,i710). +Le(i460,i710). +Le(i530,i710). +Le(i600,i710). +Le(i660,i710). +Le(i670,i710). +Le(i-30,i740). +Le(i0,i740). +Le(i13,i740). +Le(i26,i740). +Le(i39,i740). +Le(i52,i740). +Le(i60,i740). +Le(i65,i740). +Le(i70,i740). +Le(i78,i740). +Le(i90,i740). +Le(i91,i740). +Le(i104,i740). +Le(i117,i740). +Le(i130,i740). +Le(i143,i740). +Le(i156,i740). +Le(i169,i740). +Le(i182,i740). +Le(i195,i740). +Le(i208,i740). +Le(i221,i740). +Le(i234,i740). +Le(i247,i740). +Le(i260,i740). +Le(i460,i740). +Le(i530,i740). +Le(i600,i740). +Le(i660,i740). +Le(i670,i740). +Le(i710,i740). +Le(i-30,i810). +Le(i0,i810). +Le(i13,i810). +Le(i26,i810). +Le(i39,i810). +Le(i52,i810). +Le(i60,i810). +Le(i65,i810). +Le(i70,i810). +Le(i78,i810). +Le(i90,i810). +Le(i91,i810). +Le(i104,i810). +Le(i117,i810). +Le(i130,i810). +Le(i143,i810). +Le(i156,i810). +Le(i169,i810). +Le(i182,i810). +Le(i195,i810). +Le(i208,i810). +Le(i221,i810). +Le(i234,i810). +Le(i247,i810). +Le(i260,i810). +Le(i460,i810). +Le(i530,i810). +Le(i600,i810). +Le(i660,i810). +Le(i670,i810). +Le(i710,i810). +Le(i740,i810). +Le(i-30,i850). +Le(i0,i850). +Le(i13,i850). +Le(i26,i850). +Le(i39,i850). +Le(i52,i850). +Le(i60,i850). +Le(i65,i850). +Le(i70,i850). +Le(i78,i850). +Le(i90,i850). +Le(i91,i850). +Le(i104,i850). +Le(i117,i850). +Le(i130,i850). +Le(i143,i850). +Le(i156,i850). +Le(i169,i850). +Le(i182,i850). +Le(i195,i850). +Le(i208,i850). +Le(i221,i850). +Le(i234,i850). +Le(i247,i850). +Le(i260,i850). +Le(i460,i850). +Le(i530,i850). +Le(i600,i850). +Le(i660,i850). +Le(i670,i850). +Le(i710,i850). +Le(i740,i850). +Le(i810,i850). +Le(i-30,i880). +Le(i0,i880). +Le(i13,i880). +Le(i26,i880). +Le(i39,i880). +Le(i52,i880). +Le(i60,i880). +Le(i65,i880). +Le(i70,i880). +Le(i78,i880). +Le(i90,i880). +Le(i91,i880). +Le(i104,i880). +Le(i117,i880). +Le(i130,i880). +Le(i143,i880). +Le(i156,i880). +Le(i169,i880). +Le(i182,i880). +Le(i195,i880). +Le(i208,i880). +Le(i221,i880). +Le(i234,i880). +Le(i247,i880). +Le(i260,i880). +Le(i460,i880). +Le(i530,i880). +Le(i600,i880). +Le(i660,i880). +Le(i670,i880). +Le(i710,i880). +Le(i740,i880). +Le(i810,i880). +Le(i850,i880). +Le(i-30,i890). +Le(i0,i890). +Le(i13,i890). +Le(i26,i890). +Le(i39,i890). +Le(i52,i890). +Le(i60,i890). +Le(i65,i890). +Le(i70,i890). +Le(i78,i890). +Le(i90,i890). +Le(i91,i890). +Le(i104,i890). +Le(i117,i890). +Le(i130,i890). +Le(i143,i890). +Le(i156,i890). +Le(i169,i890). +Le(i182,i890). +Le(i195,i890). +Le(i208,i890). +Le(i221,i890). +Le(i234,i890). +Le(i247,i890). +Le(i260,i890). +Le(i460,i890). +Le(i530,i890). +Le(i600,i890). 
+Le(i660,i890). +Le(i670,i890). +Le(i710,i890). +Le(i740,i890). +Le(i810,i890). +Le(i850,i890). +Le(i880,i890). +Le(i-30,i920). +Le(i0,i920). +Le(i13,i920). +Le(i26,i920). +Le(i39,i920). +Le(i52,i920). +Le(i60,i920). +Le(i65,i920). +Le(i70,i920). +Le(i78,i920). +Le(i90,i920). +Le(i91,i920). +Le(i104,i920). +Le(i117,i920). +Le(i130,i920). +Le(i143,i920). +Le(i156,i920). +Le(i169,i920). +Le(i182,i920). +Le(i195,i920). +Le(i208,i920). +Le(i221,i920). +Le(i234,i920). +Le(i247,i920). +Le(i260,i920). +Le(i460,i920). +Le(i530,i920). +Le(i600,i920). +Le(i660,i920). +Le(i670,i920). +Le(i710,i920). +Le(i740,i920). +Le(i810,i920). +Le(i850,i920). +Le(i880,i920). +Le(i890,i920). +Le(i-30,i960). +Le(i0,i960). +Le(i13,i960). +Le(i26,i960). +Le(i39,i960). +Le(i52,i960). +Le(i60,i960). +Le(i65,i960). +Le(i70,i960). +Le(i78,i960). +Le(i90,i960). +Le(i91,i960). +Le(i104,i960). +Le(i117,i960). +Le(i130,i960). +Le(i143,i960). +Le(i156,i960). +Le(i169,i960). +Le(i182,i960). +Le(i195,i960). +Le(i208,i960). +Le(i221,i960). +Le(i234,i960). +Le(i247,i960). +Le(i260,i960). +Le(i460,i960). +Le(i530,i960). +Le(i600,i960). +Le(i660,i960). +Le(i670,i960). +Le(i710,i960). +Le(i740,i960). +Le(i810,i960). +Le(i850,i960). +Le(i880,i960). +Le(i890,i960). +Le(i920,i960). +Le(i-30,i990). +Le(i0,i990). +Le(i13,i990). +Le(i26,i990). +Le(i39,i990). +Le(i52,i990). +Le(i60,i990). +Le(i65,i990). +Le(i70,i990). +Le(i78,i990). +Le(i90,i990). +Le(i91,i990). +Le(i104,i990). +Le(i117,i990). +Le(i130,i990). +Le(i143,i990). +Le(i156,i990). +Le(i169,i990). +Le(i182,i990). +Le(i195,i990). +Le(i208,i990). +Le(i221,i990). +Le(i234,i990). +Le(i247,i990). +Le(i260,i990). +Le(i460,i990). +Le(i530,i990). +Le(i600,i990). +Le(i660,i990). +Le(i670,i990). +Le(i710,i990). +Le(i740,i990). +Le(i810,i990). +Le(i850,i990). +Le(i880,i990). +Le(i890,i990). +Le(i920,i990). +Le(i960,i990). +Le(i-30,i1030). +Le(i0,i1030). +Le(i13,i1030). +Le(i26,i1030). +Le(i39,i1030). +Le(i52,i1030). +Le(i60,i1030). +Le(i65,i1030). +Le(i70,i1030). +Le(i78,i1030). +Le(i90,i1030). +Le(i91,i1030). +Le(i104,i1030). +Le(i117,i1030). +Le(i130,i1030). +Le(i143,i1030). +Le(i156,i1030). +Le(i169,i1030). +Le(i182,i1030). +Le(i195,i1030). +Le(i208,i1030). +Le(i221,i1030). +Le(i234,i1030). +Le(i247,i1030). +Le(i260,i1030). +Le(i460,i1030). +Le(i530,i1030). +Le(i600,i1030). +Le(i660,i1030). +Le(i670,i1030). +Le(i710,i1030). +Le(i740,i1030). +Le(i810,i1030). +Le(i850,i1030). +Le(i880,i1030). +Le(i890,i1030). +Le(i920,i1030). +Le(i960,i1030). +Le(i990,i1030). +Le(i-30,i1060). +Le(i0,i1060). +Le(i13,i1060). +Le(i26,i1060). +Le(i39,i1060). +Le(i52,i1060). +Le(i60,i1060). +Le(i65,i1060). +Le(i70,i1060). +Le(i78,i1060). +Le(i90,i1060). +Le(i91,i1060). +Le(i104,i1060). +Le(i117,i1060). +Le(i130,i1060). +Le(i143,i1060). +Le(i156,i1060). +Le(i169,i1060). +Le(i182,i1060). +Le(i195,i1060). +Le(i208,i1060). +Le(i221,i1060). +Le(i234,i1060). +Le(i247,i1060). +Le(i260,i1060). +Le(i460,i1060). +Le(i530,i1060). +Le(i600,i1060). +Le(i660,i1060). +Le(i670,i1060). +Le(i710,i1060). +Le(i740,i1060). +Le(i810,i1060). +Le(i850,i1060). +Le(i880,i1060). +Le(i890,i1060). +Le(i920,i1060). +Le(i960,i1060). +Le(i990,i1060). +Le(i1030,i1060). +Le(i-30,i1100). +Le(i0,i1100). +Le(i13,i1100). +Le(i26,i1100). +Le(i39,i1100). +Le(i52,i1100). +Le(i60,i1100). +Le(i65,i1100). +Le(i70,i1100). +Le(i78,i1100). +Le(i90,i1100). +Le(i91,i1100). +Le(i104,i1100). +Le(i117,i1100). +Le(i130,i1100). +Le(i143,i1100). +Le(i156,i1100). +Le(i169,i1100). +Le(i182,i1100). +Le(i195,i1100). +Le(i208,i1100). +Le(i221,i1100). +Le(i234,i1100). 
+Le(i247,i1100). +Le(i260,i1100). +Le(i460,i1100). +Le(i530,i1100). +Le(i600,i1100). +Le(i660,i1100). +Le(i670,i1100). +Le(i710,i1100). +Le(i740,i1100). +Le(i810,i1100). +Le(i850,i1100). +Le(i880,i1100). +Le(i890,i1100). +Le(i920,i1100). +Le(i960,i1100). +Le(i990,i1100). +Le(i1030,i1100). +Le(i1060,i1100). +Le(i-30,i1130). +Le(i0,i1130). +Le(i13,i1130). +Le(i26,i1130). +Le(i39,i1130). +Le(i52,i1130). +Le(i60,i1130). +Le(i65,i1130). +Le(i70,i1130). +Le(i78,i1130). +Le(i90,i1130). +Le(i91,i1130). +Le(i104,i1130). +Le(i117,i1130). +Le(i130,i1130). +Le(i143,i1130). +Le(i156,i1130). +Le(i169,i1130). +Le(i182,i1130). +Le(i195,i1130). +Le(i208,i1130). +Le(i221,i1130). +Le(i234,i1130). +Le(i247,i1130). +Le(i260,i1130). +Le(i460,i1130). +Le(i530,i1130). +Le(i600,i1130). +Le(i660,i1130). +Le(i670,i1130). +Le(i710,i1130). +Le(i740,i1130). +Le(i810,i1130). +Le(i850,i1130). +Le(i880,i1130). +Le(i890,i1130). +Le(i920,i1130). +Le(i960,i1130). +Le(i990,i1130). +Le(i1030,i1130). +Le(i1060,i1130). +Le(i1100,i1130). +Le(i-30,i1170). +Le(i0,i1170). +Le(i13,i1170). +Le(i26,i1170). +Le(i39,i1170). +Le(i52,i1170). +Le(i60,i1170). +Le(i65,i1170). +Le(i70,i1170). +Le(i78,i1170). +Le(i90,i1170). +Le(i91,i1170). +Le(i104,i1170). +Le(i117,i1170). +Le(i130,i1170). +Le(i143,i1170). +Le(i156,i1170). +Le(i169,i1170). +Le(i182,i1170). +Le(i195,i1170). +Le(i208,i1170). +Le(i221,i1170). +Le(i234,i1170). +Le(i247,i1170). +Le(i260,i1170). +Le(i460,i1170). +Le(i530,i1170). +Le(i600,i1170). +Le(i660,i1170). +Le(i670,i1170). +Le(i710,i1170). +Le(i740,i1170). +Le(i810,i1170). +Le(i850,i1170). +Le(i880,i1170). +Le(i890,i1170). +Le(i920,i1170). +Le(i960,i1170). +Le(i990,i1170). +Le(i1030,i1170). +Le(i1060,i1170). +Le(i1100,i1170). +Le(i1130,i1170). +Le(i-30,i1200). +Le(i0,i1200). +Le(i13,i1200). +Le(i26,i1200). +Le(i39,i1200). +Le(i52,i1200). +Le(i60,i1200). +Le(i65,i1200). +Le(i70,i1200). +Le(i78,i1200). +Le(i90,i1200). +Le(i91,i1200). +Le(i104,i1200). +Le(i117,i1200). +Le(i130,i1200). +Le(i143,i1200). +Le(i156,i1200). +Le(i169,i1200). +Le(i182,i1200). +Le(i195,i1200). +Le(i208,i1200). +Le(i221,i1200). +Le(i234,i1200). +Le(i247,i1200). +Le(i260,i1200). +Le(i460,i1200). +Le(i530,i1200). +Le(i600,i1200). +Le(i660,i1200). +Le(i670,i1200). +Le(i710,i1200). +Le(i740,i1200). +Le(i810,i1200). +Le(i850,i1200). +Le(i880,i1200). +Le(i890,i1200). +Le(i920,i1200). +Le(i960,i1200). +Le(i990,i1200). +Le(i1030,i1200). +Le(i1060,i1200). +Le(i1100,i1200). +Le(i1130,i1200). +Le(i1170,i1200). +Le(i-30,i1240). +Le(i0,i1240). +Le(i13,i1240). +Le(i26,i1240). +Le(i39,i1240). +Le(i52,i1240). +Le(i60,i1240). +Le(i65,i1240). +Le(i70,i1240). +Le(i78,i1240). +Le(i90,i1240). +Le(i91,i1240). +Le(i104,i1240). +Le(i117,i1240). +Le(i130,i1240). +Le(i143,i1240). +Le(i156,i1240). +Le(i169,i1240). +Le(i182,i1240). +Le(i195,i1240). +Le(i208,i1240). +Le(i221,i1240). +Le(i234,i1240). +Le(i247,i1240). +Le(i260,i1240). +Le(i460,i1240). +Le(i530,i1240). +Le(i600,i1240). +Le(i660,i1240). +Le(i670,i1240). +Le(i710,i1240). +Le(i740,i1240). +Le(i810,i1240). +Le(i850,i1240). +Le(i880,i1240). +Le(i890,i1240). +Le(i920,i1240). +Le(i960,i1240). +Le(i990,i1240). +Le(i1030,i1240). +Le(i1060,i1240). +Le(i1100,i1240). +Le(i1130,i1240). +Le(i1170,i1240). +Le(i1200,i1240). +Le(i-30,i1260). +Le(i0,i1260). +Le(i13,i1260). +Le(i26,i1260). +Le(i39,i1260). +Le(i52,i1260). +Le(i60,i1260). +Le(i65,i1260). +Le(i70,i1260). +Le(i78,i1260). +Le(i90,i1260). +Le(i91,i1260). +Le(i104,i1260). +Le(i117,i1260). +Le(i130,i1260). +Le(i143,i1260). +Le(i156,i1260). +Le(i169,i1260). +Le(i182,i1260). 
+Le(i195,i1260). +Le(i208,i1260). +Le(i221,i1260). +Le(i234,i1260). +Le(i247,i1260). +Le(i260,i1260). +Le(i460,i1260). +Le(i530,i1260). +Le(i600,i1260). +Le(i660,i1260). +Le(i670,i1260). +Le(i710,i1260). +Le(i740,i1260). +Le(i810,i1260). +Le(i850,i1260). +Le(i880,i1260). +Le(i890,i1260). +Le(i920,i1260). +Le(i960,i1260). +Le(i990,i1260). +Le(i1030,i1260). +Le(i1060,i1260). +Le(i1100,i1260). +Le(i1130,i1260). +Le(i1170,i1260). +Le(i1200,i1260). +Le(i1240,i1260). +Le(i-30,i1270). +Le(i0,i1270). +Le(i13,i1270). +Le(i26,i1270). +Le(i39,i1270). +Le(i52,i1270). +Le(i60,i1270). +Le(i65,i1270). +Le(i70,i1270). +Le(i78,i1270). +Le(i90,i1270). +Le(i91,i1270). +Le(i104,i1270). +Le(i117,i1270). +Le(i130,i1270). +Le(i143,i1270). +Le(i156,i1270). +Le(i169,i1270). +Le(i182,i1270). +Le(i195,i1270). +Le(i208,i1270). +Le(i221,i1270). +Le(i234,i1270). +Le(i247,i1270). +Le(i260,i1270). +Le(i460,i1270). +Le(i530,i1270). +Le(i600,i1270). +Le(i660,i1270). +Le(i670,i1270). +Le(i710,i1270). +Le(i740,i1270). +Le(i810,i1270). +Le(i850,i1270). +Le(i880,i1270). +Le(i890,i1270). +Le(i920,i1270). +Le(i960,i1270). +Le(i990,i1270). +Le(i1030,i1270). +Le(i1060,i1270). +Le(i1100,i1270). +Le(i1130,i1270). +Le(i1170,i1270). +Le(i1200,i1270). +Le(i1240,i1270). +Le(i1260,i1270). +Le(i-30,i1290). +Le(i0,i1290). +Le(i13,i1290). +Le(i26,i1290). +Le(i39,i1290). +Le(i52,i1290). +Le(i60,i1290). +Le(i65,i1290). +Le(i70,i1290). +Le(i78,i1290). +Le(i90,i1290). +Le(i91,i1290). +Le(i104,i1290). +Le(i117,i1290). +Le(i130,i1290). +Le(i143,i1290). +Le(i156,i1290). +Le(i169,i1290). +Le(i182,i1290). +Le(i195,i1290). +Le(i208,i1290). +Le(i221,i1290). +Le(i234,i1290). +Le(i247,i1290). +Le(i260,i1290). +Le(i460,i1290). +Le(i530,i1290). +Le(i600,i1290). +Le(i660,i1290). +Le(i670,i1290). +Le(i710,i1290). +Le(i740,i1290). +Le(i810,i1290). +Le(i850,i1290). +Le(i880,i1290). +Le(i890,i1290). +Le(i920,i1290). +Le(i960,i1290). +Le(i990,i1290). +Le(i1030,i1290). +Le(i1060,i1290). +Le(i1100,i1290). +Le(i1130,i1290). +Le(i1170,i1290). +Le(i1200,i1290). +Le(i1240,i1290). +Le(i1260,i1290). +Le(i1270,i1290). +Le(i-30,i1310). +Le(i0,i1310). +Le(i13,i1310). +Le(i26,i1310). +Le(i39,i1310). +Le(i52,i1310). +Le(i60,i1310). +Le(i65,i1310). +Le(i70,i1310). +Le(i78,i1310). +Le(i90,i1310). +Le(i91,i1310). +Le(i104,i1310). +Le(i117,i1310). +Le(i130,i1310). +Le(i143,i1310). +Le(i156,i1310). +Le(i169,i1310). +Le(i182,i1310). +Le(i195,i1310). +Le(i208,i1310). +Le(i221,i1310). +Le(i234,i1310). +Le(i247,i1310). +Le(i260,i1310). +Le(i460,i1310). +Le(i530,i1310). +Le(i600,i1310). +Le(i660,i1310). +Le(i670,i1310). +Le(i710,i1310). +Le(i740,i1310). +Le(i810,i1310). +Le(i850,i1310). +Le(i880,i1310). +Le(i890,i1310). +Le(i920,i1310). +Le(i960,i1310). +Le(i990,i1310). +Le(i1030,i1310). +Le(i1060,i1310). +Le(i1100,i1310). +Le(i1130,i1310). +Le(i1170,i1310). +Le(i1200,i1310). +Le(i1240,i1310). +Le(i1260,i1310). +Le(i1270,i1310). +Le(i1290,i1310). +Le(i-30,i1320). +Le(i0,i1320). +Le(i13,i1320). +Le(i26,i1320). +Le(i39,i1320). +Le(i52,i1320). +Le(i60,i1320). +Le(i65,i1320). +Le(i70,i1320). +Le(i78,i1320). +Le(i90,i1320). +Le(i91,i1320). +Le(i104,i1320). +Le(i117,i1320). +Le(i130,i1320). +Le(i143,i1320). +Le(i156,i1320). +Le(i169,i1320). +Le(i182,i1320). +Le(i195,i1320). +Le(i208,i1320). +Le(i221,i1320). +Le(i234,i1320). +Le(i247,i1320). +Le(i260,i1320). +Le(i460,i1320). +Le(i530,i1320). +Le(i600,i1320). +Le(i660,i1320). +Le(i670,i1320). +Le(i710,i1320). +Le(i740,i1320). +Le(i810,i1320). +Le(i850,i1320). +Le(i880,i1320). +Le(i890,i1320). +Le(i920,i1320). +Le(i960,i1320). 
+Le(i990,i1320). +Le(i1030,i1320). +Le(i1060,i1320). +Le(i1100,i1320). +Le(i1130,i1320). +Le(i1170,i1320). +Le(i1200,i1320). +Le(i1240,i1320). +Le(i1260,i1320). +Le(i1270,i1320). +Le(i1290,i1320). +Le(i1310,i1320). +Le(i-30,i1330). +Le(i0,i1330). +Le(i13,i1330). +Le(i26,i1330). +Le(i39,i1330). +Le(i52,i1330). +Le(i60,i1330). +Le(i65,i1330). +Le(i70,i1330). +Le(i78,i1330). +Le(i90,i1330). +Le(i91,i1330). +Le(i104,i1330). +Le(i117,i1330). +Le(i130,i1330). +Le(i143,i1330). +Le(i156,i1330). +Le(i169,i1330). +Le(i182,i1330). +Le(i195,i1330). +Le(i208,i1330). +Le(i221,i1330). +Le(i234,i1330). +Le(i247,i1330). +Le(i260,i1330). +Le(i460,i1330). +Le(i530,i1330). +Le(i600,i1330). +Le(i660,i1330). +Le(i670,i1330). +Le(i710,i1330). +Le(i740,i1330). +Le(i810,i1330). +Le(i850,i1330). +Le(i880,i1330). +Le(i890,i1330). +Le(i920,i1330). +Le(i960,i1330). +Le(i990,i1330). +Le(i1030,i1330). +Le(i1060,i1330). +Le(i1100,i1330). +Le(i1130,i1330). +Le(i1170,i1330). +Le(i1200,i1330). +Le(i1240,i1330). +Le(i1260,i1330). +Le(i1270,i1330). +Le(i1290,i1330). +Le(i1310,i1330). +Le(i1320,i1330). +Le(i-30,i1350). +Le(i0,i1350). +Le(i13,i1350). +Le(i26,i1350). +Le(i39,i1350). +Le(i52,i1350). +Le(i60,i1350). +Le(i65,i1350). +Le(i70,i1350). +Le(i78,i1350). +Le(i90,i1350). +Le(i91,i1350). +Le(i104,i1350). +Le(i117,i1350). +Le(i130,i1350). +Le(i143,i1350). +Le(i156,i1350). +Le(i169,i1350). +Le(i182,i1350). +Le(i195,i1350). +Le(i208,i1350). +Le(i221,i1350). +Le(i234,i1350). +Le(i247,i1350). +Le(i260,i1350). +Le(i460,i1350). +Le(i530,i1350). +Le(i600,i1350). +Le(i660,i1350). +Le(i670,i1350). +Le(i710,i1350). +Le(i740,i1350). +Le(i810,i1350). +Le(i850,i1350). +Le(i880,i1350). +Le(i890,i1350). +Le(i920,i1350). +Le(i960,i1350). +Le(i990,i1350). +Le(i1030,i1350). +Le(i1060,i1350). +Le(i1100,i1350). +Le(i1130,i1350). +Le(i1170,i1350). +Le(i1200,i1350). +Le(i1240,i1350). +Le(i1260,i1350). +Le(i1270,i1350). +Le(i1290,i1350). +Le(i1310,i1350). +Le(i1320,i1350). +Le(i1330,i1350). +Le(i-30,i1360). +Le(i0,i1360). +Le(i13,i1360). +Le(i26,i1360). +Le(i39,i1360). +Le(i52,i1360). +Le(i60,i1360). +Le(i65,i1360). +Le(i70,i1360). +Le(i78,i1360). +Le(i90,i1360). +Le(i91,i1360). +Le(i104,i1360). +Le(i117,i1360). +Le(i130,i1360). +Le(i143,i1360). +Le(i156,i1360). +Le(i169,i1360). +Le(i182,i1360). +Le(i195,i1360). +Le(i208,i1360). +Le(i221,i1360). +Le(i234,i1360). +Le(i247,i1360). +Le(i260,i1360). +Le(i460,i1360). +Le(i530,i1360). +Le(i600,i1360). +Le(i660,i1360). +Le(i670,i1360). +Le(i710,i1360). +Le(i740,i1360). +Le(i810,i1360). +Le(i850,i1360). +Le(i880,i1360). +Le(i890,i1360). +Le(i920,i1360). +Le(i960,i1360). +Le(i990,i1360). +Le(i1030,i1360). +Le(i1060,i1360). +Le(i1100,i1360). +Le(i1130,i1360). +Le(i1170,i1360). +Le(i1200,i1360). +Le(i1240,i1360). +Le(i1260,i1360). +Le(i1270,i1360). +Le(i1290,i1360). +Le(i1310,i1360). +Le(i1320,i1360). +Le(i1330,i1360). +Le(i1350,i1360). +Le(i-30,i1380). +Le(i0,i1380). +Le(i13,i1380). +Le(i26,i1380). +Le(i39,i1380). +Le(i52,i1380). +Le(i60,i1380). +Le(i65,i1380). +Le(i70,i1380). +Le(i78,i1380). +Le(i90,i1380). +Le(i91,i1380). +Le(i104,i1380). +Le(i117,i1380). +Le(i130,i1380). +Le(i143,i1380). +Le(i156,i1380). +Le(i169,i1380). +Le(i182,i1380). +Le(i195,i1380). +Le(i208,i1380). +Le(i221,i1380). +Le(i234,i1380). +Le(i247,i1380). +Le(i260,i1380). +Le(i460,i1380). +Le(i530,i1380). +Le(i600,i1380). +Le(i660,i1380). +Le(i670,i1380). +Le(i710,i1380). +Le(i740,i1380). +Le(i810,i1380). +Le(i850,i1380). +Le(i880,i1380). +Le(i890,i1380). +Le(i920,i1380). +Le(i960,i1380). +Le(i990,i1380). +Le(i1030,i1380). 
+Le(i1060,i1380). +Le(i1100,i1380). +Le(i1130,i1380). +Le(i1170,i1380). +Le(i1200,i1380). +Le(i1240,i1380). +Le(i1260,i1380). +Le(i1270,i1380). +Le(i1290,i1380). +Le(i1310,i1380). +Le(i1320,i1380). +Le(i1330,i1380). +Le(i1350,i1380). +Le(i1360,i1380). +Le(i-30,i1390). +Le(i0,i1390). +Le(i13,i1390). +Le(i26,i1390). +Le(i39,i1390). +Le(i52,i1390). +Le(i60,i1390). +Le(i65,i1390). +Le(i70,i1390). +Le(i78,i1390). +Le(i90,i1390). +Le(i91,i1390). +Le(i104,i1390). +Le(i117,i1390). +Le(i130,i1390). +Le(i143,i1390). +Le(i156,i1390). +Le(i169,i1390). +Le(i182,i1390). +Le(i195,i1390). +Le(i208,i1390). +Le(i221,i1390). +Le(i234,i1390). +Le(i247,i1390). +Le(i260,i1390). +Le(i460,i1390). +Le(i530,i1390). +Le(i600,i1390). +Le(i660,i1390). +Le(i670,i1390). +Le(i710,i1390). +Le(i740,i1390). +Le(i810,i1390). +Le(i850,i1390). +Le(i880,i1390). +Le(i890,i1390). +Le(i920,i1390). +Le(i960,i1390). +Le(i990,i1390). +Le(i1030,i1390). +Le(i1060,i1390). +Le(i1100,i1390). +Le(i1130,i1390). +Le(i1170,i1390). +Le(i1200,i1390). +Le(i1240,i1390). +Le(i1260,i1390). +Le(i1270,i1390). +Le(i1290,i1390). +Le(i1310,i1390). +Le(i1320,i1390). +Le(i1330,i1390). +Le(i1350,i1390). +Le(i1360,i1390). +Le(i1380,i1390). +Le(i-30,i1420). +Le(i0,i1420). +Le(i13,i1420). +Le(i26,i1420). +Le(i39,i1420). +Le(i52,i1420). +Le(i60,i1420). +Le(i65,i1420). +Le(i70,i1420). +Le(i78,i1420). +Le(i90,i1420). +Le(i91,i1420). +Le(i104,i1420). +Le(i117,i1420). +Le(i130,i1420). +Le(i143,i1420). +Le(i156,i1420). +Le(i169,i1420). +Le(i182,i1420). +Le(i195,i1420). +Le(i208,i1420). +Le(i221,i1420). +Le(i234,i1420). +Le(i247,i1420). +Le(i260,i1420). +Le(i460,i1420). +Le(i530,i1420). +Le(i600,i1420). +Le(i660,i1420). +Le(i670,i1420). +Le(i710,i1420). +Le(i740,i1420). +Le(i810,i1420). +Le(i850,i1420). +Le(i880,i1420). +Le(i890,i1420). +Le(i920,i1420). +Le(i960,i1420). +Le(i990,i1420). +Le(i1030,i1420). +Le(i1060,i1420). +Le(i1100,i1420). +Le(i1130,i1420). +Le(i1170,i1420). +Le(i1200,i1420). +Le(i1240,i1420). +Le(i1260,i1420). +Le(i1270,i1420). +Le(i1290,i1420). +Le(i1310,i1420). +Le(i1320,i1420). +Le(i1330,i1420). +Le(i1350,i1420). +Le(i1360,i1420). +Le(i1380,i1420). +Le(i1390,i1420). +Le(i-30,i1430). +Le(i0,i1430). +Le(i13,i1430). +Le(i26,i1430). +Le(i39,i1430). +Le(i52,i1430). +Le(i60,i1430). +Le(i65,i1430). +Le(i70,i1430). +Le(i78,i1430). +Le(i90,i1430). +Le(i91,i1430). +Le(i104,i1430). +Le(i117,i1430). +Le(i130,i1430). +Le(i143,i1430). +Le(i156,i1430). +Le(i169,i1430). +Le(i182,i1430). +Le(i195,i1430). +Le(i208,i1430). +Le(i221,i1430). +Le(i234,i1430). +Le(i247,i1430). +Le(i260,i1430). +Le(i460,i1430). +Le(i530,i1430). +Le(i600,i1430). +Le(i660,i1430). +Le(i670,i1430). +Le(i710,i1430). +Le(i740,i1430). +Le(i810,i1430). +Le(i850,i1430). +Le(i880,i1430). +Le(i890,i1430). +Le(i920,i1430). +Le(i960,i1430). +Le(i990,i1430). +Le(i1030,i1430). +Le(i1060,i1430). +Le(i1100,i1430). +Le(i1130,i1430). +Le(i1170,i1430). +Le(i1200,i1430). +Le(i1240,i1430). +Le(i1260,i1430). +Le(i1270,i1430). +Le(i1290,i1430). +Le(i1310,i1430). +Le(i1320,i1430). +Le(i1330,i1430). +Le(i1350,i1430). +Le(i1360,i1430). +Le(i1380,i1430). +Le(i1390,i1430). +Le(i1420,i1430). +Le(i-30,i1450). +Le(i0,i1450). +Le(i13,i1450). +Le(i26,i1450). +Le(i39,i1450). +Le(i52,i1450). +Le(i60,i1450). +Le(i65,i1450). +Le(i70,i1450). +Le(i78,i1450). +Le(i90,i1450). +Le(i91,i1450). +Le(i104,i1450). +Le(i117,i1450). +Le(i130,i1450). +Le(i143,i1450). +Le(i156,i1450). +Le(i169,i1450). +Le(i182,i1450). +Le(i195,i1450). +Le(i208,i1450). +Le(i221,i1450). +Le(i234,i1450). +Le(i247,i1450). +Le(i260,i1450). +Le(i460,i1450). 
+Le(i530,i1450). +Le(i600,i1450). +Le(i660,i1450). +Le(i670,i1450). +Le(i710,i1450). +Le(i740,i1450). +Le(i810,i1450). +Le(i850,i1450). +Le(i880,i1450). +Le(i890,i1450). +Le(i920,i1450). +Le(i960,i1450). +Le(i990,i1450). +Le(i1030,i1450). +Le(i1060,i1450). +Le(i1100,i1450). +Le(i1130,i1450). +Le(i1170,i1450). +Le(i1200,i1450). +Le(i1240,i1450). +Le(i1260,i1450). +Le(i1270,i1450). +Le(i1290,i1450). +Le(i1310,i1450). +Le(i1320,i1450). +Le(i1330,i1450). +Le(i1350,i1450). +Le(i1360,i1450). +Le(i1380,i1450). +Le(i1390,i1450). +Le(i1420,i1450). +Le(i1430,i1450). +Le(i-30,i1460). +Le(i0,i1460). +Le(i13,i1460). +Le(i26,i1460). +Le(i39,i1460). +Le(i52,i1460). +Le(i60,i1460). +Le(i65,i1460). +Le(i70,i1460). +Le(i78,i1460). +Le(i90,i1460). +Le(i91,i1460). +Le(i104,i1460). +Le(i117,i1460). +Le(i130,i1460). +Le(i143,i1460). +Le(i156,i1460). +Le(i169,i1460). +Le(i182,i1460). +Le(i195,i1460). +Le(i208,i1460). +Le(i221,i1460). +Le(i234,i1460). +Le(i247,i1460). +Le(i260,i1460). +Le(i460,i1460). +Le(i530,i1460). +Le(i600,i1460). +Le(i660,i1460). +Le(i670,i1460). +Le(i710,i1460). +Le(i740,i1460). +Le(i810,i1460). +Le(i850,i1460). +Le(i880,i1460). +Le(i890,i1460). +Le(i920,i1460). +Le(i960,i1460). +Le(i990,i1460). +Le(i1030,i1460). +Le(i1060,i1460). +Le(i1100,i1460). +Le(i1130,i1460). +Le(i1170,i1460). +Le(i1200,i1460). +Le(i1240,i1460). +Le(i1260,i1460). +Le(i1270,i1460). +Le(i1290,i1460). +Le(i1310,i1460). +Le(i1320,i1460). +Le(i1330,i1460). +Le(i1350,i1460). +Le(i1360,i1460). +Le(i1380,i1460). +Le(i1390,i1460). +Le(i1420,i1460). +Le(i1430,i1460). +Le(i1450,i1460). +Le(i-30,i1490). +Le(i0,i1490). +Le(i13,i1490). +Le(i26,i1490). +Le(i39,i1490). +Le(i52,i1490). +Le(i60,i1490). +Le(i65,i1490). +Le(i70,i1490). +Le(i78,i1490). +Le(i90,i1490). +Le(i91,i1490). +Le(i104,i1490). +Le(i117,i1490). +Le(i130,i1490). +Le(i143,i1490). +Le(i156,i1490). +Le(i169,i1490). +Le(i182,i1490). +Le(i195,i1490). +Le(i208,i1490). +Le(i221,i1490). +Le(i234,i1490). +Le(i247,i1490). +Le(i260,i1490). +Le(i460,i1490). +Le(i530,i1490). +Le(i600,i1490). +Le(i660,i1490). +Le(i670,i1490). +Le(i710,i1490). +Le(i740,i1490). +Le(i810,i1490). +Le(i850,i1490). +Le(i880,i1490). +Le(i890,i1490). +Le(i920,i1490). +Le(i960,i1490). +Le(i990,i1490). +Le(i1030,i1490). +Le(i1060,i1490). +Le(i1100,i1490). +Le(i1130,i1490). +Le(i1170,i1490). +Le(i1200,i1490). +Le(i1240,i1490). +Le(i1260,i1490). +Le(i1270,i1490). +Le(i1290,i1490). +Le(i1310,i1490). +Le(i1320,i1490). +Le(i1330,i1490). +Le(i1350,i1490). +Le(i1360,i1490). +Le(i1380,i1490). +Le(i1390,i1490). +Le(i1420,i1490). +Le(i1430,i1490). +Le(i1450,i1490). +Le(i1460,i1490). +Le(i-30,i1520). +Le(i0,i1520). +Le(i13,i1520). +Le(i26,i1520). +Le(i39,i1520). +Le(i52,i1520). +Le(i60,i1520). +Le(i65,i1520). +Le(i70,i1520). +Le(i78,i1520). +Le(i90,i1520). +Le(i91,i1520). +Le(i104,i1520). +Le(i117,i1520). +Le(i130,i1520). +Le(i143,i1520). +Le(i156,i1520). +Le(i169,i1520). +Le(i182,i1520). +Le(i195,i1520). +Le(i208,i1520). +Le(i221,i1520). +Le(i234,i1520). +Le(i247,i1520). +Le(i260,i1520). +Le(i460,i1520). +Le(i530,i1520). +Le(i600,i1520). +Le(i660,i1520). +Le(i670,i1520). +Le(i710,i1520). +Le(i740,i1520). +Le(i810,i1520). +Le(i850,i1520). +Le(i880,i1520). +Le(i890,i1520). +Le(i920,i1520). +Le(i960,i1520). +Le(i990,i1520). +Le(i1030,i1520). +Le(i1060,i1520). +Le(i1100,i1520). +Le(i1130,i1520). +Le(i1170,i1520). +Le(i1200,i1520). +Le(i1240,i1520). +Le(i1260,i1520). +Le(i1270,i1520). +Le(i1290,i1520). +Le(i1310,i1520). +Le(i1320,i1520). +Le(i1330,i1520). +Le(i1350,i1520). +Le(i1360,i1520). +Le(i1380,i1520). 
+Le(i1390,i1520). +Le(i1420,i1520). +Le(i1430,i1520). +Le(i1450,i1520). +Le(i1460,i1520). +Le(i1490,i1520). +Le(i-30,i1530). +Le(i0,i1530). +Le(i13,i1530). +Le(i26,i1530). +Le(i39,i1530). +Le(i52,i1530). +Le(i60,i1530). +Le(i65,i1530). +Le(i70,i1530). +Le(i78,i1530). +Le(i90,i1530). +Le(i91,i1530). +Le(i104,i1530). +Le(i117,i1530). +Le(i130,i1530). +Le(i143,i1530). +Le(i156,i1530). +Le(i169,i1530). +Le(i182,i1530). +Le(i195,i1530). +Le(i208,i1530). +Le(i221,i1530). +Le(i234,i1530). +Le(i247,i1530). +Le(i260,i1530). +Le(i460,i1530). +Le(i530,i1530). +Le(i600,i1530). +Le(i660,i1530). +Le(i670,i1530). +Le(i710,i1530). +Le(i740,i1530). +Le(i810,i1530). +Le(i850,i1530). +Le(i880,i1530). +Le(i890,i1530). +Le(i920,i1530). +Le(i960,i1530). +Le(i990,i1530). +Le(i1030,i1530). +Le(i1060,i1530). +Le(i1100,i1530). +Le(i1130,i1530). +Le(i1170,i1530). +Le(i1200,i1530). +Le(i1240,i1530). +Le(i1260,i1530). +Le(i1270,i1530). +Le(i1290,i1530). +Le(i1310,i1530). +Le(i1320,i1530). +Le(i1330,i1530). +Le(i1350,i1530). +Le(i1360,i1530). +Le(i1380,i1530). +Le(i1390,i1530). +Le(i1420,i1530). +Le(i1430,i1530). +Le(i1450,i1530). +Le(i1460,i1530). +Le(i1490,i1530). +Le(i1520,i1530). +Le(i-30,i1540). +Le(i0,i1540). +Le(i13,i1540). +Le(i26,i1540). +Le(i39,i1540). +Le(i52,i1540). +Le(i60,i1540). +Le(i65,i1540). +Le(i70,i1540). +Le(i78,i1540). +Le(i90,i1540). +Le(i91,i1540). +Le(i104,i1540). +Le(i117,i1540). +Le(i130,i1540). +Le(i143,i1540). +Le(i156,i1540). +Le(i169,i1540). +Le(i182,i1540). +Le(i195,i1540). +Le(i208,i1540). +Le(i221,i1540). +Le(i234,i1540). +Le(i247,i1540). +Le(i260,i1540). +Le(i460,i1540). +Le(i530,i1540). +Le(i600,i1540). +Le(i660,i1540). +Le(i670,i1540). +Le(i710,i1540). +Le(i740,i1540). +Le(i810,i1540). +Le(i850,i1540). +Le(i880,i1540). +Le(i890,i1540). +Le(i920,i1540). +Le(i960,i1540). +Le(i990,i1540). +Le(i1030,i1540). +Le(i1060,i1540). +Le(i1100,i1540). +Le(i1130,i1540). +Le(i1170,i1540). +Le(i1200,i1540). +Le(i1240,i1540). +Le(i1260,i1540). +Le(i1270,i1540). +Le(i1290,i1540). +Le(i1310,i1540). +Le(i1320,i1540). +Le(i1330,i1540). +Le(i1350,i1540). +Le(i1360,i1540). +Le(i1380,i1540). +Le(i1390,i1540). +Le(i1420,i1540). +Le(i1430,i1540). +Le(i1450,i1540). +Le(i1460,i1540). +Le(i1490,i1540). +Le(i1520,i1540). +Le(i1530,i1540). +Le(i-30,i1560). +Le(i0,i1560). +Le(i13,i1560). +Le(i26,i1560). +Le(i39,i1560). +Le(i52,i1560). +Le(i60,i1560). +Le(i65,i1560). +Le(i70,i1560). +Le(i78,i1560). +Le(i90,i1560). +Le(i91,i1560). +Le(i104,i1560). +Le(i117,i1560). +Le(i130,i1560). +Le(i143,i1560). +Le(i156,i1560). +Le(i169,i1560). +Le(i182,i1560). +Le(i195,i1560). +Le(i208,i1560). +Le(i221,i1560). +Le(i234,i1560). +Le(i247,i1560). +Le(i260,i1560). +Le(i460,i1560). +Le(i530,i1560). +Le(i600,i1560). +Le(i660,i1560). +Le(i670,i1560). +Le(i710,i1560). +Le(i740,i1560). +Le(i810,i1560). +Le(i850,i1560). +Le(i880,i1560). +Le(i890,i1560). +Le(i920,i1560). +Le(i960,i1560). +Le(i990,i1560). +Le(i1030,i1560). +Le(i1060,i1560). +Le(i1100,i1560). +Le(i1130,i1560). +Le(i1170,i1560). +Le(i1200,i1560). +Le(i1240,i1560). +Le(i1260,i1560). +Le(i1270,i1560). +Le(i1290,i1560). +Le(i1310,i1560). +Le(i1320,i1560). +Le(i1330,i1560). +Le(i1350,i1560). +Le(i1360,i1560). +Le(i1380,i1560). +Le(i1390,i1560). +Le(i1420,i1560). +Le(i1430,i1560). +Le(i1450,i1560). +Le(i1460,i1560). +Le(i1490,i1560). +Le(i1520,i1560). +Le(i1530,i1560). +Le(i1540,i1560). +Le(i-30,i1590). +Le(i0,i1590). +Le(i13,i1590). +Le(i26,i1590). +Le(i39,i1590). +Le(i52,i1590). +Le(i60,i1590). +Le(i65,i1590). +Le(i70,i1590). +Le(i78,i1590). +Le(i90,i1590). +Le(i91,i1590). 
+Le(i104,i1590). +Le(i117,i1590). +Le(i130,i1590). +Le(i143,i1590). +Le(i156,i1590). +Le(i169,i1590). +Le(i182,i1590). +Le(i195,i1590). +Le(i208,i1590). +Le(i221,i1590). +Le(i234,i1590). +Le(i247,i1590). +Le(i260,i1590). +Le(i460,i1590). +Le(i530,i1590). +Le(i600,i1590). +Le(i660,i1590). +Le(i670,i1590). +Le(i710,i1590). +Le(i740,i1590). +Le(i810,i1590). +Le(i850,i1590). +Le(i880,i1590). +Le(i890,i1590). +Le(i920,i1590). +Le(i960,i1590). +Le(i990,i1590). +Le(i1030,i1590). +Le(i1060,i1590). +Le(i1100,i1590). +Le(i1130,i1590). +Le(i1170,i1590). +Le(i1200,i1590). +Le(i1240,i1590). +Le(i1260,i1590). +Le(i1270,i1590). +Le(i1290,i1590). +Le(i1310,i1590). +Le(i1320,i1590). +Le(i1330,i1590). +Le(i1350,i1590). +Le(i1360,i1590). +Le(i1380,i1590). +Le(i1390,i1590). +Le(i1420,i1590). +Le(i1430,i1590). +Le(i1450,i1590). +Le(i1460,i1590). +Le(i1490,i1590). +Le(i1520,i1590). +Le(i1530,i1590). +Le(i1540,i1590). +Le(i1560,i1590). +Le(i-30,i1630). +Le(i0,i1630). +Le(i13,i1630). +Le(i26,i1630). +Le(i39,i1630). +Le(i52,i1630). +Le(i60,i1630). +Le(i65,i1630). +Le(i70,i1630). +Le(i78,i1630). +Le(i90,i1630). +Le(i91,i1630). +Le(i104,i1630). +Le(i117,i1630). +Le(i130,i1630). +Le(i143,i1630). +Le(i156,i1630). +Le(i169,i1630). +Le(i182,i1630). +Le(i195,i1630). +Le(i208,i1630). +Le(i221,i1630). +Le(i234,i1630). +Le(i247,i1630). +Le(i260,i1630). +Le(i460,i1630). +Le(i530,i1630). +Le(i600,i1630). +Le(i660,i1630). +Le(i670,i1630). +Le(i710,i1630). +Le(i740,i1630). +Le(i810,i1630). +Le(i850,i1630). +Le(i880,i1630). +Le(i890,i1630). +Le(i920,i1630). +Le(i960,i1630). +Le(i990,i1630). +Le(i1030,i1630). +Le(i1060,i1630). +Le(i1100,i1630). +Le(i1130,i1630). +Le(i1170,i1630). +Le(i1200,i1630). +Le(i1240,i1630). +Le(i1260,i1630). +Le(i1270,i1630). +Le(i1290,i1630). +Le(i1310,i1630). +Le(i1320,i1630). +Le(i1330,i1630). +Le(i1350,i1630). +Le(i1360,i1630). +Le(i1380,i1630). +Le(i1390,i1630). +Le(i1420,i1630). +Le(i1430,i1630). +Le(i1450,i1630). +Le(i1460,i1630). +Le(i1490,i1630). +Le(i1520,i1630). +Le(i1530,i1630). +Le(i1540,i1630). +Le(i1560,i1630). +Le(i1590,i1630). +Le(i-30,i1660). +Le(i0,i1660). +Le(i13,i1660). +Le(i26,i1660). +Le(i39,i1660). +Le(i52,i1660). +Le(i60,i1660). +Le(i65,i1660). +Le(i70,i1660). +Le(i78,i1660). +Le(i90,i1660). +Le(i91,i1660). +Le(i104,i1660). +Le(i117,i1660). +Le(i130,i1660). +Le(i143,i1660). +Le(i156,i1660). +Le(i169,i1660). +Le(i182,i1660). +Le(i195,i1660). +Le(i208,i1660). +Le(i221,i1660). +Le(i234,i1660). +Le(i247,i1660). +Le(i260,i1660). +Le(i460,i1660). +Le(i530,i1660). +Le(i600,i1660). +Le(i660,i1660). +Le(i670,i1660). +Le(i710,i1660). +Le(i740,i1660). +Le(i810,i1660). +Le(i850,i1660). +Le(i880,i1660). +Le(i890,i1660). +Le(i920,i1660). +Le(i960,i1660). +Le(i990,i1660). +Le(i1030,i1660). +Le(i1060,i1660). +Le(i1100,i1660). +Le(i1130,i1660). +Le(i1170,i1660). +Le(i1200,i1660). +Le(i1240,i1660). +Le(i1260,i1660). +Le(i1270,i1660). +Le(i1290,i1660). +Le(i1310,i1660). +Le(i1320,i1660). +Le(i1330,i1660). +Le(i1350,i1660). +Le(i1360,i1660). +Le(i1380,i1660). +Le(i1390,i1660). +Le(i1420,i1660). +Le(i1430,i1660). +Le(i1450,i1660). +Le(i1460,i1660). +Le(i1490,i1660). +Le(i1520,i1660). +Le(i1530,i1660). +Le(i1540,i1660). +Le(i1560,i1660). +Le(i1590,i1660). +Le(i1630,i1660). +Le(i-30,i1700). +Le(i0,i1700). +Le(i13,i1700). +Le(i26,i1700). +Le(i39,i1700). +Le(i52,i1700). +Le(i60,i1700). +Le(i65,i1700). +Le(i70,i1700). +Le(i78,i1700). +Le(i90,i1700). +Le(i91,i1700). +Le(i104,i1700). +Le(i117,i1700). +Le(i130,i1700). +Le(i143,i1700). +Le(i156,i1700). +Le(i169,i1700). +Le(i182,i1700). +Le(i195,i1700). 
+Le(i208,i1700). +Le(i221,i1700). +Le(i234,i1700). +Le(i247,i1700). +Le(i260,i1700). +Le(i460,i1700). +Le(i530,i1700). +Le(i600,i1700). +Le(i660,i1700). +Le(i670,i1700). +Le(i710,i1700). +Le(i740,i1700). +Le(i810,i1700). +Le(i850,i1700). +Le(i880,i1700). +Le(i890,i1700). +Le(i920,i1700). +Le(i960,i1700). +Le(i990,i1700). +Le(i1030,i1700). +Le(i1060,i1700). +Le(i1100,i1700). +Le(i1130,i1700). +Le(i1170,i1700). +Le(i1200,i1700). +Le(i1240,i1700). +Le(i1260,i1700). +Le(i1270,i1700). +Le(i1290,i1700). +Le(i1310,i1700). +Le(i1320,i1700). +Le(i1330,i1700). +Le(i1350,i1700). +Le(i1360,i1700). +Le(i1380,i1700). +Le(i1390,i1700). +Le(i1420,i1700). +Le(i1430,i1700). +Le(i1450,i1700). +Le(i1460,i1700). +Le(i1490,i1700). +Le(i1520,i1700). +Le(i1530,i1700). +Le(i1540,i1700). +Le(i1560,i1700). +Le(i1590,i1700). +Le(i1630,i1700). +Le(i1660,i1700). +Le(i-30,i1730). +Le(i0,i1730). +Le(i13,i1730). +Le(i26,i1730). +Le(i39,i1730). +Le(i52,i1730). +Le(i60,i1730). +Le(i65,i1730). +Le(i70,i1730). +Le(i78,i1730). +Le(i90,i1730). +Le(i91,i1730). +Le(i104,i1730). +Le(i117,i1730). +Le(i130,i1730). +Le(i143,i1730). +Le(i156,i1730). +Le(i169,i1730). +Le(i182,i1730). +Le(i195,i1730). +Le(i208,i1730). +Le(i221,i1730). +Le(i234,i1730). +Le(i247,i1730). +Le(i260,i1730). +Le(i460,i1730). +Le(i530,i1730). +Le(i600,i1730). +Le(i660,i1730). +Le(i670,i1730). +Le(i710,i1730). +Le(i740,i1730). +Le(i810,i1730). +Le(i850,i1730). +Le(i880,i1730). +Le(i890,i1730). +Le(i920,i1730). +Le(i960,i1730). +Le(i990,i1730). +Le(i1030,i1730). +Le(i1060,i1730). +Le(i1100,i1730). +Le(i1130,i1730). +Le(i1170,i1730). +Le(i1200,i1730). +Le(i1240,i1730). +Le(i1260,i1730). +Le(i1270,i1730). +Le(i1290,i1730). +Le(i1310,i1730). +Le(i1320,i1730). +Le(i1330,i1730). +Le(i1350,i1730). +Le(i1360,i1730). +Le(i1380,i1730). +Le(i1390,i1730). +Le(i1420,i1730). +Le(i1430,i1730). +Le(i1450,i1730). +Le(i1460,i1730). +Le(i1490,i1730). +Le(i1520,i1730). +Le(i1530,i1730). +Le(i1540,i1730). +Le(i1560,i1730). +Le(i1590,i1730). +Le(i1630,i1730). +Le(i1660,i1730). +Le(i1700,i1730). +Le(i-30,i1760). +Le(i0,i1760). +Le(i13,i1760). +Le(i26,i1760). +Le(i39,i1760). +Le(i52,i1760). +Le(i60,i1760). +Le(i65,i1760). +Le(i70,i1760). +Le(i78,i1760). +Le(i90,i1760). +Le(i91,i1760). +Le(i104,i1760). +Le(i117,i1760). +Le(i130,i1760). +Le(i143,i1760). +Le(i156,i1760). +Le(i169,i1760). +Le(i182,i1760). +Le(i195,i1760). +Le(i208,i1760). +Le(i221,i1760). +Le(i234,i1760). +Le(i247,i1760). +Le(i260,i1760). +Le(i460,i1760). +Le(i530,i1760). +Le(i600,i1760). +Le(i660,i1760). +Le(i670,i1760). +Le(i710,i1760). +Le(i740,i1760). +Le(i810,i1760). +Le(i850,i1760). +Le(i880,i1760). +Le(i890,i1760). +Le(i920,i1760). +Le(i960,i1760). +Le(i990,i1760). +Le(i1030,i1760). +Le(i1060,i1760). +Le(i1100,i1760). +Le(i1130,i1760). +Le(i1170,i1760). +Le(i1200,i1760). +Le(i1240,i1760). +Le(i1260,i1760). +Le(i1270,i1760). +Le(i1290,i1760). +Le(i1310,i1760). +Le(i1320,i1760). +Le(i1330,i1760). +Le(i1350,i1760). +Le(i1360,i1760). +Le(i1380,i1760). +Le(i1390,i1760). +Le(i1420,i1760). +Le(i1430,i1760). +Le(i1450,i1760). +Le(i1460,i1760). +Le(i1490,i1760). +Le(i1520,i1760). +Le(i1530,i1760). +Le(i1540,i1760). +Le(i1560,i1760). +Le(i1590,i1760). +Le(i1630,i1760). +Le(i1660,i1760). +Le(i1700,i1760). +Le(i1730,i1760). +Le(i-30,i1770). +Le(i0,i1770). +Le(i13,i1770). +Le(i26,i1770). +Le(i39,i1770). +Le(i52,i1770). +Le(i60,i1770). +Le(i65,i1770). +Le(i70,i1770). +Le(i78,i1770). +Le(i90,i1770). +Le(i91,i1770). +Le(i104,i1770). +Le(i117,i1770). +Le(i130,i1770). +Le(i143,i1770). +Le(i156,i1770). +Le(i169,i1770). 
+Le(i182,i1770). +Le(i195,i1770). +Le(i208,i1770). +Le(i221,i1770). +Le(i234,i1770). +Le(i247,i1770). +Le(i260,i1770). +Le(i460,i1770). +Le(i530,i1770). +Le(i600,i1770). +Le(i660,i1770). +Le(i670,i1770). +Le(i710,i1770). +Le(i740,i1770). +Le(i810,i1770). +Le(i850,i1770). +Le(i880,i1770). +Le(i890,i1770). +Le(i920,i1770). +Le(i960,i1770). +Le(i990,i1770). +Le(i1030,i1770). +Le(i1060,i1770). +Le(i1100,i1770). +Le(i1130,i1770). +Le(i1170,i1770). +Le(i1200,i1770). +Le(i1240,i1770). +Le(i1260,i1770). +Le(i1270,i1770). +Le(i1290,i1770). +Le(i1310,i1770). +Le(i1320,i1770). +Le(i1330,i1770). +Le(i1350,i1770). +Le(i1360,i1770). +Le(i1380,i1770). +Le(i1390,i1770). +Le(i1420,i1770). +Le(i1430,i1770). +Le(i1450,i1770). +Le(i1460,i1770). +Le(i1490,i1770). +Le(i1520,i1770). +Le(i1530,i1770). +Le(i1540,i1770). +Le(i1560,i1770). +Le(i1590,i1770). +Le(i1630,i1770). +Le(i1660,i1770). +Le(i1700,i1770). +Le(i1730,i1770). +Le(i1760,i1770). +Le(i-30,i1810). +Le(i0,i1810). +Le(i13,i1810). +Le(i26,i1810). +Le(i39,i1810). +Le(i52,i1810). +Le(i60,i1810). +Le(i65,i1810). +Le(i70,i1810). +Le(i78,i1810). +Le(i90,i1810). +Le(i91,i1810). +Le(i104,i1810). +Le(i117,i1810). +Le(i130,i1810). +Le(i143,i1810). +Le(i156,i1810). +Le(i169,i1810). +Le(i182,i1810). +Le(i195,i1810). +Le(i208,i1810). +Le(i221,i1810). +Le(i234,i1810). +Le(i247,i1810). +Le(i260,i1810). +Le(i460,i1810). +Le(i530,i1810). +Le(i600,i1810). +Le(i660,i1810). +Le(i670,i1810). +Le(i710,i1810). +Le(i740,i1810). +Le(i810,i1810). +Le(i850,i1810). +Le(i880,i1810). +Le(i890,i1810). +Le(i920,i1810). +Le(i960,i1810). +Le(i990,i1810). +Le(i1030,i1810). +Le(i1060,i1810). +Le(i1100,i1810). +Le(i1130,i1810). +Le(i1170,i1810). +Le(i1200,i1810). +Le(i1240,i1810). +Le(i1260,i1810). +Le(i1270,i1810). +Le(i1290,i1810). +Le(i1310,i1810). +Le(i1320,i1810). +Le(i1330,i1810). +Le(i1350,i1810). +Le(i1360,i1810). +Le(i1380,i1810). +Le(i1390,i1810). +Le(i1420,i1810). +Le(i1430,i1810). +Le(i1450,i1810). +Le(i1460,i1810). +Le(i1490,i1810). +Le(i1520,i1810). +Le(i1530,i1810). +Le(i1540,i1810). +Le(i1560,i1810). +Le(i1590,i1810). +Le(i1630,i1810). +Le(i1660,i1810). +Le(i1700,i1810). +Le(i1730,i1810). +Le(i1760,i1810). +Le(i1770,i1810). +Le(i-30,i1840). +Le(i0,i1840). +Le(i13,i1840). +Le(i26,i1840). +Le(i39,i1840). +Le(i52,i1840). +Le(i60,i1840). +Le(i65,i1840). +Le(i70,i1840). +Le(i78,i1840). +Le(i90,i1840). +Le(i91,i1840). +Le(i104,i1840). +Le(i117,i1840). +Le(i130,i1840). +Le(i143,i1840). +Le(i156,i1840). +Le(i169,i1840). +Le(i182,i1840). +Le(i195,i1840). +Le(i208,i1840). +Le(i221,i1840). +Le(i234,i1840). +Le(i247,i1840). +Le(i260,i1840). +Le(i460,i1840). +Le(i530,i1840). +Le(i600,i1840). +Le(i660,i1840). +Le(i670,i1840). +Le(i710,i1840). +Le(i740,i1840). +Le(i810,i1840). +Le(i850,i1840). +Le(i880,i1840). +Le(i890,i1840). +Le(i920,i1840). +Le(i960,i1840). +Le(i990,i1840). +Le(i1030,i1840). +Le(i1060,i1840). +Le(i1100,i1840). +Le(i1130,i1840). +Le(i1170,i1840). +Le(i1200,i1840). +Le(i1240,i1840). +Le(i1260,i1840). +Le(i1270,i1840). +Le(i1290,i1840). +Le(i1310,i1840). +Le(i1320,i1840). +Le(i1330,i1840). +Le(i1350,i1840). +Le(i1360,i1840). +Le(i1380,i1840). +Le(i1390,i1840). +Le(i1420,i1840). +Le(i1430,i1840). +Le(i1450,i1840). +Le(i1460,i1840). +Le(i1490,i1840). +Le(i1520,i1840). +Le(i1530,i1840). +Le(i1540,i1840). +Le(i1560,i1840). +Le(i1590,i1840). +Le(i1630,i1840). +Le(i1660,i1840). +Le(i1700,i1840). +Le(i1730,i1840). +Le(i1760,i1840). +Le(i1770,i1840). +Le(i1810,i1840). +Le(i-30,i1880). +Le(i0,i1880). +Le(i13,i1880). +Le(i26,i1880). +Le(i39,i1880). +Le(i52,i1880). +Le(i60,i1880). 
+Le(i65,i1880). +Le(i70,i1880). +Le(i78,i1880). +Le(i90,i1880). +Le(i91,i1880). +Le(i104,i1880). +Le(i117,i1880). +Le(i130,i1880). +Le(i143,i1880). +Le(i156,i1880). +Le(i169,i1880). +Le(i182,i1880). +Le(i195,i1880). +Le(i208,i1880). +Le(i221,i1880). +Le(i234,i1880). +Le(i247,i1880). +Le(i260,i1880). +Le(i460,i1880). +Le(i530,i1880). +Le(i600,i1880). +Le(i660,i1880). +Le(i670,i1880). +Le(i710,i1880). +Le(i740,i1880). +Le(i810,i1880). +Le(i850,i1880). +Le(i880,i1880). +Le(i890,i1880). +Le(i920,i1880). +Le(i960,i1880). +Le(i990,i1880). +Le(i1030,i1880). +Le(i1060,i1880). +Le(i1100,i1880). +Le(i1130,i1880). +Le(i1170,i1880). +Le(i1200,i1880). +Le(i1240,i1880). +Le(i1260,i1880). +Le(i1270,i1880). +Le(i1290,i1880). +Le(i1310,i1880). +Le(i1320,i1880). +Le(i1330,i1880). +Le(i1350,i1880). +Le(i1360,i1880). +Le(i1380,i1880). +Le(i1390,i1880). +Le(i1420,i1880). +Le(i1430,i1880). +Le(i1450,i1880). +Le(i1460,i1880). +Le(i1490,i1880). +Le(i1520,i1880). +Le(i1530,i1880). +Le(i1540,i1880). +Le(i1560,i1880). +Le(i1590,i1880). +Le(i1630,i1880). +Le(i1660,i1880). +Le(i1700,i1880). +Le(i1730,i1880). +Le(i1760,i1880). +Le(i1770,i1880). +Le(i1810,i1880). +Le(i1840,i1880). +Le(i-30,i1910). +Le(i0,i1910). +Le(i13,i1910). +Le(i26,i1910). +Le(i39,i1910). +Le(i52,i1910). +Le(i60,i1910). +Le(i65,i1910). +Le(i70,i1910). +Le(i78,i1910). +Le(i90,i1910). +Le(i91,i1910). +Le(i104,i1910). +Le(i117,i1910). +Le(i130,i1910). +Le(i143,i1910). +Le(i156,i1910). +Le(i169,i1910). +Le(i182,i1910). +Le(i195,i1910). +Le(i208,i1910). +Le(i221,i1910). +Le(i234,i1910). +Le(i247,i1910). +Le(i260,i1910). +Le(i460,i1910). +Le(i530,i1910). +Le(i600,i1910). +Le(i660,i1910). +Le(i670,i1910). +Le(i710,i1910). +Le(i740,i1910). +Le(i810,i1910). +Le(i850,i1910). +Le(i880,i1910). +Le(i890,i1910). +Le(i920,i1910). +Le(i960,i1910). +Le(i990,i1910). +Le(i1030,i1910). +Le(i1060,i1910). +Le(i1100,i1910). +Le(i1130,i1910). +Le(i1170,i1910). +Le(i1200,i1910). +Le(i1240,i1910). +Le(i1260,i1910). +Le(i1270,i1910). +Le(i1290,i1910). +Le(i1310,i1910). +Le(i1320,i1910). +Le(i1330,i1910). +Le(i1350,i1910). +Le(i1360,i1910). +Le(i1380,i1910). +Le(i1390,i1910). +Le(i1420,i1910). +Le(i1430,i1910). +Le(i1450,i1910). +Le(i1460,i1910). +Le(i1490,i1910). +Le(i1520,i1910). +Le(i1530,i1910). +Le(i1540,i1910). +Le(i1560,i1910). +Le(i1590,i1910). +Le(i1630,i1910). +Le(i1660,i1910). +Le(i1700,i1910). +Le(i1730,i1910). +Le(i1760,i1910). +Le(i1770,i1910). +Le(i1810,i1910). +Le(i1840,i1910). +Le(i1880,i1910). +Le(i-30,i1950). +Le(i0,i1950). +Le(i13,i1950). +Le(i26,i1950). +Le(i39,i1950). +Le(i52,i1950). +Le(i60,i1950). +Le(i65,i1950). +Le(i70,i1950). +Le(i78,i1950). +Le(i90,i1950). +Le(i91,i1950). +Le(i104,i1950). +Le(i117,i1950). +Le(i130,i1950). +Le(i143,i1950). +Le(i156,i1950). +Le(i169,i1950). +Le(i182,i1950). +Le(i195,i1950). +Le(i208,i1950). +Le(i221,i1950). +Le(i234,i1950). +Le(i247,i1950). +Le(i260,i1950). +Le(i460,i1950). +Le(i530,i1950). +Le(i600,i1950). +Le(i660,i1950). +Le(i670,i1950). +Le(i710,i1950). +Le(i740,i1950). +Le(i810,i1950). +Le(i850,i1950). +Le(i880,i1950). +Le(i890,i1950). +Le(i920,i1950). +Le(i960,i1950). +Le(i990,i1950). +Le(i1030,i1950). +Le(i1060,i1950). +Le(i1100,i1950). +Le(i1130,i1950). +Le(i1170,i1950). +Le(i1200,i1950). +Le(i1240,i1950). +Le(i1260,i1950). +Le(i1270,i1950). +Le(i1290,i1950). +Le(i1310,i1950). +Le(i1320,i1950). +Le(i1330,i1950). +Le(i1350,i1950). +Le(i1360,i1950). +Le(i1380,i1950). +Le(i1390,i1950). +Le(i1420,i1950). +Le(i1430,i1950). +Le(i1450,i1950). +Le(i1460,i1950). +Le(i1490,i1950). +Le(i1520,i1950). +Le(i1530,i1950). 
+Le(i1540,i1950). +Le(i1560,i1950). +Le(i1590,i1950). +Le(i1630,i1950). +Le(i1660,i1950). +Le(i1700,i1950). +Le(i1730,i1950). +Le(i1760,i1950). +Le(i1770,i1950). +Le(i1810,i1950). +Le(i1840,i1950). +Le(i1880,i1950). +Le(i1910,i1950). +Le(i-30,i1980). +Le(i0,i1980). +Le(i13,i1980). +Le(i26,i1980). +Le(i39,i1980). +Le(i52,i1980). +Le(i60,i1980). +Le(i65,i1980). +Le(i70,i1980). +Le(i78,i1980). +Le(i90,i1980). +Le(i91,i1980). +Le(i104,i1980). +Le(i117,i1980). +Le(i130,i1980). +Le(i143,i1980). +Le(i156,i1980). +Le(i169,i1980). +Le(i182,i1980). +Le(i195,i1980). +Le(i208,i1980). +Le(i221,i1980). +Le(i234,i1980). +Le(i247,i1980). +Le(i260,i1980). +Le(i460,i1980). +Le(i530,i1980). +Le(i600,i1980). +Le(i660,i1980). +Le(i670,i1980). +Le(i710,i1980). +Le(i740,i1980). +Le(i810,i1980). +Le(i850,i1980). +Le(i880,i1980). +Le(i890,i1980). +Le(i920,i1980). +Le(i960,i1980). +Le(i990,i1980). +Le(i1030,i1980). +Le(i1060,i1980). +Le(i1100,i1980). +Le(i1130,i1980). +Le(i1170,i1980). +Le(i1200,i1980). +Le(i1240,i1980). +Le(i1260,i1980). +Le(i1270,i1980). +Le(i1290,i1980). +Le(i1310,i1980). +Le(i1320,i1980). +Le(i1330,i1980). +Le(i1350,i1980). +Le(i1360,i1980). +Le(i1380,i1980). +Le(i1390,i1980). +Le(i1420,i1980). +Le(i1430,i1980). +Le(i1450,i1980). +Le(i1460,i1980). +Le(i1490,i1980). +Le(i1520,i1980). +Le(i1530,i1980). +Le(i1540,i1980). +Le(i1560,i1980). +Le(i1590,i1980). +Le(i1630,i1980). +Le(i1660,i1980). +Le(i1700,i1980). +Le(i1730,i1980). +Le(i1760,i1980). +Le(i1770,i1980). +Le(i1810,i1980). +Le(i1840,i1980). +Le(i1880,i1980). +Le(i1910,i1980). +Le(i1950,i1980). +Le(i-30,i2020). +Le(i0,i2020). +Le(i13,i2020). +Le(i26,i2020). +Le(i39,i2020). +Le(i52,i2020). +Le(i60,i2020). +Le(i65,i2020). +Le(i70,i2020). +Le(i78,i2020). +Le(i90,i2020). +Le(i91,i2020). +Le(i104,i2020). +Le(i117,i2020). +Le(i130,i2020). +Le(i143,i2020). +Le(i156,i2020). +Le(i169,i2020). +Le(i182,i2020). +Le(i195,i2020). +Le(i208,i2020). +Le(i221,i2020). +Le(i234,i2020). +Le(i247,i2020). +Le(i260,i2020). +Le(i460,i2020). +Le(i530,i2020). +Le(i600,i2020). +Le(i660,i2020). +Le(i670,i2020). +Le(i710,i2020). +Le(i740,i2020). +Le(i810,i2020). +Le(i850,i2020). +Le(i880,i2020). +Le(i890,i2020). +Le(i920,i2020). +Le(i960,i2020). +Le(i990,i2020). +Le(i1030,i2020). +Le(i1060,i2020). +Le(i1100,i2020). +Le(i1130,i2020). +Le(i1170,i2020). +Le(i1200,i2020). +Le(i1240,i2020). +Le(i1260,i2020). +Le(i1270,i2020). +Le(i1290,i2020). +Le(i1310,i2020). +Le(i1320,i2020). +Le(i1330,i2020). +Le(i1350,i2020). +Le(i1360,i2020). +Le(i1380,i2020). +Le(i1390,i2020). +Le(i1420,i2020). +Le(i1430,i2020). +Le(i1450,i2020). +Le(i1460,i2020). +Le(i1490,i2020). +Le(i1520,i2020). +Le(i1530,i2020). +Le(i1540,i2020). +Le(i1560,i2020). +Le(i1590,i2020). +Le(i1630,i2020). +Le(i1660,i2020). +Le(i1700,i2020). +Le(i1730,i2020). +Le(i1760,i2020). +Le(i1770,i2020). +Le(i1810,i2020). +Le(i1840,i2020). +Le(i1880,i2020). +Le(i1910,i2020). +Le(i1950,i2020). +Le(i1980,i2020). +Le(i-30,i2050). +Le(i0,i2050). +Le(i13,i2050). +Le(i26,i2050). +Le(i39,i2050). +Le(i52,i2050). +Le(i60,i2050). +Le(i65,i2050). +Le(i70,i2050). +Le(i78,i2050). +Le(i90,i2050). +Le(i91,i2050). +Le(i104,i2050). +Le(i117,i2050). +Le(i130,i2050). +Le(i143,i2050). +Le(i156,i2050). +Le(i169,i2050). +Le(i182,i2050). +Le(i195,i2050). +Le(i208,i2050). +Le(i221,i2050). +Le(i234,i2050). +Le(i247,i2050). +Le(i260,i2050). +Le(i460,i2050). +Le(i530,i2050). +Le(i600,i2050). +Le(i660,i2050). +Le(i670,i2050). +Le(i710,i2050). +Le(i740,i2050). +Le(i810,i2050). +Le(i850,i2050). +Le(i880,i2050). +Le(i890,i2050). +Le(i920,i2050). 
+Le(i960,i2050). +Le(i990,i2050). +Le(i1030,i2050). +Le(i1060,i2050). +Le(i1100,i2050). +Le(i1130,i2050). +Le(i1170,i2050). +Le(i1200,i2050). +Le(i1240,i2050). +Le(i1260,i2050). +Le(i1270,i2050). +Le(i1290,i2050). +Le(i1310,i2050). +Le(i1320,i2050). +Le(i1330,i2050). +Le(i1350,i2050). +Le(i1360,i2050). +Le(i1380,i2050). +Le(i1390,i2050). +Le(i1420,i2050). +Le(i1430,i2050). +Le(i1450,i2050). +Le(i1460,i2050). +Le(i1490,i2050). +Le(i1520,i2050). +Le(i1530,i2050). +Le(i1540,i2050). +Le(i1560,i2050). +Le(i1590,i2050). +Le(i1630,i2050). +Le(i1660,i2050). +Le(i1700,i2050). +Le(i1730,i2050). +Le(i1760,i2050). +Le(i1770,i2050). +Le(i1810,i2050). +Le(i1840,i2050). +Le(i1880,i2050). +Le(i1910,i2050). +Le(i1950,i2050). +Le(i1980,i2050). +Le(i2020,i2050). +Le(i-30,i2090). +Le(i0,i2090). +Le(i13,i2090). +Le(i26,i2090). +Le(i39,i2090). +Le(i52,i2090). +Le(i60,i2090). +Le(i65,i2090). +Le(i70,i2090). +Le(i78,i2090). +Le(i90,i2090). +Le(i91,i2090). +Le(i104,i2090). +Le(i117,i2090). +Le(i130,i2090). +Le(i143,i2090). +Le(i156,i2090). +Le(i169,i2090). +Le(i182,i2090). +Le(i195,i2090). +Le(i208,i2090). +Le(i221,i2090). +Le(i234,i2090). +Le(i247,i2090). +Le(i260,i2090). +Le(i460,i2090). +Le(i530,i2090). +Le(i600,i2090). +Le(i660,i2090). +Le(i670,i2090). +Le(i710,i2090). +Le(i740,i2090). +Le(i810,i2090). +Le(i850,i2090). +Le(i880,i2090). +Le(i890,i2090). +Le(i920,i2090). +Le(i960,i2090). +Le(i990,i2090). +Le(i1030,i2090). +Le(i1060,i2090). +Le(i1100,i2090). +Le(i1130,i2090). +Le(i1170,i2090). +Le(i1200,i2090). +Le(i1240,i2090). +Le(i1260,i2090). +Le(i1270,i2090). +Le(i1290,i2090). +Le(i1310,i2090). +Le(i1320,i2090). +Le(i1330,i2090). +Le(i1350,i2090). +Le(i1360,i2090). +Le(i1380,i2090). +Le(i1390,i2090). +Le(i1420,i2090). +Le(i1430,i2090). +Le(i1450,i2090). +Le(i1460,i2090). +Le(i1490,i2090). +Le(i1520,i2090). +Le(i1530,i2090). +Le(i1540,i2090). +Le(i1560,i2090). +Le(i1590,i2090). +Le(i1630,i2090). +Le(i1660,i2090). +Le(i1700,i2090). +Le(i1730,i2090). +Le(i1760,i2090). +Le(i1770,i2090). +Le(i1810,i2090). +Le(i1840,i2090). +Le(i1880,i2090). +Le(i1910,i2090). +Le(i1950,i2090). +Le(i1980,i2090). +Le(i2020,i2090). +Le(i2050,i2090). +Le(i-30,i2120). +Le(i0,i2120). +Le(i13,i2120). +Le(i26,i2120). +Le(i39,i2120). +Le(i52,i2120). +Le(i60,i2120). +Le(i65,i2120). +Le(i70,i2120). +Le(i78,i2120). +Le(i90,i2120). +Le(i91,i2120). +Le(i104,i2120). +Le(i117,i2120). +Le(i130,i2120). +Le(i143,i2120). +Le(i156,i2120). +Le(i169,i2120). +Le(i182,i2120). +Le(i195,i2120). +Le(i208,i2120). +Le(i221,i2120). +Le(i234,i2120). +Le(i247,i2120). +Le(i260,i2120). +Le(i460,i2120). +Le(i530,i2120). +Le(i600,i2120). +Le(i660,i2120). +Le(i670,i2120). +Le(i710,i2120). +Le(i740,i2120). +Le(i810,i2120). +Le(i850,i2120). +Le(i880,i2120). +Le(i890,i2120). +Le(i920,i2120). +Le(i960,i2120). +Le(i990,i2120). +Le(i1030,i2120). +Le(i1060,i2120). +Le(i1100,i2120). +Le(i1130,i2120). +Le(i1170,i2120). +Le(i1200,i2120). +Le(i1240,i2120). +Le(i1260,i2120). +Le(i1270,i2120). +Le(i1290,i2120). +Le(i1310,i2120). +Le(i1320,i2120). +Le(i1330,i2120). +Le(i1350,i2120). +Le(i1360,i2120). +Le(i1380,i2120). +Le(i1390,i2120). +Le(i1420,i2120). +Le(i1430,i2120). +Le(i1450,i2120). +Le(i1460,i2120). +Le(i1490,i2120). +Le(i1520,i2120). +Le(i1530,i2120). +Le(i1540,i2120). +Le(i1560,i2120). +Le(i1590,i2120). +Le(i1630,i2120). +Le(i1660,i2120). +Le(i1700,i2120). +Le(i1730,i2120). +Le(i1760,i2120). +Le(i1770,i2120). +Le(i1810,i2120). +Le(i1840,i2120). +Le(i1880,i2120). +Le(i1910,i2120). +Le(i1950,i2120). +Le(i1980,i2120). +Le(i2020,i2120). +Le(i2050,i2120). +Le(i2090,i2120). 
+Le(i-30,i2160). +Le(i0,i2160). +Le(i13,i2160). +Le(i26,i2160). +Le(i39,i2160). +Le(i52,i2160). +Le(i60,i2160). +Le(i65,i2160). +Le(i70,i2160). +Le(i78,i2160). +Le(i90,i2160). +Le(i91,i2160). +Le(i104,i2160). +Le(i117,i2160). +Le(i130,i2160). +Le(i143,i2160). +Le(i156,i2160). +Le(i169,i2160). +Le(i182,i2160). +Le(i195,i2160). +Le(i208,i2160). +Le(i221,i2160). +Le(i234,i2160). +Le(i247,i2160). +Le(i260,i2160). +Le(i460,i2160). +Le(i530,i2160). +Le(i600,i2160). +Le(i660,i2160). +Le(i670,i2160). +Le(i710,i2160). +Le(i740,i2160). +Le(i810,i2160). +Le(i850,i2160). +Le(i880,i2160). +Le(i890,i2160). +Le(i920,i2160). +Le(i960,i2160). +Le(i990,i2160). +Le(i1030,i2160). +Le(i1060,i2160). +Le(i1100,i2160). +Le(i1130,i2160). +Le(i1170,i2160). +Le(i1200,i2160). +Le(i1240,i2160). +Le(i1260,i2160). +Le(i1270,i2160). +Le(i1290,i2160). +Le(i1310,i2160). +Le(i1320,i2160). +Le(i1330,i2160). +Le(i1350,i2160). +Le(i1360,i2160). +Le(i1380,i2160). +Le(i1390,i2160). +Le(i1420,i2160). +Le(i1430,i2160). +Le(i1450,i2160). +Le(i1460,i2160). +Le(i1490,i2160). +Le(i1520,i2160). +Le(i1530,i2160). +Le(i1540,i2160). +Le(i1560,i2160). +Le(i1590,i2160). +Le(i1630,i2160). +Le(i1660,i2160). +Le(i1700,i2160). +Le(i1730,i2160). +Le(i1760,i2160). +Le(i1770,i2160). +Le(i1810,i2160). +Le(i1840,i2160). +Le(i1880,i2160). +Le(i1910,i2160). +Le(i1950,i2160). +Le(i1980,i2160). +Le(i2020,i2160). +Le(i2050,i2160). +Le(i2090,i2160). +Le(i2120,i2160). +Le(i-30,i2190). +Le(i0,i2190). +Le(i13,i2190). +Le(i26,i2190). +Le(i39,i2190). +Le(i52,i2190). +Le(i60,i2190). +Le(i65,i2190). +Le(i70,i2190). +Le(i78,i2190). +Le(i90,i2190). +Le(i91,i2190). +Le(i104,i2190). +Le(i117,i2190). +Le(i130,i2190). +Le(i143,i2190). +Le(i156,i2190). +Le(i169,i2190). +Le(i182,i2190). +Le(i195,i2190). +Le(i208,i2190). +Le(i221,i2190). +Le(i234,i2190). +Le(i247,i2190). +Le(i260,i2190). +Le(i460,i2190). +Le(i530,i2190). +Le(i600,i2190). +Le(i660,i2190). +Le(i670,i2190). +Le(i710,i2190). +Le(i740,i2190). +Le(i810,i2190). +Le(i850,i2190). +Le(i880,i2190). +Le(i890,i2190). +Le(i920,i2190). +Le(i960,i2190). +Le(i990,i2190). +Le(i1030,i2190). +Le(i1060,i2190). +Le(i1100,i2190). +Le(i1130,i2190). +Le(i1170,i2190). +Le(i1200,i2190). +Le(i1240,i2190). +Le(i1260,i2190). +Le(i1270,i2190). +Le(i1290,i2190). +Le(i1310,i2190). +Le(i1320,i2190). +Le(i1330,i2190). +Le(i1350,i2190). +Le(i1360,i2190). +Le(i1380,i2190). +Le(i1390,i2190). +Le(i1420,i2190). +Le(i1430,i2190). +Le(i1450,i2190). +Le(i1460,i2190). +Le(i1490,i2190). +Le(i1520,i2190). +Le(i1530,i2190). +Le(i1540,i2190). +Le(i1560,i2190). +Le(i1590,i2190). +Le(i1630,i2190). +Le(i1660,i2190). +Le(i1700,i2190). +Le(i1730,i2190). +Le(i1760,i2190). +Le(i1770,i2190). +Le(i1810,i2190). +Le(i1840,i2190). +Le(i1880,i2190). +Le(i1910,i2190). +Le(i1950,i2190). +Le(i1980,i2190). +Le(i2020,i2190). +Le(i2050,i2190). +Le(i2090,i2190). +Le(i2120,i2190). +Le(i2160,i2190). +Le(i-30,i2200). +Le(i0,i2200). +Le(i13,i2200). +Le(i26,i2200). +Le(i39,i2200). +Le(i52,i2200). +Le(i60,i2200). +Le(i65,i2200). +Le(i70,i2200). +Le(i78,i2200). +Le(i90,i2200). +Le(i91,i2200). +Le(i104,i2200). +Le(i117,i2200). +Le(i130,i2200). +Le(i143,i2200). +Le(i156,i2200). +Le(i169,i2200). +Le(i182,i2200). +Le(i195,i2200). +Le(i208,i2200). +Le(i221,i2200). +Le(i234,i2200). +Le(i247,i2200). +Le(i260,i2200). +Le(i460,i2200). +Le(i530,i2200). +Le(i600,i2200). +Le(i660,i2200). +Le(i670,i2200). +Le(i710,i2200). +Le(i740,i2200). +Le(i810,i2200). +Le(i850,i2200). +Le(i880,i2200). +Le(i890,i2200). +Le(i920,i2200). +Le(i960,i2200). +Le(i990,i2200). +Le(i1030,i2200). +Le(i1060,i2200). 
+Le(i1100,i2200). +Le(i1130,i2200). +Le(i1170,i2200). +Le(i1200,i2200). +Le(i1240,i2200). +Le(i1260,i2200). +Le(i1270,i2200). +Le(i1290,i2200). +Le(i1310,i2200). +Le(i1320,i2200). +Le(i1330,i2200). +Le(i1350,i2200). +Le(i1360,i2200). +Le(i1380,i2200). +Le(i1390,i2200). +Le(i1420,i2200). +Le(i1430,i2200). +Le(i1450,i2200). +Le(i1460,i2200). +Le(i1490,i2200). +Le(i1520,i2200). +Le(i1530,i2200). +Le(i1540,i2200). +Le(i1560,i2200). +Le(i1590,i2200). +Le(i1630,i2200). +Le(i1660,i2200). +Le(i1700,i2200). +Le(i1730,i2200). +Le(i1760,i2200). +Le(i1770,i2200). +Le(i1810,i2200). +Le(i1840,i2200). +Le(i1880,i2200). +Le(i1910,i2200). +Le(i1950,i2200). +Le(i1980,i2200). +Le(i2020,i2200). +Le(i2050,i2200). +Le(i2090,i2200). +Le(i2120,i2200). +Le(i2160,i2200). +Le(i2190,i2200). +Le(i-30,i2230). +Le(i0,i2230). +Le(i13,i2230). +Le(i26,i2230). +Le(i39,i2230). +Le(i52,i2230). +Le(i60,i2230). +Le(i65,i2230). +Le(i70,i2230). +Le(i78,i2230). +Le(i90,i2230). +Le(i91,i2230). +Le(i104,i2230). +Le(i117,i2230). +Le(i130,i2230). +Le(i143,i2230). +Le(i156,i2230). +Le(i169,i2230). +Le(i182,i2230). +Le(i195,i2230). +Le(i208,i2230). +Le(i221,i2230). +Le(i234,i2230). +Le(i247,i2230). +Le(i260,i2230). +Le(i460,i2230). +Le(i530,i2230). +Le(i600,i2230). +Le(i660,i2230). +Le(i670,i2230). +Le(i710,i2230). +Le(i740,i2230). +Le(i810,i2230). +Le(i850,i2230). +Le(i880,i2230). +Le(i890,i2230). +Le(i920,i2230). +Le(i960,i2230). +Le(i990,i2230). +Le(i1030,i2230). +Le(i1060,i2230). +Le(i1100,i2230). +Le(i1130,i2230). +Le(i1170,i2230). +Le(i1200,i2230). +Le(i1240,i2230). +Le(i1260,i2230). +Le(i1270,i2230). +Le(i1290,i2230). +Le(i1310,i2230). +Le(i1320,i2230). +Le(i1330,i2230). +Le(i1350,i2230). +Le(i1360,i2230). +Le(i1380,i2230). +Le(i1390,i2230). +Le(i1420,i2230). +Le(i1430,i2230). +Le(i1450,i2230). +Le(i1460,i2230). +Le(i1490,i2230). +Le(i1520,i2230). +Le(i1530,i2230). +Le(i1540,i2230). +Le(i1560,i2230). +Le(i1590,i2230). +Le(i1630,i2230). +Le(i1660,i2230). +Le(i1700,i2230). +Le(i1730,i2230). +Le(i1760,i2230). +Le(i1770,i2230). +Le(i1810,i2230). +Le(i1840,i2230). +Le(i1880,i2230). +Le(i1910,i2230). +Le(i1950,i2230). +Le(i1980,i2230). +Le(i2020,i2230). +Le(i2050,i2230). +Le(i2090,i2230). +Le(i2120,i2230). +Le(i2160,i2230). +Le(i2190,i2230). +Le(i2200,i2230). +Le(i-30,i2270). +Le(i0,i2270). +Le(i13,i2270). +Le(i26,i2270). +Le(i39,i2270). +Le(i52,i2270). +Le(i60,i2270). +Le(i65,i2270). +Le(i70,i2270). +Le(i78,i2270). +Le(i90,i2270). +Le(i91,i2270). +Le(i104,i2270). +Le(i117,i2270). +Le(i130,i2270). +Le(i143,i2270). +Le(i156,i2270). +Le(i169,i2270). +Le(i182,i2270). +Le(i195,i2270). +Le(i208,i2270). +Le(i221,i2270). +Le(i234,i2270). +Le(i247,i2270). +Le(i260,i2270). +Le(i460,i2270). +Le(i530,i2270). +Le(i600,i2270). +Le(i660,i2270). +Le(i670,i2270). +Le(i710,i2270). +Le(i740,i2270). +Le(i810,i2270). +Le(i850,i2270). +Le(i880,i2270). +Le(i890,i2270). +Le(i920,i2270). +Le(i960,i2270). +Le(i990,i2270). +Le(i1030,i2270). +Le(i1060,i2270). +Le(i1100,i2270). +Le(i1130,i2270). +Le(i1170,i2270). +Le(i1200,i2270). +Le(i1240,i2270). +Le(i1260,i2270). +Le(i1270,i2270). +Le(i1290,i2270). +Le(i1310,i2270). +Le(i1320,i2270). +Le(i1330,i2270). +Le(i1350,i2270). +Le(i1360,i2270). +Le(i1380,i2270). +Le(i1390,i2270). +Le(i1420,i2270). +Le(i1430,i2270). +Le(i1450,i2270). +Le(i1460,i2270). +Le(i1490,i2270). +Le(i1520,i2270). +Le(i1530,i2270). +Le(i1540,i2270). +Le(i1560,i2270). +Le(i1590,i2270). +Le(i1630,i2270). +Le(i1660,i2270). +Le(i1700,i2270). +Le(i1730,i2270). +Le(i1760,i2270). +Le(i1770,i2270). +Le(i1810,i2270). +Le(i1840,i2270). +Le(i1880,i2270). 
+Le(i1910,i2270). +Le(i1950,i2270). +Le(i1980,i2270). +Le(i2020,i2270). +Le(i2050,i2270). +Le(i2090,i2270). +Le(i2120,i2270). +Le(i2160,i2270). +Le(i2190,i2270). +Le(i2200,i2270). +Le(i2230,i2270). +Le(i-30,i2300). +Le(i0,i2300). +Le(i13,i2300). +Le(i26,i2300). +Le(i39,i2300). +Le(i52,i2300). +Le(i60,i2300). +Le(i65,i2300). +Le(i70,i2300). +Le(i78,i2300). +Le(i90,i2300). +Le(i91,i2300). +Le(i104,i2300). +Le(i117,i2300). +Le(i130,i2300). +Le(i143,i2300). +Le(i156,i2300). +Le(i169,i2300). +Le(i182,i2300). +Le(i195,i2300). +Le(i208,i2300). +Le(i221,i2300). +Le(i234,i2300). +Le(i247,i2300). +Le(i260,i2300). +Le(i460,i2300). +Le(i530,i2300). +Le(i600,i2300). +Le(i660,i2300). +Le(i670,i2300). +Le(i710,i2300). +Le(i740,i2300). +Le(i810,i2300). +Le(i850,i2300). +Le(i880,i2300). +Le(i890,i2300). +Le(i920,i2300). +Le(i960,i2300). +Le(i990,i2300). +Le(i1030,i2300). +Le(i1060,i2300). +Le(i1100,i2300). +Le(i1130,i2300). +Le(i1170,i2300). +Le(i1200,i2300). +Le(i1240,i2300). +Le(i1260,i2300). +Le(i1270,i2300). +Le(i1290,i2300). +Le(i1310,i2300). +Le(i1320,i2300). +Le(i1330,i2300). +Le(i1350,i2300). +Le(i1360,i2300). +Le(i1380,i2300). +Le(i1390,i2300). +Le(i1420,i2300). +Le(i1430,i2300). +Le(i1450,i2300). +Le(i1460,i2300). +Le(i1490,i2300). +Le(i1520,i2300). +Le(i1530,i2300). +Le(i1540,i2300). +Le(i1560,i2300). +Le(i1590,i2300). +Le(i1630,i2300). +Le(i1660,i2300). +Le(i1700,i2300). +Le(i1730,i2300). +Le(i1760,i2300). +Le(i1770,i2300). +Le(i1810,i2300). +Le(i1840,i2300). +Le(i1880,i2300). +Le(i1910,i2300). +Le(i1950,i2300). +Le(i1980,i2300). +Le(i2020,i2300). +Le(i2050,i2300). +Le(i2090,i2300). +Le(i2120,i2300). +Le(i2160,i2300). +Le(i2190,i2300). +Le(i2200,i2300). +Le(i2230,i2300). +Le(i2270,i2300). +Le(i-30,i2340). +Le(i0,i2340). +Le(i13,i2340). +Le(i26,i2340). +Le(i39,i2340). +Le(i52,i2340). +Le(i60,i2340). +Le(i65,i2340). +Le(i70,i2340). +Le(i78,i2340). +Le(i90,i2340). +Le(i91,i2340). +Le(i104,i2340). +Le(i117,i2340). +Le(i130,i2340). +Le(i143,i2340). +Le(i156,i2340). +Le(i169,i2340). +Le(i182,i2340). +Le(i195,i2340). +Le(i208,i2340). +Le(i221,i2340). +Le(i234,i2340). +Le(i247,i2340). +Le(i260,i2340). +Le(i460,i2340). +Le(i530,i2340). +Le(i600,i2340). +Le(i660,i2340). +Le(i670,i2340). +Le(i710,i2340). +Le(i740,i2340). +Le(i810,i2340). +Le(i850,i2340). +Le(i880,i2340). +Le(i890,i2340). +Le(i920,i2340). +Le(i960,i2340). +Le(i990,i2340). +Le(i1030,i2340). +Le(i1060,i2340). +Le(i1100,i2340). +Le(i1130,i2340). +Le(i1170,i2340). +Le(i1200,i2340). +Le(i1240,i2340). +Le(i1260,i2340). +Le(i1270,i2340). +Le(i1290,i2340). +Le(i1310,i2340). +Le(i1320,i2340). +Le(i1330,i2340). +Le(i1350,i2340). +Le(i1360,i2340). +Le(i1380,i2340). +Le(i1390,i2340). +Le(i1420,i2340). +Le(i1430,i2340). +Le(i1450,i2340). +Le(i1460,i2340). +Le(i1490,i2340). +Le(i1520,i2340). +Le(i1530,i2340). +Le(i1540,i2340). +Le(i1560,i2340). +Le(i1590,i2340). +Le(i1630,i2340). +Le(i1660,i2340). +Le(i1700,i2340). +Le(i1730,i2340). +Le(i1760,i2340). +Le(i1770,i2340). +Le(i1810,i2340). +Le(i1840,i2340). +Le(i1880,i2340). +Le(i1910,i2340). +Le(i1950,i2340). +Le(i1980,i2340). +Le(i2020,i2340). +Le(i2050,i2340). +Le(i2090,i2340). +Le(i2120,i2340). +Le(i2160,i2340). +Le(i2190,i2340). +Le(i2200,i2340). +Le(i2230,i2340). +Le(i2270,i2340). +Le(i2300,i2340). +Le(i-30,i2370). +Le(i0,i2370). +Le(i13,i2370). +Le(i26,i2370). +Le(i39,i2370). +Le(i52,i2370). +Le(i60,i2370). +Le(i65,i2370). +Le(i70,i2370). +Le(i78,i2370). +Le(i90,i2370). +Le(i91,i2370). +Le(i104,i2370). +Le(i117,i2370). +Le(i130,i2370). +Le(i143,i2370). +Le(i156,i2370). +Le(i169,i2370). 
+Le(i182,i2370). +Le(i195,i2370). +Le(i208,i2370). +Le(i221,i2370). +Le(i234,i2370). +Le(i247,i2370). +Le(i260,i2370). +Le(i460,i2370). +Le(i530,i2370). +Le(i600,i2370). +Le(i660,i2370). +Le(i670,i2370). +Le(i710,i2370). +Le(i740,i2370). +Le(i810,i2370). +Le(i850,i2370). +Le(i880,i2370). +Le(i890,i2370). +Le(i920,i2370). +Le(i960,i2370). +Le(i990,i2370). +Le(i1030,i2370). +Le(i1060,i2370). +Le(i1100,i2370). +Le(i1130,i2370). +Le(i1170,i2370). +Le(i1200,i2370). +Le(i1240,i2370). +Le(i1260,i2370). +Le(i1270,i2370). +Le(i1290,i2370). +Le(i1310,i2370). +Le(i1320,i2370). +Le(i1330,i2370). +Le(i1350,i2370). +Le(i1360,i2370). +Le(i1380,i2370). +Le(i1390,i2370). +Le(i1420,i2370). +Le(i1430,i2370). +Le(i1450,i2370). +Le(i1460,i2370). +Le(i1490,i2370). +Le(i1520,i2370). +Le(i1530,i2370). +Le(i1540,i2370). +Le(i1560,i2370). +Le(i1590,i2370). +Le(i1630,i2370). +Le(i1660,i2370). +Le(i1700,i2370). +Le(i1730,i2370). +Le(i1760,i2370). +Le(i1770,i2370). +Le(i1810,i2370). +Le(i1840,i2370). +Le(i1880,i2370). +Le(i1910,i2370). +Le(i1950,i2370). +Le(i1980,i2370). +Le(i2020,i2370). +Le(i2050,i2370). +Le(i2090,i2370). +Le(i2120,i2370). +Le(i2160,i2370). +Le(i2190,i2370). +Le(i2200,i2370). +Le(i2230,i2370). +Le(i2270,i2370). +Le(i2300,i2370). +Le(i2340,i2370). +Le(i-30,i2410). +Le(i0,i2410). +Le(i13,i2410). +Le(i26,i2410). +Le(i39,i2410). +Le(i52,i2410). +Le(i60,i2410). +Le(i65,i2410). +Le(i70,i2410). +Le(i78,i2410). +Le(i90,i2410). +Le(i91,i2410). +Le(i104,i2410). +Le(i117,i2410). +Le(i130,i2410). +Le(i143,i2410). +Le(i156,i2410). +Le(i169,i2410). +Le(i182,i2410). +Le(i195,i2410). +Le(i208,i2410). +Le(i221,i2410). +Le(i234,i2410). +Le(i247,i2410). +Le(i260,i2410). +Le(i460,i2410). +Le(i530,i2410). +Le(i600,i2410). +Le(i660,i2410). +Le(i670,i2410). +Le(i710,i2410). +Le(i740,i2410). +Le(i810,i2410). +Le(i850,i2410). +Le(i880,i2410). +Le(i890,i2410). +Le(i920,i2410). +Le(i960,i2410). +Le(i990,i2410). +Le(i1030,i2410). +Le(i1060,i2410). +Le(i1100,i2410). +Le(i1130,i2410). +Le(i1170,i2410). +Le(i1200,i2410). +Le(i1240,i2410). +Le(i1260,i2410). +Le(i1270,i2410). +Le(i1290,i2410). +Le(i1310,i2410). +Le(i1320,i2410). +Le(i1330,i2410). +Le(i1350,i2410). +Le(i1360,i2410). +Le(i1380,i2410). +Le(i1390,i2410). +Le(i1420,i2410). +Le(i1430,i2410). +Le(i1450,i2410). +Le(i1460,i2410). +Le(i1490,i2410). +Le(i1520,i2410). +Le(i1530,i2410). +Le(i1540,i2410). +Le(i1560,i2410). +Le(i1590,i2410). +Le(i1630,i2410). +Le(i1660,i2410). +Le(i1700,i2410). +Le(i1730,i2410). +Le(i1760,i2410). +Le(i1770,i2410). +Le(i1810,i2410). +Le(i1840,i2410). +Le(i1880,i2410). +Le(i1910,i2410). +Le(i1950,i2410). +Le(i1980,i2410). +Le(i2020,i2410). +Le(i2050,i2410). +Le(i2090,i2410). +Le(i2120,i2410). +Le(i2160,i2410). +Le(i2190,i2410). +Le(i2200,i2410). +Le(i2230,i2410). +Le(i2270,i2410). +Le(i2300,i2410). +Le(i2340,i2410). +Le(i2370,i2410). +Le(i-30,i2420). +Le(i0,i2420). +Le(i13,i2420). +Le(i26,i2420). +Le(i39,i2420). +Le(i52,i2420). +Le(i60,i2420). +Le(i65,i2420). +Le(i70,i2420). +Le(i78,i2420). +Le(i90,i2420). +Le(i91,i2420). +Le(i104,i2420). +Le(i117,i2420). +Le(i130,i2420). +Le(i143,i2420). +Le(i156,i2420). +Le(i169,i2420). +Le(i182,i2420). +Le(i195,i2420). +Le(i208,i2420). +Le(i221,i2420). +Le(i234,i2420). +Le(i247,i2420). +Le(i260,i2420). +Le(i460,i2420). +Le(i530,i2420). +Le(i600,i2420). +Le(i660,i2420). +Le(i670,i2420). +Le(i710,i2420). +Le(i740,i2420). +Le(i810,i2420). +Le(i850,i2420). +Le(i880,i2420). +Le(i890,i2420). +Le(i920,i2420). +Le(i960,i2420). +Le(i990,i2420). +Le(i1030,i2420). +Le(i1060,i2420). +Le(i1100,i2420). +Le(i1130,i2420). 
+Le(i1170,i2420). +Le(i1200,i2420). +Le(i1240,i2420). +Le(i1260,i2420). +Le(i1270,i2420). +Le(i1290,i2420). +Le(i1310,i2420). +Le(i1320,i2420). +Le(i1330,i2420). +Le(i1350,i2420). +Le(i1360,i2420). +Le(i1380,i2420). +Le(i1390,i2420). +Le(i1420,i2420). +Le(i1430,i2420). +Le(i1450,i2420). +Le(i1460,i2420). +Le(i1490,i2420). +Le(i1520,i2420). +Le(i1530,i2420). +Le(i1540,i2420). +Le(i1560,i2420). +Le(i1590,i2420). +Le(i1630,i2420). +Le(i1660,i2420). +Le(i1700,i2420). +Le(i1730,i2420). +Le(i1760,i2420). +Le(i1770,i2420). +Le(i1810,i2420). +Le(i1840,i2420). +Le(i1880,i2420). +Le(i1910,i2420). +Le(i1950,i2420). +Le(i1980,i2420). +Le(i2020,i2420). +Le(i2050,i2420). +Le(i2090,i2420). +Le(i2120,i2420). +Le(i2160,i2420). +Le(i2190,i2420). +Le(i2200,i2420). +Le(i2230,i2420). +Le(i2270,i2420). +Le(i2300,i2420). +Le(i2340,i2420). +Le(i2370,i2420). +Le(i2410,i2420). +Le(i-30,i2440). +Le(i0,i2440). +Le(i13,i2440). +Le(i26,i2440). +Le(i39,i2440). +Le(i52,i2440). +Le(i60,i2440). +Le(i65,i2440). +Le(i70,i2440). +Le(i78,i2440). +Le(i90,i2440). +Le(i91,i2440). +Le(i104,i2440). +Le(i117,i2440). +Le(i130,i2440). +Le(i143,i2440). +Le(i156,i2440). +Le(i169,i2440). +Le(i182,i2440). +Le(i195,i2440). +Le(i208,i2440). +Le(i221,i2440). +Le(i234,i2440). +Le(i247,i2440). +Le(i260,i2440). +Le(i460,i2440). +Le(i530,i2440). +Le(i600,i2440). +Le(i660,i2440). +Le(i670,i2440). +Le(i710,i2440). +Le(i740,i2440). +Le(i810,i2440). +Le(i850,i2440). +Le(i880,i2440). +Le(i890,i2440). +Le(i920,i2440). +Le(i960,i2440). +Le(i990,i2440). +Le(i1030,i2440). +Le(i1060,i2440). +Le(i1100,i2440). +Le(i1130,i2440). +Le(i1170,i2440). +Le(i1200,i2440). +Le(i1240,i2440). +Le(i1260,i2440). +Le(i1270,i2440). +Le(i1290,i2440). +Le(i1310,i2440). +Le(i1320,i2440). +Le(i1330,i2440). +Le(i1350,i2440). +Le(i1360,i2440). +Le(i1380,i2440). +Le(i1390,i2440). +Le(i1420,i2440). +Le(i1430,i2440). +Le(i1450,i2440). +Le(i1460,i2440). +Le(i1490,i2440). +Le(i1520,i2440). +Le(i1530,i2440). +Le(i1540,i2440). +Le(i1560,i2440). +Le(i1590,i2440). +Le(i1630,i2440). +Le(i1660,i2440). +Le(i1700,i2440). +Le(i1730,i2440). +Le(i1760,i2440). +Le(i1770,i2440). +Le(i1810,i2440). +Le(i1840,i2440). +Le(i1880,i2440). +Le(i1910,i2440). +Le(i1950,i2440). +Le(i1980,i2440). +Le(i2020,i2440). +Le(i2050,i2440). +Le(i2090,i2440). +Le(i2120,i2440). +Le(i2160,i2440). +Le(i2190,i2440). +Le(i2200,i2440). +Le(i2230,i2440). +Le(i2270,i2440). +Le(i2300,i2440). +Le(i2340,i2440). +Le(i2370,i2440). +Le(i2410,i2440). +Le(i2420,i2440). +Le(i-30,i2480). +Le(i0,i2480). +Le(i13,i2480). +Le(i26,i2480). +Le(i39,i2480). +Le(i52,i2480). +Le(i60,i2480). +Le(i65,i2480). +Le(i70,i2480). +Le(i78,i2480). +Le(i90,i2480). +Le(i91,i2480). +Le(i104,i2480). +Le(i117,i2480). +Le(i130,i2480). +Le(i143,i2480). +Le(i156,i2480). +Le(i169,i2480). +Le(i182,i2480). +Le(i195,i2480). +Le(i208,i2480). +Le(i221,i2480). +Le(i234,i2480). +Le(i247,i2480). +Le(i260,i2480). +Le(i460,i2480). +Le(i530,i2480). +Le(i600,i2480). +Le(i660,i2480). +Le(i670,i2480). +Le(i710,i2480). +Le(i740,i2480). +Le(i810,i2480). +Le(i850,i2480). +Le(i880,i2480). +Le(i890,i2480). +Le(i920,i2480). +Le(i960,i2480). +Le(i990,i2480). +Le(i1030,i2480). +Le(i1060,i2480). +Le(i1100,i2480). +Le(i1130,i2480). +Le(i1170,i2480). +Le(i1200,i2480). +Le(i1240,i2480). +Le(i1260,i2480). +Le(i1270,i2480). +Le(i1290,i2480). +Le(i1310,i2480). +Le(i1320,i2480). +Le(i1330,i2480). +Le(i1350,i2480). +Le(i1360,i2480). +Le(i1380,i2480). +Le(i1390,i2480). +Le(i1420,i2480). +Le(i1430,i2480). +Le(i1450,i2480). +Le(i1460,i2480). +Le(i1490,i2480). +Le(i1520,i2480). +Le(i1530,i2480). 
+Le(i1540,i2480). +Le(i1560,i2480). +Le(i1590,i2480). +Le(i1630,i2480). +Le(i1660,i2480). +Le(i1700,i2480). +Le(i1730,i2480). +Le(i1760,i2480). +Le(i1770,i2480). +Le(i1810,i2480). +Le(i1840,i2480). +Le(i1880,i2480). +Le(i1910,i2480). +Le(i1950,i2480). +Le(i1980,i2480). +Le(i2020,i2480). +Le(i2050,i2480). +Le(i2090,i2480). +Le(i2120,i2480). +Le(i2160,i2480). +Le(i2190,i2480). +Le(i2200,i2480). +Le(i2230,i2480). +Le(i2270,i2480). +Le(i2300,i2480). +Le(i2340,i2480). +Le(i2370,i2480). +Le(i2410,i2480). +Le(i2420,i2480). +Le(i2440,i2480). +Le(i-30,i2510). +Le(i0,i2510). +Le(i13,i2510). +Le(i26,i2510). +Le(i39,i2510). +Le(i52,i2510). +Le(i60,i2510). +Le(i65,i2510). +Le(i70,i2510). +Le(i78,i2510). +Le(i90,i2510). +Le(i91,i2510). +Le(i104,i2510). +Le(i117,i2510). +Le(i130,i2510). +Le(i143,i2510). +Le(i156,i2510). +Le(i169,i2510). +Le(i182,i2510). +Le(i195,i2510). +Le(i208,i2510). +Le(i221,i2510). +Le(i234,i2510). +Le(i247,i2510). +Le(i260,i2510). +Le(i460,i2510). +Le(i530,i2510). +Le(i600,i2510). +Le(i660,i2510). +Le(i670,i2510). +Le(i710,i2510). +Le(i740,i2510). +Le(i810,i2510). +Le(i850,i2510). +Le(i880,i2510). +Le(i890,i2510). +Le(i920,i2510). +Le(i960,i2510). +Le(i990,i2510). +Le(i1030,i2510). +Le(i1060,i2510). +Le(i1100,i2510). +Le(i1130,i2510). +Le(i1170,i2510). +Le(i1200,i2510). +Le(i1240,i2510). +Le(i1260,i2510). +Le(i1270,i2510). +Le(i1290,i2510). +Le(i1310,i2510). +Le(i1320,i2510). +Le(i1330,i2510). +Le(i1350,i2510). +Le(i1360,i2510). +Le(i1380,i2510). +Le(i1390,i2510). +Le(i1420,i2510). +Le(i1430,i2510). +Le(i1450,i2510). +Le(i1460,i2510). +Le(i1490,i2510). +Le(i1520,i2510). +Le(i1530,i2510). +Le(i1540,i2510). +Le(i1560,i2510). +Le(i1590,i2510). +Le(i1630,i2510). +Le(i1660,i2510). +Le(i1700,i2510). +Le(i1730,i2510). +Le(i1760,i2510). +Le(i1770,i2510). +Le(i1810,i2510). +Le(i1840,i2510). +Le(i1880,i2510). +Le(i1910,i2510). +Le(i1950,i2510). +Le(i1980,i2510). +Le(i2020,i2510). +Le(i2050,i2510). +Le(i2090,i2510). +Le(i2120,i2510). +Le(i2160,i2510). +Le(i2190,i2510). +Le(i2200,i2510). +Le(i2230,i2510). +Le(i2270,i2510). +Le(i2300,i2510). +Le(i2340,i2510). +Le(i2370,i2510). +Le(i2410,i2510). +Le(i2420,i2510). +Le(i2440,i2510). +Le(i2480,i2510). +Le(i-30,i2550). +Le(i0,i2550). +Le(i13,i2550). +Le(i26,i2550). +Le(i39,i2550). +Le(i52,i2550). +Le(i60,i2550). +Le(i65,i2550). +Le(i70,i2550). +Le(i78,i2550). +Le(i90,i2550). +Le(i91,i2550). +Le(i104,i2550). +Le(i117,i2550). +Le(i130,i2550). +Le(i143,i2550). +Le(i156,i2550). +Le(i169,i2550). +Le(i182,i2550). +Le(i195,i2550). +Le(i208,i2550). +Le(i221,i2550). +Le(i234,i2550). +Le(i247,i2550). +Le(i260,i2550). +Le(i460,i2550). +Le(i530,i2550). +Le(i600,i2550). +Le(i660,i2550). +Le(i670,i2550). +Le(i710,i2550). +Le(i740,i2550). +Le(i810,i2550). +Le(i850,i2550). +Le(i880,i2550). +Le(i890,i2550). +Le(i920,i2550). +Le(i960,i2550). +Le(i990,i2550). +Le(i1030,i2550). +Le(i1060,i2550). +Le(i1100,i2550). +Le(i1130,i2550). +Le(i1170,i2550). +Le(i1200,i2550). +Le(i1240,i2550). +Le(i1260,i2550). +Le(i1270,i2550). +Le(i1290,i2550). +Le(i1310,i2550). +Le(i1320,i2550). +Le(i1330,i2550). +Le(i1350,i2550). +Le(i1360,i2550). +Le(i1380,i2550). +Le(i1390,i2550). +Le(i1420,i2550). +Le(i1430,i2550). +Le(i1450,i2550). +Le(i1460,i2550). +Le(i1490,i2550). +Le(i1520,i2550). +Le(i1530,i2550). +Le(i1540,i2550). +Le(i1560,i2550). +Le(i1590,i2550). +Le(i1630,i2550). +Le(i1660,i2550). +Le(i1700,i2550). +Le(i1730,i2550). +Le(i1760,i2550). +Le(i1770,i2550). +Le(i1810,i2550). +Le(i1840,i2550). +Le(i1880,i2550). +Le(i1910,i2550). +Le(i1950,i2550). +Le(i1980,i2550). +Le(i2020,i2550). 
+Le(i2050,i2550). +Le(i2090,i2550). +Le(i2120,i2550). +Le(i2160,i2550). +Le(i2190,i2550). +Le(i2200,i2550). +Le(i2230,i2550). +Le(i2270,i2550). +Le(i2300,i2550). +Le(i2340,i2550). +Le(i2370,i2550). +Le(i2410,i2550). +Le(i2420,i2550). +Le(i2440,i2550). +Le(i2480,i2550). +Le(i2510,i2550). +Le(i-30,i2580). +Le(i0,i2580). +Le(i13,i2580). +Le(i26,i2580). +Le(i39,i2580). +Le(i52,i2580). +Le(i60,i2580). +Le(i65,i2580). +Le(i70,i2580). +Le(i78,i2580). +Le(i90,i2580). +Le(i91,i2580). +Le(i104,i2580). +Le(i117,i2580). +Le(i130,i2580). +Le(i143,i2580). +Le(i156,i2580). +Le(i169,i2580). +Le(i182,i2580). +Le(i195,i2580). +Le(i208,i2580). +Le(i221,i2580). +Le(i234,i2580). +Le(i247,i2580). +Le(i260,i2580). +Le(i460,i2580). +Le(i530,i2580). +Le(i600,i2580). +Le(i660,i2580). +Le(i670,i2580). +Le(i710,i2580). +Le(i740,i2580). +Le(i810,i2580). +Le(i850,i2580). +Le(i880,i2580). +Le(i890,i2580). +Le(i920,i2580). +Le(i960,i2580). +Le(i990,i2580). +Le(i1030,i2580). +Le(i1060,i2580). +Le(i1100,i2580). +Le(i1130,i2580). +Le(i1170,i2580). +Le(i1200,i2580). +Le(i1240,i2580). +Le(i1260,i2580). +Le(i1270,i2580). +Le(i1290,i2580). +Le(i1310,i2580). +Le(i1320,i2580). +Le(i1330,i2580). +Le(i1350,i2580). +Le(i1360,i2580). +Le(i1380,i2580). +Le(i1390,i2580). +Le(i1420,i2580). +Le(i1430,i2580). +Le(i1450,i2580). +Le(i1460,i2580). +Le(i1490,i2580). +Le(i1520,i2580). +Le(i1530,i2580). +Le(i1540,i2580). +Le(i1560,i2580). +Le(i1590,i2580). +Le(i1630,i2580). +Le(i1660,i2580). +Le(i1700,i2580). +Le(i1730,i2580). +Le(i1760,i2580). +Le(i1770,i2580). +Le(i1810,i2580). +Le(i1840,i2580). +Le(i1880,i2580). +Le(i1910,i2580). +Le(i1950,i2580). +Le(i1980,i2580). +Le(i2020,i2580). +Le(i2050,i2580). +Le(i2090,i2580). +Le(i2120,i2580). +Le(i2160,i2580). +Le(i2190,i2580). +Le(i2200,i2580). +Le(i2230,i2580). +Le(i2270,i2580). +Le(i2300,i2580). +Le(i2340,i2580). +Le(i2370,i2580). +Le(i2410,i2580). +Le(i2420,i2580). +Le(i2440,i2580). +Le(i2480,i2580). +Le(i2510,i2580). +Le(i2550,i2580). +Le(i-30,i2620). +Le(i0,i2620). +Le(i13,i2620). +Le(i26,i2620). +Le(i39,i2620). +Le(i52,i2620). +Le(i60,i2620). +Le(i65,i2620). +Le(i70,i2620). +Le(i78,i2620). +Le(i90,i2620). +Le(i91,i2620). +Le(i104,i2620). +Le(i117,i2620). +Le(i130,i2620). +Le(i143,i2620). +Le(i156,i2620). +Le(i169,i2620). +Le(i182,i2620). +Le(i195,i2620). +Le(i208,i2620). +Le(i221,i2620). +Le(i234,i2620). +Le(i247,i2620). +Le(i260,i2620). +Le(i460,i2620). +Le(i530,i2620). +Le(i600,i2620). +Le(i660,i2620). +Le(i670,i2620). +Le(i710,i2620). +Le(i740,i2620). +Le(i810,i2620). +Le(i850,i2620). +Le(i880,i2620). +Le(i890,i2620). +Le(i920,i2620). +Le(i960,i2620). +Le(i990,i2620). +Le(i1030,i2620). +Le(i1060,i2620). +Le(i1100,i2620). +Le(i1130,i2620). +Le(i1170,i2620). +Le(i1200,i2620). +Le(i1240,i2620). +Le(i1260,i2620). +Le(i1270,i2620). +Le(i1290,i2620). +Le(i1310,i2620). +Le(i1320,i2620). +Le(i1330,i2620). +Le(i1350,i2620). +Le(i1360,i2620). +Le(i1380,i2620). +Le(i1390,i2620). +Le(i1420,i2620). +Le(i1430,i2620). +Le(i1450,i2620). +Le(i1460,i2620). +Le(i1490,i2620). +Le(i1520,i2620). +Le(i1530,i2620). +Le(i1540,i2620). +Le(i1560,i2620). +Le(i1590,i2620). +Le(i1630,i2620). +Le(i1660,i2620). +Le(i1700,i2620). +Le(i1730,i2620). +Le(i1760,i2620). +Le(i1770,i2620). +Le(i1810,i2620). +Le(i1840,i2620). +Le(i1880,i2620). +Le(i1910,i2620). +Le(i1950,i2620). +Le(i1980,i2620). +Le(i2020,i2620). +Le(i2050,i2620). +Le(i2090,i2620). +Le(i2120,i2620). +Le(i2160,i2620). +Le(i2190,i2620). +Le(i2200,i2620). +Le(i2230,i2620). +Le(i2270,i2620). +Le(i2300,i2620). +Le(i2340,i2620). +Le(i2370,i2620). +Le(i2410,i2620). 
+Le(i2420,i2620). +Le(i2440,i2620). +Le(i2480,i2620). +Le(i2510,i2620). +Le(i2550,i2620). +Le(i2580,i2620). +Le(i-30,i2640). +Le(i0,i2640). +Le(i13,i2640). +Le(i26,i2640). +Le(i39,i2640). +Le(i52,i2640). +Le(i60,i2640). +Le(i65,i2640). +Le(i70,i2640). +Le(i78,i2640). +Le(i90,i2640). +Le(i91,i2640). +Le(i104,i2640). +Le(i117,i2640). +Le(i130,i2640). +Le(i143,i2640). +Le(i156,i2640). +Le(i169,i2640). +Le(i182,i2640). +Le(i195,i2640). +Le(i208,i2640). +Le(i221,i2640). +Le(i234,i2640). +Le(i247,i2640). +Le(i260,i2640). +Le(i460,i2640). +Le(i530,i2640). +Le(i600,i2640). +Le(i660,i2640). +Le(i670,i2640). +Le(i710,i2640). +Le(i740,i2640). +Le(i810,i2640). +Le(i850,i2640). +Le(i880,i2640). +Le(i890,i2640). +Le(i920,i2640). +Le(i960,i2640). +Le(i990,i2640). +Le(i1030,i2640). +Le(i1060,i2640). +Le(i1100,i2640). +Le(i1130,i2640). +Le(i1170,i2640). +Le(i1200,i2640). +Le(i1240,i2640). +Le(i1260,i2640). +Le(i1270,i2640). +Le(i1290,i2640). +Le(i1310,i2640). +Le(i1320,i2640). +Le(i1330,i2640). +Le(i1350,i2640). +Le(i1360,i2640). +Le(i1380,i2640). +Le(i1390,i2640). +Le(i1420,i2640). +Le(i1430,i2640). +Le(i1450,i2640). +Le(i1460,i2640). +Le(i1490,i2640). +Le(i1520,i2640). +Le(i1530,i2640). +Le(i1540,i2640). +Le(i1560,i2640). +Le(i1590,i2640). +Le(i1630,i2640). +Le(i1660,i2640). +Le(i1700,i2640). +Le(i1730,i2640). +Le(i1760,i2640). +Le(i1770,i2640). +Le(i1810,i2640). +Le(i1840,i2640). +Le(i1880,i2640). +Le(i1910,i2640). +Le(i1950,i2640). +Le(i1980,i2640). +Le(i2020,i2640). +Le(i2050,i2640). +Le(i2090,i2640). +Le(i2120,i2640). +Le(i2160,i2640). +Le(i2190,i2640). +Le(i2200,i2640). +Le(i2230,i2640). +Le(i2270,i2640). +Le(i2300,i2640). +Le(i2340,i2640). +Le(i2370,i2640). +Le(i2410,i2640). +Le(i2420,i2640). +Le(i2440,i2640). +Le(i2480,i2640). +Le(i2510,i2640). +Le(i2550,i2640). +Le(i2580,i2640). +Le(i2620,i2640). +Le(i-30,i2660). +Le(i0,i2660). +Le(i13,i2660). +Le(i26,i2660). +Le(i39,i2660). +Le(i52,i2660). +Le(i60,i2660). +Le(i65,i2660). +Le(i70,i2660). +Le(i78,i2660). +Le(i90,i2660). +Le(i91,i2660). +Le(i104,i2660). +Le(i117,i2660). +Le(i130,i2660). +Le(i143,i2660). +Le(i156,i2660). +Le(i169,i2660). +Le(i182,i2660). +Le(i195,i2660). +Le(i208,i2660). +Le(i221,i2660). +Le(i234,i2660). +Le(i247,i2660). +Le(i260,i2660). +Le(i460,i2660). +Le(i530,i2660). +Le(i600,i2660). +Le(i660,i2660). +Le(i670,i2660). +Le(i710,i2660). +Le(i740,i2660). +Le(i810,i2660). +Le(i850,i2660). +Le(i880,i2660). +Le(i890,i2660). +Le(i920,i2660). +Le(i960,i2660). +Le(i990,i2660). +Le(i1030,i2660). +Le(i1060,i2660). +Le(i1100,i2660). +Le(i1130,i2660). +Le(i1170,i2660). +Le(i1200,i2660). +Le(i1240,i2660). +Le(i1260,i2660). +Le(i1270,i2660). +Le(i1290,i2660). +Le(i1310,i2660). +Le(i1320,i2660). +Le(i1330,i2660). +Le(i1350,i2660). +Le(i1360,i2660). +Le(i1380,i2660). +Le(i1390,i2660). +Le(i1420,i2660). +Le(i1430,i2660). +Le(i1450,i2660). +Le(i1460,i2660). +Le(i1490,i2660). +Le(i1520,i2660). +Le(i1530,i2660). +Le(i1540,i2660). +Le(i1560,i2660). +Le(i1590,i2660). +Le(i1630,i2660). +Le(i1660,i2660). +Le(i1700,i2660). +Le(i1730,i2660). +Le(i1760,i2660). +Le(i1770,i2660). +Le(i1810,i2660). +Le(i1840,i2660). +Le(i1880,i2660). +Le(i1910,i2660). +Le(i1950,i2660). +Le(i1980,i2660). +Le(i2020,i2660). +Le(i2050,i2660). +Le(i2090,i2660). +Le(i2120,i2660). +Le(i2160,i2660). +Le(i2190,i2660). +Le(i2200,i2660). +Le(i2230,i2660). +Le(i2270,i2660). +Le(i2300,i2660). +Le(i2340,i2660). +Le(i2370,i2660). +Le(i2410,i2660). +Le(i2420,i2660). +Le(i2440,i2660). +Le(i2480,i2660). +Le(i2510,i2660). +Le(i2550,i2660). +Le(i2580,i2660). +Le(i2620,i2660). +Le(i2640,i2660). 
+Le(i-30,i2730). +Le(i0,i2730). +Le(i13,i2730). +Le(i26,i2730). +Le(i39,i2730). +Le(i52,i2730). +Le(i60,i2730). +Le(i65,i2730). +Le(i70,i2730). +Le(i78,i2730). +Le(i90,i2730). +Le(i91,i2730). +Le(i104,i2730). +Le(i117,i2730). +Le(i130,i2730). +Le(i143,i2730). +Le(i156,i2730). +Le(i169,i2730). +Le(i182,i2730). +Le(i195,i2730). +Le(i208,i2730). +Le(i221,i2730). +Le(i234,i2730). +Le(i247,i2730). +Le(i260,i2730). +Le(i460,i2730). +Le(i530,i2730). +Le(i600,i2730). +Le(i660,i2730). +Le(i670,i2730). +Le(i710,i2730). +Le(i740,i2730). +Le(i810,i2730). +Le(i850,i2730). +Le(i880,i2730). +Le(i890,i2730). +Le(i920,i2730). +Le(i960,i2730). +Le(i990,i2730). +Le(i1030,i2730). +Le(i1060,i2730). +Le(i1100,i2730). +Le(i1130,i2730). +Le(i1170,i2730). +Le(i1200,i2730). +Le(i1240,i2730). +Le(i1260,i2730). +Le(i1270,i2730). +Le(i1290,i2730). +Le(i1310,i2730). +Le(i1320,i2730). +Le(i1330,i2730). +Le(i1350,i2730). +Le(i1360,i2730). +Le(i1380,i2730). +Le(i1390,i2730). +Le(i1420,i2730). +Le(i1430,i2730). +Le(i1450,i2730). +Le(i1460,i2730). +Le(i1490,i2730). +Le(i1520,i2730). +Le(i1530,i2730). +Le(i1540,i2730). +Le(i1560,i2730). +Le(i1590,i2730). +Le(i1630,i2730). +Le(i1660,i2730). +Le(i1700,i2730). +Le(i1730,i2730). +Le(i1760,i2730). +Le(i1770,i2730). +Le(i1810,i2730). +Le(i1840,i2730). +Le(i1880,i2730). +Le(i1910,i2730). +Le(i1950,i2730). +Le(i1980,i2730). +Le(i2020,i2730). +Le(i2050,i2730). +Le(i2090,i2730). +Le(i2120,i2730). +Le(i2160,i2730). +Le(i2190,i2730). +Le(i2200,i2730). +Le(i2230,i2730). +Le(i2270,i2730). +Le(i2300,i2730). +Le(i2340,i2730). +Le(i2370,i2730). +Le(i2410,i2730). +Le(i2420,i2730). +Le(i2440,i2730). +Le(i2480,i2730). +Le(i2510,i2730). +Le(i2550,i2730). +Le(i2580,i2730). +Le(i2620,i2730). +Le(i2640,i2730). +Le(i2660,i2730). +Le(i-30,i2760). +Le(i0,i2760). +Le(i13,i2760). +Le(i26,i2760). +Le(i39,i2760). +Le(i52,i2760). +Le(i60,i2760). +Le(i65,i2760). +Le(i70,i2760). +Le(i78,i2760). +Le(i90,i2760). +Le(i91,i2760). +Le(i104,i2760). +Le(i117,i2760). +Le(i130,i2760). +Le(i143,i2760). +Le(i156,i2760). +Le(i169,i2760). +Le(i182,i2760). +Le(i195,i2760). +Le(i208,i2760). +Le(i221,i2760). +Le(i234,i2760). +Le(i247,i2760). +Le(i260,i2760). +Le(i460,i2760). +Le(i530,i2760). +Le(i600,i2760). +Le(i660,i2760). +Le(i670,i2760). +Le(i710,i2760). +Le(i740,i2760). +Le(i810,i2760). +Le(i850,i2760). +Le(i880,i2760). +Le(i890,i2760). +Le(i920,i2760). +Le(i960,i2760). +Le(i990,i2760). +Le(i1030,i2760). +Le(i1060,i2760). +Le(i1100,i2760). +Le(i1130,i2760). +Le(i1170,i2760). +Le(i1200,i2760). +Le(i1240,i2760). +Le(i1260,i2760). +Le(i1270,i2760). +Le(i1290,i2760). +Le(i1310,i2760). +Le(i1320,i2760). +Le(i1330,i2760). +Le(i1350,i2760). +Le(i1360,i2760). +Le(i1380,i2760). +Le(i1390,i2760). +Le(i1420,i2760). +Le(i1430,i2760). +Le(i1450,i2760). +Le(i1460,i2760). +Le(i1490,i2760). +Le(i1520,i2760). +Le(i1530,i2760). +Le(i1540,i2760). +Le(i1560,i2760). +Le(i1590,i2760). +Le(i1630,i2760). +Le(i1660,i2760). +Le(i1700,i2760). +Le(i1730,i2760). +Le(i1760,i2760). +Le(i1770,i2760). +Le(i1810,i2760). +Le(i1840,i2760). +Le(i1880,i2760). +Le(i1910,i2760). +Le(i1950,i2760). +Le(i1980,i2760). +Le(i2020,i2760). +Le(i2050,i2760). +Le(i2090,i2760). +Le(i2120,i2760). +Le(i2160,i2760). +Le(i2190,i2760). +Le(i2200,i2760). +Le(i2230,i2760). +Le(i2270,i2760). +Le(i2300,i2760). +Le(i2340,i2760). +Le(i2370,i2760). +Le(i2410,i2760). +Le(i2420,i2760). +Le(i2440,i2760). +Le(i2480,i2760). +Le(i2510,i2760). +Le(i2550,i2760). +Le(i2580,i2760). +Le(i2620,i2760). +Le(i2640,i2760). +Le(i2660,i2760). +Le(i2730,i2760). +Le(i-30,i2800). +Le(i0,i2800). 
+Le(i13,i2800). +Le(i26,i2800). +Le(i39,i2800). +Le(i52,i2800). +Le(i60,i2800). +Le(i65,i2800). +Le(i70,i2800). +Le(i78,i2800). +Le(i90,i2800). +Le(i91,i2800). +Le(i104,i2800). +Le(i117,i2800). +Le(i130,i2800). +Le(i143,i2800). +Le(i156,i2800). +Le(i169,i2800). +Le(i182,i2800). +Le(i195,i2800). +Le(i208,i2800). +Le(i221,i2800). +Le(i234,i2800). +Le(i247,i2800). +Le(i260,i2800). +Le(i460,i2800). +Le(i530,i2800). +Le(i600,i2800). +Le(i660,i2800). +Le(i670,i2800). +Le(i710,i2800). +Le(i740,i2800). +Le(i810,i2800). +Le(i850,i2800). +Le(i880,i2800). +Le(i890,i2800). +Le(i920,i2800). +Le(i960,i2800). +Le(i990,i2800). +Le(i1030,i2800). +Le(i1060,i2800). +Le(i1100,i2800). +Le(i1130,i2800). +Le(i1170,i2800). +Le(i1200,i2800). +Le(i1240,i2800). +Le(i1260,i2800). +Le(i1270,i2800). +Le(i1290,i2800). +Le(i1310,i2800). +Le(i1320,i2800). +Le(i1330,i2800). +Le(i1350,i2800). +Le(i1360,i2800). +Le(i1380,i2800). +Le(i1390,i2800). +Le(i1420,i2800). +Le(i1430,i2800). +Le(i1450,i2800). +Le(i1460,i2800). +Le(i1490,i2800). +Le(i1520,i2800). +Le(i1530,i2800). +Le(i1540,i2800). +Le(i1560,i2800). +Le(i1590,i2800). +Le(i1630,i2800). +Le(i1660,i2800). +Le(i1700,i2800). +Le(i1730,i2800). +Le(i1760,i2800). +Le(i1770,i2800). +Le(i1810,i2800). +Le(i1840,i2800). +Le(i1880,i2800). +Le(i1910,i2800). +Le(i1950,i2800). +Le(i1980,i2800). +Le(i2020,i2800). +Le(i2050,i2800). +Le(i2090,i2800). +Le(i2120,i2800). +Le(i2160,i2800). +Le(i2190,i2800). +Le(i2200,i2800). +Le(i2230,i2800). +Le(i2270,i2800). +Le(i2300,i2800). +Le(i2340,i2800). +Le(i2370,i2800). +Le(i2410,i2800). +Le(i2420,i2800). +Le(i2440,i2800). +Le(i2480,i2800). +Le(i2510,i2800). +Le(i2550,i2800). +Le(i2580,i2800). +Le(i2620,i2800). +Le(i2640,i2800). +Le(i2660,i2800). +Le(i2730,i2800). +Le(i2760,i2800). +Le(i-30,i2830). +Le(i0,i2830). +Le(i13,i2830). +Le(i26,i2830). +Le(i39,i2830). +Le(i52,i2830). +Le(i60,i2830). +Le(i65,i2830). +Le(i70,i2830). +Le(i78,i2830). +Le(i90,i2830). +Le(i91,i2830). +Le(i104,i2830). +Le(i117,i2830). +Le(i130,i2830). +Le(i143,i2830). +Le(i156,i2830). +Le(i169,i2830). +Le(i182,i2830). +Le(i195,i2830). +Le(i208,i2830). +Le(i221,i2830). +Le(i234,i2830). +Le(i247,i2830). +Le(i260,i2830). +Le(i460,i2830). +Le(i530,i2830). +Le(i600,i2830). +Le(i660,i2830). +Le(i670,i2830). +Le(i710,i2830). +Le(i740,i2830). +Le(i810,i2830). +Le(i850,i2830). +Le(i880,i2830). +Le(i890,i2830). +Le(i920,i2830). +Le(i960,i2830). +Le(i990,i2830). +Le(i1030,i2830). +Le(i1060,i2830). +Le(i1100,i2830). +Le(i1130,i2830). +Le(i1170,i2830). +Le(i1200,i2830). +Le(i1240,i2830). +Le(i1260,i2830). +Le(i1270,i2830). +Le(i1290,i2830). +Le(i1310,i2830). +Le(i1320,i2830). +Le(i1330,i2830). +Le(i1350,i2830). +Le(i1360,i2830). +Le(i1380,i2830). +Le(i1390,i2830). +Le(i1420,i2830). +Le(i1430,i2830). +Le(i1450,i2830). +Le(i1460,i2830). +Le(i1490,i2830). +Le(i1520,i2830). +Le(i1530,i2830). +Le(i1540,i2830). +Le(i1560,i2830). +Le(i1590,i2830). +Le(i1630,i2830). +Le(i1660,i2830). +Le(i1700,i2830). +Le(i1730,i2830). +Le(i1760,i2830). +Le(i1770,i2830). +Le(i1810,i2830). +Le(i1840,i2830). +Le(i1880,i2830). +Le(i1910,i2830). +Le(i1950,i2830). +Le(i1980,i2830). +Le(i2020,i2830). +Le(i2050,i2830). +Le(i2090,i2830). +Le(i2120,i2830). +Le(i2160,i2830). +Le(i2190,i2830). +Le(i2200,i2830). +Le(i2230,i2830). +Le(i2270,i2830). +Le(i2300,i2830). +Le(i2340,i2830). +Le(i2370,i2830). +Le(i2410,i2830). +Le(i2420,i2830). +Le(i2440,i2830). +Le(i2480,i2830). +Le(i2510,i2830). +Le(i2550,i2830). +Le(i2580,i2830). +Le(i2620,i2830). +Le(i2640,i2830). +Le(i2660,i2830). +Le(i2730,i2830). +Le(i2760,i2830). 
+Le(i2800,i2830). +Le(i-30,i2860). +Le(i0,i2860). +Le(i13,i2860). +Le(i26,i2860). +Le(i39,i2860). +Le(i52,i2860). +Le(i60,i2860). +Le(i65,i2860). +Le(i70,i2860). +Le(i78,i2860). +Le(i90,i2860). +Le(i91,i2860). +Le(i104,i2860). +Le(i117,i2860). +Le(i130,i2860). +Le(i143,i2860). +Le(i156,i2860). +Le(i169,i2860). +Le(i182,i2860). +Le(i195,i2860). +Le(i208,i2860). +Le(i221,i2860). +Le(i234,i2860). +Le(i247,i2860). +Le(i260,i2860). +Le(i460,i2860). +Le(i530,i2860). +Le(i600,i2860). +Le(i660,i2860). +Le(i670,i2860). +Le(i710,i2860). +Le(i740,i2860). +Le(i810,i2860). +Le(i850,i2860). +Le(i880,i2860). +Le(i890,i2860). +Le(i920,i2860). +Le(i960,i2860). +Le(i990,i2860). +Le(i1030,i2860). +Le(i1060,i2860). +Le(i1100,i2860). +Le(i1130,i2860). +Le(i1170,i2860). +Le(i1200,i2860). +Le(i1240,i2860). +Le(i1260,i2860). +Le(i1270,i2860). +Le(i1290,i2860). +Le(i1310,i2860). +Le(i1320,i2860). +Le(i1330,i2860). +Le(i1350,i2860). +Le(i1360,i2860). +Le(i1380,i2860). +Le(i1390,i2860). +Le(i1420,i2860). +Le(i1430,i2860). +Le(i1450,i2860). +Le(i1460,i2860). +Le(i1490,i2860). +Le(i1520,i2860). +Le(i1530,i2860). +Le(i1540,i2860). +Le(i1560,i2860). +Le(i1590,i2860). +Le(i1630,i2860). +Le(i1660,i2860). +Le(i1700,i2860). +Le(i1730,i2860). +Le(i1760,i2860). +Le(i1770,i2860). +Le(i1810,i2860). +Le(i1840,i2860). +Le(i1880,i2860). +Le(i1910,i2860). +Le(i1950,i2860). +Le(i1980,i2860). +Le(i2020,i2860). +Le(i2050,i2860). +Le(i2090,i2860). +Le(i2120,i2860). +Le(i2160,i2860). +Le(i2190,i2860). +Le(i2200,i2860). +Le(i2230,i2860). +Le(i2270,i2860). +Le(i2300,i2860). +Le(i2340,i2860). +Le(i2370,i2860). +Le(i2410,i2860). +Le(i2420,i2860). +Le(i2440,i2860). +Le(i2480,i2860). +Le(i2510,i2860). +Le(i2550,i2860). +Le(i2580,i2860). +Le(i2620,i2860). +Le(i2640,i2860). +Le(i2660,i2860). +Le(i2730,i2860). +Le(i2760,i2860). +Le(i2800,i2860). +Le(i2830,i2860). +Le(i-30,i2870). +Le(i0,i2870). +Le(i13,i2870). +Le(i26,i2870). +Le(i39,i2870). +Le(i52,i2870). +Le(i60,i2870). +Le(i65,i2870). +Le(i70,i2870). +Le(i78,i2870). +Le(i90,i2870). +Le(i91,i2870). +Le(i104,i2870). +Le(i117,i2870). +Le(i130,i2870). +Le(i143,i2870). +Le(i156,i2870). +Le(i169,i2870). +Le(i182,i2870). +Le(i195,i2870). +Le(i208,i2870). +Le(i221,i2870). +Le(i234,i2870). +Le(i247,i2870). +Le(i260,i2870). +Le(i460,i2870). +Le(i530,i2870). +Le(i600,i2870). +Le(i660,i2870). +Le(i670,i2870). +Le(i710,i2870). +Le(i740,i2870). +Le(i810,i2870). +Le(i850,i2870). +Le(i880,i2870). +Le(i890,i2870). +Le(i920,i2870). +Le(i960,i2870). +Le(i990,i2870). +Le(i1030,i2870). +Le(i1060,i2870). +Le(i1100,i2870). +Le(i1130,i2870). +Le(i1170,i2870). +Le(i1200,i2870). +Le(i1240,i2870). +Le(i1260,i2870). +Le(i1270,i2870). +Le(i1290,i2870). +Le(i1310,i2870). +Le(i1320,i2870). +Le(i1330,i2870). +Le(i1350,i2870). +Le(i1360,i2870). +Le(i1380,i2870). +Le(i1390,i2870). +Le(i1420,i2870). +Le(i1430,i2870). +Le(i1450,i2870). +Le(i1460,i2870). +Le(i1490,i2870). +Le(i1520,i2870). +Le(i1530,i2870). +Le(i1540,i2870). +Le(i1560,i2870). +Le(i1590,i2870). +Le(i1630,i2870). +Le(i1660,i2870). +Le(i1700,i2870). +Le(i1730,i2870). +Le(i1760,i2870). +Le(i1770,i2870). +Le(i1810,i2870). +Le(i1840,i2870). +Le(i1880,i2870). +Le(i1910,i2870). +Le(i1950,i2870). +Le(i1980,i2870). +Le(i2020,i2870). +Le(i2050,i2870). +Le(i2090,i2870). +Le(i2120,i2870). +Le(i2160,i2870). +Le(i2190,i2870). +Le(i2200,i2870). +Le(i2230,i2870). +Le(i2270,i2870). +Le(i2300,i2870). +Le(i2340,i2870). +Le(i2370,i2870). +Le(i2410,i2870). +Le(i2420,i2870). +Le(i2440,i2870). +Le(i2480,i2870). +Le(i2510,i2870). +Le(i2550,i2870). +Le(i2580,i2870). +Le(i2620,i2870). 
+Le(i2640,i2870). +Le(i2660,i2870). +Le(i2730,i2870). +Le(i2760,i2870). +Le(i2800,i2870). +Le(i2830,i2870). +Le(i2860,i2870). +Le(i-30,i2940). +Le(i0,i2940). +Le(i13,i2940). +Le(i26,i2940). +Le(i39,i2940). +Le(i52,i2940). +Le(i60,i2940). +Le(i65,i2940). +Le(i70,i2940). +Le(i78,i2940). +Le(i90,i2940). +Le(i91,i2940). +Le(i104,i2940). +Le(i117,i2940). +Le(i130,i2940). +Le(i143,i2940). +Le(i156,i2940). +Le(i169,i2940). +Le(i182,i2940). +Le(i195,i2940). +Le(i208,i2940). +Le(i221,i2940). +Le(i234,i2940). +Le(i247,i2940). +Le(i260,i2940). +Le(i460,i2940). +Le(i530,i2940). +Le(i600,i2940). +Le(i660,i2940). +Le(i670,i2940). +Le(i710,i2940). +Le(i740,i2940). +Le(i810,i2940). +Le(i850,i2940). +Le(i880,i2940). +Le(i890,i2940). +Le(i920,i2940). +Le(i960,i2940). +Le(i990,i2940). +Le(i1030,i2940). +Le(i1060,i2940). +Le(i1100,i2940). +Le(i1130,i2940). +Le(i1170,i2940). +Le(i1200,i2940). +Le(i1240,i2940). +Le(i1260,i2940). +Le(i1270,i2940). +Le(i1290,i2940). +Le(i1310,i2940). +Le(i1320,i2940). +Le(i1330,i2940). +Le(i1350,i2940). +Le(i1360,i2940). +Le(i1380,i2940). +Le(i1390,i2940). +Le(i1420,i2940). +Le(i1430,i2940). +Le(i1450,i2940). +Le(i1460,i2940). +Le(i1490,i2940). +Le(i1520,i2940). +Le(i1530,i2940). +Le(i1540,i2940). +Le(i1560,i2940). +Le(i1590,i2940). +Le(i1630,i2940). +Le(i1660,i2940). +Le(i1700,i2940). +Le(i1730,i2940). +Le(i1760,i2940). +Le(i1770,i2940). +Le(i1810,i2940). +Le(i1840,i2940). +Le(i1880,i2940). +Le(i1910,i2940). +Le(i1950,i2940). +Le(i1980,i2940). +Le(i2020,i2940). +Le(i2050,i2940). +Le(i2090,i2940). +Le(i2120,i2940). +Le(i2160,i2940). +Le(i2190,i2940). +Le(i2200,i2940). +Le(i2230,i2940). +Le(i2270,i2940). +Le(i2300,i2940). +Le(i2340,i2940). +Le(i2370,i2940). +Le(i2410,i2940). +Le(i2420,i2940). +Le(i2440,i2940). +Le(i2480,i2940). +Le(i2510,i2940). +Le(i2550,i2940). +Le(i2580,i2940). +Le(i2620,i2940). +Le(i2640,i2940). +Le(i2660,i2940). +Le(i2730,i2940). +Le(i2760,i2940). +Le(i2800,i2940). +Le(i2830,i2940). +Le(i2860,i2940). +Le(i2870,i2940). +Le(i-30,i2970). +Le(i0,i2970). +Le(i13,i2970). +Le(i26,i2970). +Le(i39,i2970). +Le(i52,i2970). +Le(i60,i2970). +Le(i65,i2970). +Le(i70,i2970). +Le(i78,i2970). +Le(i90,i2970). +Le(i91,i2970). +Le(i104,i2970). +Le(i117,i2970). +Le(i130,i2970). +Le(i143,i2970). +Le(i156,i2970). +Le(i169,i2970). +Le(i182,i2970). +Le(i195,i2970). +Le(i208,i2970). +Le(i221,i2970). +Le(i234,i2970). +Le(i247,i2970). +Le(i260,i2970). +Le(i460,i2970). +Le(i530,i2970). +Le(i600,i2970). +Le(i660,i2970). +Le(i670,i2970). +Le(i710,i2970). +Le(i740,i2970). +Le(i810,i2970). +Le(i850,i2970). +Le(i880,i2970). +Le(i890,i2970). +Le(i920,i2970). +Le(i960,i2970). +Le(i990,i2970). +Le(i1030,i2970). +Le(i1060,i2970). +Le(i1100,i2970). +Le(i1130,i2970). +Le(i1170,i2970). +Le(i1200,i2970). +Le(i1240,i2970). +Le(i1260,i2970). +Le(i1270,i2970). +Le(i1290,i2970). +Le(i1310,i2970). +Le(i1320,i2970). +Le(i1330,i2970). +Le(i1350,i2970). +Le(i1360,i2970). +Le(i1380,i2970). +Le(i1390,i2970). +Le(i1420,i2970). +Le(i1430,i2970). +Le(i1450,i2970). +Le(i1460,i2970). +Le(i1490,i2970). +Le(i1520,i2970). +Le(i1530,i2970). +Le(i1540,i2970). +Le(i1560,i2970). +Le(i1590,i2970). +Le(i1630,i2970). +Le(i1660,i2970). +Le(i1700,i2970). +Le(i1730,i2970). +Le(i1760,i2970). +Le(i1770,i2970). +Le(i1810,i2970). +Le(i1840,i2970). +Le(i1880,i2970). +Le(i1910,i2970). +Le(i1950,i2970). +Le(i1980,i2970). +Le(i2020,i2970). +Le(i2050,i2970). +Le(i2090,i2970). +Le(i2120,i2970). +Le(i2160,i2970). +Le(i2190,i2970). +Le(i2200,i2970). +Le(i2230,i2970). +Le(i2270,i2970). +Le(i2300,i2970). +Le(i2340,i2970). +Le(i2370,i2970). 
+Le(i2410,i2970). +Le(i2420,i2970). +Le(i2440,i2970). +Le(i2480,i2970). +Le(i2510,i2970). +Le(i2550,i2970). +Le(i2580,i2970). +Le(i2620,i2970). +Le(i2640,i2970). +Le(i2660,i2970). +Le(i2730,i2970). +Le(i2760,i2970). +Le(i2800,i2970). +Le(i2830,i2970). +Le(i2860,i2970). +Le(i2870,i2970). +Le(i2940,i2970). +Le(i-30,i3010). +Le(i0,i3010). +Le(i13,i3010). +Le(i26,i3010). +Le(i39,i3010). +Le(i52,i3010). +Le(i60,i3010). +Le(i65,i3010). +Le(i70,i3010). +Le(i78,i3010). +Le(i90,i3010). +Le(i91,i3010). +Le(i104,i3010). +Le(i117,i3010). +Le(i130,i3010). +Le(i143,i3010). +Le(i156,i3010). +Le(i169,i3010). +Le(i182,i3010). +Le(i195,i3010). +Le(i208,i3010). +Le(i221,i3010). +Le(i234,i3010). +Le(i247,i3010). +Le(i260,i3010). +Le(i460,i3010). +Le(i530,i3010). +Le(i600,i3010). +Le(i660,i3010). +Le(i670,i3010). +Le(i710,i3010). +Le(i740,i3010). +Le(i810,i3010). +Le(i850,i3010). +Le(i880,i3010). +Le(i890,i3010). +Le(i920,i3010). +Le(i960,i3010). +Le(i990,i3010). +Le(i1030,i3010). +Le(i1060,i3010). +Le(i1100,i3010). +Le(i1130,i3010). +Le(i1170,i3010). +Le(i1200,i3010). +Le(i1240,i3010). +Le(i1260,i3010). +Le(i1270,i3010). +Le(i1290,i3010). +Le(i1310,i3010). +Le(i1320,i3010). +Le(i1330,i3010). +Le(i1350,i3010). +Le(i1360,i3010). +Le(i1380,i3010). +Le(i1390,i3010). +Le(i1420,i3010). +Le(i1430,i3010). +Le(i1450,i3010). +Le(i1460,i3010). +Le(i1490,i3010). +Le(i1520,i3010). +Le(i1530,i3010). +Le(i1540,i3010). +Le(i1560,i3010). +Le(i1590,i3010). +Le(i1630,i3010). +Le(i1660,i3010). +Le(i1700,i3010). +Le(i1730,i3010). +Le(i1760,i3010). +Le(i1770,i3010). +Le(i1810,i3010). +Le(i1840,i3010). +Le(i1880,i3010). +Le(i1910,i3010). +Le(i1950,i3010). +Le(i1980,i3010). +Le(i2020,i3010). +Le(i2050,i3010). +Le(i2090,i3010). +Le(i2120,i3010). +Le(i2160,i3010). +Le(i2190,i3010). +Le(i2200,i3010). +Le(i2230,i3010). +Le(i2270,i3010). +Le(i2300,i3010). +Le(i2340,i3010). +Le(i2370,i3010). +Le(i2410,i3010). +Le(i2420,i3010). +Le(i2440,i3010). +Le(i2480,i3010). +Le(i2510,i3010). +Le(i2550,i3010). +Le(i2580,i3010). +Le(i2620,i3010). +Le(i2640,i3010). +Le(i2660,i3010). +Le(i2730,i3010). +Le(i2760,i3010). +Le(i2800,i3010). +Le(i2830,i3010). +Le(i2860,i3010). +Le(i2870,i3010). +Le(i2940,i3010). +Le(i2970,i3010). +Le(i-30,i3040). +Le(i0,i3040). +Le(i13,i3040). +Le(i26,i3040). +Le(i39,i3040). +Le(i52,i3040). +Le(i60,i3040). +Le(i65,i3040). +Le(i70,i3040). +Le(i78,i3040). +Le(i90,i3040). +Le(i91,i3040). +Le(i104,i3040). +Le(i117,i3040). +Le(i130,i3040). +Le(i143,i3040). +Le(i156,i3040). +Le(i169,i3040). +Le(i182,i3040). +Le(i195,i3040). +Le(i208,i3040). +Le(i221,i3040). +Le(i234,i3040). +Le(i247,i3040). +Le(i260,i3040). +Le(i460,i3040). +Le(i530,i3040). +Le(i600,i3040). +Le(i660,i3040). +Le(i670,i3040). +Le(i710,i3040). +Le(i740,i3040). +Le(i810,i3040). +Le(i850,i3040). +Le(i880,i3040). +Le(i890,i3040). +Le(i920,i3040). +Le(i960,i3040). +Le(i990,i3040). +Le(i1030,i3040). +Le(i1060,i3040). +Le(i1100,i3040). +Le(i1130,i3040). +Le(i1170,i3040). +Le(i1200,i3040). +Le(i1240,i3040). +Le(i1260,i3040). +Le(i1270,i3040). +Le(i1290,i3040). +Le(i1310,i3040). +Le(i1320,i3040). +Le(i1330,i3040). +Le(i1350,i3040). +Le(i1360,i3040). +Le(i1380,i3040). +Le(i1390,i3040). +Le(i1420,i3040). +Le(i1430,i3040). +Le(i1450,i3040). +Le(i1460,i3040). +Le(i1490,i3040). +Le(i1520,i3040). +Le(i1530,i3040). +Le(i1540,i3040). +Le(i1560,i3040). +Le(i1590,i3040). +Le(i1630,i3040). +Le(i1660,i3040). +Le(i1700,i3040). +Le(i1730,i3040). +Le(i1760,i3040). +Le(i1770,i3040). +Le(i1810,i3040). +Le(i1840,i3040). +Le(i1880,i3040). +Le(i1910,i3040). +Le(i1950,i3040). +Le(i1980,i3040). 
+Le(i2020,i3040). +Le(i2050,i3040). +Le(i2090,i3040). +Le(i2120,i3040). +Le(i2160,i3040). +Le(i2190,i3040). +Le(i2200,i3040). +Le(i2230,i3040). +Le(i2270,i3040). +Le(i2300,i3040). +Le(i2340,i3040). +Le(i2370,i3040). +Le(i2410,i3040). +Le(i2420,i3040). +Le(i2440,i3040). +Le(i2480,i3040). +Le(i2510,i3040). +Le(i2550,i3040). +Le(i2580,i3040). +Le(i2620,i3040). +Le(i2640,i3040). +Le(i2660,i3040). +Le(i2730,i3040). +Le(i2760,i3040). +Le(i2800,i3040). +Le(i2830,i3040). +Le(i2860,i3040). +Le(i2870,i3040). +Le(i2940,i3040). +Le(i2970,i3040). +Le(i3010,i3040). +Le(i-30,i3080). +Le(i0,i3080). +Le(i13,i3080). +Le(i26,i3080). +Le(i39,i3080). +Le(i52,i3080). +Le(i60,i3080). +Le(i65,i3080). +Le(i70,i3080). +Le(i78,i3080). +Le(i90,i3080). +Le(i91,i3080). +Le(i104,i3080). +Le(i117,i3080). +Le(i130,i3080). +Le(i143,i3080). +Le(i156,i3080). +Le(i169,i3080). +Le(i182,i3080). +Le(i195,i3080). +Le(i208,i3080). +Le(i221,i3080). +Le(i234,i3080). +Le(i247,i3080). +Le(i260,i3080). +Le(i460,i3080). +Le(i530,i3080). +Le(i600,i3080). +Le(i660,i3080). +Le(i670,i3080). +Le(i710,i3080). +Le(i740,i3080). +Le(i810,i3080). +Le(i850,i3080). +Le(i880,i3080). +Le(i890,i3080). +Le(i920,i3080). +Le(i960,i3080). +Le(i990,i3080). +Le(i1030,i3080). +Le(i1060,i3080). +Le(i1100,i3080). +Le(i1130,i3080). +Le(i1170,i3080). +Le(i1200,i3080). +Le(i1240,i3080). +Le(i1260,i3080). +Le(i1270,i3080). +Le(i1290,i3080). +Le(i1310,i3080). +Le(i1320,i3080). +Le(i1330,i3080). +Le(i1350,i3080). +Le(i1360,i3080). +Le(i1380,i3080). +Le(i1390,i3080). +Le(i1420,i3080). +Le(i1430,i3080). +Le(i1450,i3080). +Le(i1460,i3080). +Le(i1490,i3080). +Le(i1520,i3080). +Le(i1530,i3080). +Le(i1540,i3080). +Le(i1560,i3080). +Le(i1590,i3080). +Le(i1630,i3080). +Le(i1660,i3080). +Le(i1700,i3080). +Le(i1730,i3080). +Le(i1760,i3080). +Le(i1770,i3080). +Le(i1810,i3080). +Le(i1840,i3080). +Le(i1880,i3080). +Le(i1910,i3080). +Le(i1950,i3080). +Le(i1980,i3080). +Le(i2020,i3080). +Le(i2050,i3080). +Le(i2090,i3080). +Le(i2120,i3080). +Le(i2160,i3080). +Le(i2190,i3080). +Le(i2200,i3080). +Le(i2230,i3080). +Le(i2270,i3080). +Le(i2300,i3080). +Le(i2340,i3080). +Le(i2370,i3080). +Le(i2410,i3080). +Le(i2420,i3080). +Le(i2440,i3080). +Le(i2480,i3080). +Le(i2510,i3080). +Le(i2550,i3080). +Le(i2580,i3080). +Le(i2620,i3080). +Le(i2640,i3080). +Le(i2660,i3080). +Le(i2730,i3080). +Le(i2760,i3080). +Le(i2800,i3080). +Le(i2830,i3080). +Le(i2860,i3080). +Le(i2870,i3080). +Le(i2940,i3080). +Le(i2970,i3080). +Le(i3010,i3080). +Le(i3040,i3080). +Le(i-30,i3120). +Le(i0,i3120). +Le(i13,i3120). +Le(i26,i3120). +Le(i39,i3120). +Le(i52,i3120). +Le(i60,i3120). +Le(i65,i3120). +Le(i70,i3120). +Le(i78,i3120). +Le(i90,i3120). +Le(i91,i3120). +Le(i104,i3120). +Le(i117,i3120). +Le(i130,i3120). +Le(i143,i3120). +Le(i156,i3120). +Le(i169,i3120). +Le(i182,i3120). +Le(i195,i3120). +Le(i208,i3120). +Le(i221,i3120). +Le(i234,i3120). +Le(i247,i3120). +Le(i260,i3120). +Le(i460,i3120). +Le(i530,i3120). +Le(i600,i3120). +Le(i660,i3120). +Le(i670,i3120). +Le(i710,i3120). +Le(i740,i3120). +Le(i810,i3120). +Le(i850,i3120). +Le(i880,i3120). +Le(i890,i3120). +Le(i920,i3120). +Le(i960,i3120). +Le(i990,i3120). +Le(i1030,i3120). +Le(i1060,i3120). +Le(i1100,i3120). +Le(i1130,i3120). +Le(i1170,i3120). +Le(i1200,i3120). +Le(i1240,i3120). +Le(i1260,i3120). +Le(i1270,i3120). +Le(i1290,i3120). +Le(i1310,i3120). +Le(i1320,i3120). +Le(i1330,i3120). +Le(i1350,i3120). +Le(i1360,i3120). +Le(i1380,i3120). +Le(i1390,i3120). +Le(i1420,i3120). +Le(i1430,i3120). +Le(i1450,i3120). +Le(i1460,i3120). +Le(i1490,i3120). +Le(i1520,i3120). 
+Le(i1530,i3120). +Le(i1540,i3120). +Le(i1560,i3120). +Le(i1590,i3120). +Le(i1630,i3120). +Le(i1660,i3120). +Le(i1700,i3120). +Le(i1730,i3120). +Le(i1760,i3120). +Le(i1770,i3120). +Le(i1810,i3120). +Le(i1840,i3120). +Le(i1880,i3120). +Le(i1910,i3120). +Le(i1950,i3120). +Le(i1980,i3120). +Le(i2020,i3120). +Le(i2050,i3120). +Le(i2090,i3120). +Le(i2120,i3120). +Le(i2160,i3120). +Le(i2190,i3120). +Le(i2200,i3120). +Le(i2230,i3120). +Le(i2270,i3120). +Le(i2300,i3120). +Le(i2340,i3120). +Le(i2370,i3120). +Le(i2410,i3120). +Le(i2420,i3120). +Le(i2440,i3120). +Le(i2480,i3120). +Le(i2510,i3120). +Le(i2550,i3120). +Le(i2580,i3120). +Le(i2620,i3120). +Le(i2640,i3120). +Le(i2660,i3120). +Le(i2730,i3120). +Le(i2760,i3120). +Le(i2800,i3120). +Le(i2830,i3120). +Le(i2860,i3120). +Le(i2870,i3120). +Le(i2940,i3120). +Le(i2970,i3120). +Le(i3010,i3120). +Le(i3040,i3120). +Le(i3080,i3120). +Le(i-30,i3150). +Le(i0,i3150). +Le(i13,i3150). +Le(i26,i3150). +Le(i39,i3150). +Le(i52,i3150). +Le(i60,i3150). +Le(i65,i3150). +Le(i70,i3150). +Le(i78,i3150). +Le(i90,i3150). +Le(i91,i3150). +Le(i104,i3150). +Le(i117,i3150). +Le(i130,i3150). +Le(i143,i3150). +Le(i156,i3150). +Le(i169,i3150). +Le(i182,i3150). +Le(i195,i3150). +Le(i208,i3150). +Le(i221,i3150). +Le(i234,i3150). +Le(i247,i3150). +Le(i260,i3150). +Le(i460,i3150). +Le(i530,i3150). +Le(i600,i3150). +Le(i660,i3150). +Le(i670,i3150). +Le(i710,i3150). +Le(i740,i3150). +Le(i810,i3150). +Le(i850,i3150). +Le(i880,i3150). +Le(i890,i3150). +Le(i920,i3150). +Le(i960,i3150). +Le(i990,i3150). +Le(i1030,i3150). +Le(i1060,i3150). +Le(i1100,i3150). +Le(i1130,i3150). +Le(i1170,i3150). +Le(i1200,i3150). +Le(i1240,i3150). +Le(i1260,i3150). +Le(i1270,i3150). +Le(i1290,i3150). +Le(i1310,i3150). +Le(i1320,i3150). +Le(i1330,i3150). +Le(i1350,i3150). +Le(i1360,i3150). +Le(i1380,i3150). +Le(i1390,i3150). +Le(i1420,i3150). +Le(i1430,i3150). +Le(i1450,i3150). +Le(i1460,i3150). +Le(i1490,i3150). +Le(i1520,i3150). +Le(i1530,i3150). +Le(i1540,i3150). +Le(i1560,i3150). +Le(i1590,i3150). +Le(i1630,i3150). +Le(i1660,i3150). +Le(i1700,i3150). +Le(i1730,i3150). +Le(i1760,i3150). +Le(i1770,i3150). +Le(i1810,i3150). +Le(i1840,i3150). +Le(i1880,i3150). +Le(i1910,i3150). +Le(i1950,i3150). +Le(i1980,i3150). +Le(i2020,i3150). +Le(i2050,i3150). +Le(i2090,i3150). +Le(i2120,i3150). +Le(i2160,i3150). +Le(i2190,i3150). +Le(i2200,i3150). +Le(i2230,i3150). +Le(i2270,i3150). +Le(i2300,i3150). +Le(i2340,i3150). +Le(i2370,i3150). +Le(i2410,i3150). +Le(i2420,i3150). +Le(i2440,i3150). +Le(i2480,i3150). +Le(i2510,i3150). +Le(i2550,i3150). +Le(i2580,i3150). +Le(i2620,i3150). +Le(i2640,i3150). +Le(i2660,i3150). +Le(i2730,i3150). +Le(i2760,i3150). +Le(i2800,i3150). +Le(i2830,i3150). +Le(i2860,i3150). +Le(i2870,i3150). +Le(i2940,i3150). +Le(i2970,i3150). +Le(i3010,i3150). +Le(i3040,i3150). +Le(i3080,i3150). +Le(i3120,i3150). +Le(i-30,i3220). +Le(i0,i3220). +Le(i13,i3220). +Le(i26,i3220). +Le(i39,i3220). +Le(i52,i3220). +Le(i60,i3220). +Le(i65,i3220). +Le(i70,i3220). +Le(i78,i3220). +Le(i90,i3220). +Le(i91,i3220). +Le(i104,i3220). +Le(i117,i3220). +Le(i130,i3220). +Le(i143,i3220). +Le(i156,i3220). +Le(i169,i3220). +Le(i182,i3220). +Le(i195,i3220). +Le(i208,i3220). +Le(i221,i3220). +Le(i234,i3220). +Le(i247,i3220). +Le(i260,i3220). +Le(i460,i3220). +Le(i530,i3220). +Le(i600,i3220). +Le(i660,i3220). +Le(i670,i3220). +Le(i710,i3220). +Le(i740,i3220). +Le(i810,i3220). +Le(i850,i3220). +Le(i880,i3220). +Le(i890,i3220). +Le(i920,i3220). +Le(i960,i3220). +Le(i990,i3220). +Le(i1030,i3220). +Le(i1060,i3220). +Le(i1100,i3220). 
+Le(i1130,i3220). +Le(i1170,i3220). +Le(i1200,i3220). +Le(i1240,i3220). +Le(i1260,i3220). +Le(i1270,i3220). +Le(i1290,i3220). +Le(i1310,i3220). +Le(i1320,i3220). +Le(i1330,i3220). +Le(i1350,i3220). +Le(i1360,i3220). +Le(i1380,i3220). +Le(i1390,i3220). +Le(i1420,i3220). +Le(i1430,i3220). +Le(i1450,i3220). +Le(i1460,i3220). +Le(i1490,i3220). +Le(i1520,i3220). +Le(i1530,i3220). +Le(i1540,i3220). +Le(i1560,i3220). +Le(i1590,i3220). +Le(i1630,i3220). +Le(i1660,i3220). +Le(i1700,i3220). +Le(i1730,i3220). +Le(i1760,i3220). +Le(i1770,i3220). +Le(i1810,i3220). +Le(i1840,i3220). +Le(i1880,i3220). +Le(i1910,i3220). +Le(i1950,i3220). +Le(i1980,i3220). +Le(i2020,i3220). +Le(i2050,i3220). +Le(i2090,i3220). +Le(i2120,i3220). +Le(i2160,i3220). +Le(i2190,i3220). +Le(i2200,i3220). +Le(i2230,i3220). +Le(i2270,i3220). +Le(i2300,i3220). +Le(i2340,i3220). +Le(i2370,i3220). +Le(i2410,i3220). +Le(i2420,i3220). +Le(i2440,i3220). +Le(i2480,i3220). +Le(i2510,i3220). +Le(i2550,i3220). +Le(i2580,i3220). +Le(i2620,i3220). +Le(i2640,i3220). +Le(i2660,i3220). +Le(i2730,i3220). +Le(i2760,i3220). +Le(i2800,i3220). +Le(i2830,i3220). +Le(i2860,i3220). +Le(i2870,i3220). +Le(i2940,i3220). +Le(i2970,i3220). +Le(i3010,i3220). +Le(i3040,i3220). +Le(i3080,i3220). +Le(i3120,i3220). +Le(i3150,i3220). +Le(i-30,i3260). +Le(i0,i3260). +Le(i13,i3260). +Le(i26,i3260). +Le(i39,i3260). +Le(i52,i3260). +Le(i60,i3260). +Le(i65,i3260). +Le(i70,i3260). +Le(i78,i3260). +Le(i90,i3260). +Le(i91,i3260). +Le(i104,i3260). +Le(i117,i3260). +Le(i130,i3260). +Le(i143,i3260). +Le(i156,i3260). +Le(i169,i3260). +Le(i182,i3260). +Le(i195,i3260). +Le(i208,i3260). +Le(i221,i3260). +Le(i234,i3260). +Le(i247,i3260). +Le(i260,i3260). +Le(i460,i3260). +Le(i530,i3260). +Le(i600,i3260). +Le(i660,i3260). +Le(i670,i3260). +Le(i710,i3260). +Le(i740,i3260). +Le(i810,i3260). +Le(i850,i3260). +Le(i880,i3260). +Le(i890,i3260). +Le(i920,i3260). +Le(i960,i3260). +Le(i990,i3260). +Le(i1030,i3260). +Le(i1060,i3260). +Le(i1100,i3260). +Le(i1130,i3260). +Le(i1170,i3260). +Le(i1200,i3260). +Le(i1240,i3260). +Le(i1260,i3260). +Le(i1270,i3260). +Le(i1290,i3260). +Le(i1310,i3260). +Le(i1320,i3260). +Le(i1330,i3260). +Le(i1350,i3260). +Le(i1360,i3260). +Le(i1380,i3260). +Le(i1390,i3260). +Le(i1420,i3260). +Le(i1430,i3260). +Le(i1450,i3260). +Le(i1460,i3260). +Le(i1490,i3260). +Le(i1520,i3260). +Le(i1530,i3260). +Le(i1540,i3260). +Le(i1560,i3260). +Le(i1590,i3260). +Le(i1630,i3260). +Le(i1660,i3260). +Le(i1700,i3260). +Le(i1730,i3260). +Le(i1760,i3260). +Le(i1770,i3260). +Le(i1810,i3260). +Le(i1840,i3260). +Le(i1880,i3260). +Le(i1910,i3260). +Le(i1950,i3260). +Le(i1980,i3260). +Le(i2020,i3260). +Le(i2050,i3260). +Le(i2090,i3260). +Le(i2120,i3260). +Le(i2160,i3260). +Le(i2190,i3260). +Le(i2200,i3260). +Le(i2230,i3260). +Le(i2270,i3260). +Le(i2300,i3260). +Le(i2340,i3260). +Le(i2370,i3260). +Le(i2410,i3260). +Le(i2420,i3260). +Le(i2440,i3260). +Le(i2480,i3260). +Le(i2510,i3260). +Le(i2550,i3260). +Le(i2580,i3260). +Le(i2620,i3260). +Le(i2640,i3260). +Le(i2660,i3260). +Le(i2730,i3260). +Le(i2760,i3260). +Le(i2800,i3260). +Le(i2830,i3260). +Le(i2860,i3260). +Le(i2870,i3260). +Le(i2940,i3260). +Le(i2970,i3260). +Le(i3010,i3260). +Le(i3040,i3260). +Le(i3080,i3260). +Le(i3120,i3260). +Le(i3150,i3260). +Le(i3220,i3260). +Le(i-30,i3290). +Le(i0,i3290). +Le(i13,i3290). +Le(i26,i3290). +Le(i39,i3290). +Le(i52,i3290). +Le(i60,i3290). +Le(i65,i3290). +Le(i70,i3290). +Le(i78,i3290). +Le(i90,i3290). +Le(i91,i3290). +Le(i104,i3290). +Le(i117,i3290). +Le(i130,i3290). +Le(i143,i3290). 
+Le(i156,i3290). +Le(i169,i3290). +Le(i182,i3290). +Le(i195,i3290). +Le(i208,i3290). +Le(i221,i3290). +Le(i234,i3290). +Le(i247,i3290). +Le(i260,i3290). +Le(i460,i3290). +Le(i530,i3290). +Le(i600,i3290). +Le(i660,i3290). +Le(i670,i3290). +Le(i710,i3290). +Le(i740,i3290). +Le(i810,i3290). +Le(i850,i3290). +Le(i880,i3290). +Le(i890,i3290). +Le(i920,i3290). +Le(i960,i3290). +Le(i990,i3290). +Le(i1030,i3290). +Le(i1060,i3290). +Le(i1100,i3290). +Le(i1130,i3290). +Le(i1170,i3290). +Le(i1200,i3290). +Le(i1240,i3290). +Le(i1260,i3290). +Le(i1270,i3290). +Le(i1290,i3290). +Le(i1310,i3290). +Le(i1320,i3290). +Le(i1330,i3290). +Le(i1350,i3290). +Le(i1360,i3290). +Le(i1380,i3290). +Le(i1390,i3290). +Le(i1420,i3290). +Le(i1430,i3290). +Le(i1450,i3290). +Le(i1460,i3290). +Le(i1490,i3290). +Le(i1520,i3290). +Le(i1530,i3290). +Le(i1540,i3290). +Le(i1560,i3290). +Le(i1590,i3290). +Le(i1630,i3290). +Le(i1660,i3290). +Le(i1700,i3290). +Le(i1730,i3290). +Le(i1760,i3290). +Le(i1770,i3290). +Le(i1810,i3290). +Le(i1840,i3290). +Le(i1880,i3290). +Le(i1910,i3290). +Le(i1950,i3290). +Le(i1980,i3290). +Le(i2020,i3290). +Le(i2050,i3290). +Le(i2090,i3290). +Le(i2120,i3290). +Le(i2160,i3290). +Le(i2190,i3290). +Le(i2200,i3290). +Le(i2230,i3290). +Le(i2270,i3290). +Le(i2300,i3290). +Le(i2340,i3290). +Le(i2370,i3290). +Le(i2410,i3290). +Le(i2420,i3290). +Le(i2440,i3290). +Le(i2480,i3290). +Le(i2510,i3290). +Le(i2550,i3290). +Le(i2580,i3290). +Le(i2620,i3290). +Le(i2640,i3290). +Le(i2660,i3290). +Le(i2730,i3290). +Le(i2760,i3290). +Le(i2800,i3290). +Le(i2830,i3290). +Le(i2860,i3290). +Le(i2870,i3290). +Le(i2940,i3290). +Le(i2970,i3290). +Le(i3010,i3290). +Le(i3040,i3290). +Le(i3080,i3290). +Le(i3120,i3290). +Le(i3150,i3290). +Le(i3220,i3290). +Le(i3260,i3290). +Le(i-30,i3300). +Le(i0,i3300). +Le(i13,i3300). +Le(i26,i3300). +Le(i39,i3300). +Le(i52,i3300). +Le(i60,i3300). +Le(i65,i3300). +Le(i70,i3300). +Le(i78,i3300). +Le(i90,i3300). +Le(i91,i3300). +Le(i104,i3300). +Le(i117,i3300). +Le(i130,i3300). +Le(i143,i3300). +Le(i156,i3300). +Le(i169,i3300). +Le(i182,i3300). +Le(i195,i3300). +Le(i208,i3300). +Le(i221,i3300). +Le(i234,i3300). +Le(i247,i3300). +Le(i260,i3300). +Le(i460,i3300). +Le(i530,i3300). +Le(i600,i3300). +Le(i660,i3300). +Le(i670,i3300). +Le(i710,i3300). +Le(i740,i3300). +Le(i810,i3300). +Le(i850,i3300). +Le(i880,i3300). +Le(i890,i3300). +Le(i920,i3300). +Le(i960,i3300). +Le(i990,i3300). +Le(i1030,i3300). +Le(i1060,i3300). +Le(i1100,i3300). +Le(i1130,i3300). +Le(i1170,i3300). +Le(i1200,i3300). +Le(i1240,i3300). +Le(i1260,i3300). +Le(i1270,i3300). +Le(i1290,i3300). +Le(i1310,i3300). +Le(i1320,i3300). +Le(i1330,i3300). +Le(i1350,i3300). +Le(i1360,i3300). +Le(i1380,i3300). +Le(i1390,i3300). +Le(i1420,i3300). +Le(i1430,i3300). +Le(i1450,i3300). +Le(i1460,i3300). +Le(i1490,i3300). +Le(i1520,i3300). +Le(i1530,i3300). +Le(i1540,i3300). +Le(i1560,i3300). +Le(i1590,i3300). +Le(i1630,i3300). +Le(i1660,i3300). +Le(i1700,i3300). +Le(i1730,i3300). +Le(i1760,i3300). +Le(i1770,i3300). +Le(i1810,i3300). +Le(i1840,i3300). +Le(i1880,i3300). +Le(i1910,i3300). +Le(i1950,i3300). +Le(i1980,i3300). +Le(i2020,i3300). +Le(i2050,i3300). +Le(i2090,i3300). +Le(i2120,i3300). +Le(i2160,i3300). +Le(i2190,i3300). +Le(i2200,i3300). +Le(i2230,i3300). +Le(i2270,i3300). +Le(i2300,i3300). +Le(i2340,i3300). +Le(i2370,i3300). +Le(i2410,i3300). +Le(i2420,i3300). +Le(i2440,i3300). +Le(i2480,i3300). +Le(i2510,i3300). +Le(i2550,i3300). +Le(i2580,i3300). +Le(i2620,i3300). +Le(i2640,i3300). +Le(i2660,i3300). +Le(i2730,i3300). +Le(i2760,i3300). 
+Le(i2800,i3300). +Le(i2830,i3300). +Le(i2860,i3300). +Le(i2870,i3300). +Le(i2940,i3300). +Le(i2970,i3300). +Le(i3010,i3300). +Le(i3040,i3300). +Le(i3080,i3300). +Le(i3120,i3300). +Le(i3150,i3300). +Le(i3220,i3300). +Le(i3260,i3300). +Le(i3290,i3300). +Le(i-30,i3330). +Le(i0,i3330). +Le(i13,i3330). +Le(i26,i3330). +Le(i39,i3330). +Le(i52,i3330). +Le(i60,i3330). +Le(i65,i3330). +Le(i70,i3330). +Le(i78,i3330). +Le(i90,i3330). +Le(i91,i3330). +Le(i104,i3330). +Le(i117,i3330). +Le(i130,i3330). +Le(i143,i3330). +Le(i156,i3330). +Le(i169,i3330). +Le(i182,i3330). +Le(i195,i3330). +Le(i208,i3330). +Le(i221,i3330). +Le(i234,i3330). +Le(i247,i3330). +Le(i260,i3330). +Le(i460,i3330). +Le(i530,i3330). +Le(i600,i3330). +Le(i660,i3330). +Le(i670,i3330). +Le(i710,i3330). +Le(i740,i3330). +Le(i810,i3330). +Le(i850,i3330). +Le(i880,i3330). +Le(i890,i3330). +Le(i920,i3330). +Le(i960,i3330). +Le(i990,i3330). +Le(i1030,i3330). +Le(i1060,i3330). +Le(i1100,i3330). +Le(i1130,i3330). +Le(i1170,i3330). +Le(i1200,i3330). +Le(i1240,i3330). +Le(i1260,i3330). +Le(i1270,i3330). +Le(i1290,i3330). +Le(i1310,i3330). +Le(i1320,i3330). +Le(i1330,i3330). +Le(i1350,i3330). +Le(i1360,i3330). +Le(i1380,i3330). +Le(i1390,i3330). +Le(i1420,i3330). +Le(i1430,i3330). +Le(i1450,i3330). +Le(i1460,i3330). +Le(i1490,i3330). +Le(i1520,i3330). +Le(i1530,i3330). +Le(i1540,i3330). +Le(i1560,i3330). +Le(i1590,i3330). +Le(i1630,i3330). +Le(i1660,i3330). +Le(i1700,i3330). +Le(i1730,i3330). +Le(i1760,i3330). +Le(i1770,i3330). +Le(i1810,i3330). +Le(i1840,i3330). +Le(i1880,i3330). +Le(i1910,i3330). +Le(i1950,i3330). +Le(i1980,i3330). +Le(i2020,i3330). +Le(i2050,i3330). +Le(i2090,i3330). +Le(i2120,i3330). +Le(i2160,i3330). +Le(i2190,i3330). +Le(i2200,i3330). +Le(i2230,i3330). +Le(i2270,i3330). +Le(i2300,i3330). +Le(i2340,i3330). +Le(i2370,i3330). +Le(i2410,i3330). +Le(i2420,i3330). +Le(i2440,i3330). +Le(i2480,i3330). +Le(i2510,i3330). +Le(i2550,i3330). +Le(i2580,i3330). +Le(i2620,i3330). +Le(i2640,i3330). +Le(i2660,i3330). +Le(i2730,i3330). +Le(i2760,i3330). +Le(i2800,i3330). +Le(i2830,i3330). +Le(i2860,i3330). +Le(i2870,i3330). +Le(i2940,i3330). +Le(i2970,i3330). +Le(i3010,i3330). +Le(i3040,i3330). +Le(i3080,i3330). +Le(i3120,i3330). +Le(i3150,i3330). +Le(i3220,i3330). +Le(i3260,i3330). +Le(i3290,i3330). +Le(i3300,i3330). +Le(i-30,i3400). +Le(i0,i3400). +Le(i13,i3400). +Le(i26,i3400). +Le(i39,i3400). +Le(i52,i3400). +Le(i60,i3400). +Le(i65,i3400). +Le(i70,i3400). +Le(i78,i3400). +Le(i90,i3400). +Le(i91,i3400). +Le(i104,i3400). +Le(i117,i3400). +Le(i130,i3400). +Le(i143,i3400). +Le(i156,i3400). +Le(i169,i3400). +Le(i182,i3400). +Le(i195,i3400). +Le(i208,i3400). +Le(i221,i3400). +Le(i234,i3400). +Le(i247,i3400). +Le(i260,i3400). +Le(i460,i3400). +Le(i530,i3400). +Le(i600,i3400). +Le(i660,i3400). +Le(i670,i3400). +Le(i710,i3400). +Le(i740,i3400). +Le(i810,i3400). +Le(i850,i3400). +Le(i880,i3400). +Le(i890,i3400). +Le(i920,i3400). +Le(i960,i3400). +Le(i990,i3400). +Le(i1030,i3400). +Le(i1060,i3400). +Le(i1100,i3400). +Le(i1130,i3400). +Le(i1170,i3400). +Le(i1200,i3400). +Le(i1240,i3400). +Le(i1260,i3400). +Le(i1270,i3400). +Le(i1290,i3400). +Le(i1310,i3400). +Le(i1320,i3400). +Le(i1330,i3400). +Le(i1350,i3400). +Le(i1360,i3400). +Le(i1380,i3400). +Le(i1390,i3400). +Le(i1420,i3400). +Le(i1430,i3400). +Le(i1450,i3400). +Le(i1460,i3400). +Le(i1490,i3400). +Le(i1520,i3400). +Le(i1530,i3400). +Le(i1540,i3400). +Le(i1560,i3400). +Le(i1590,i3400). +Le(i1630,i3400). +Le(i1660,i3400). +Le(i1700,i3400). +Le(i1730,i3400). +Le(i1760,i3400). +Le(i1770,i3400). 
+Le(i1810,i3400). +Le(i1840,i3400). +Le(i1880,i3400). +Le(i1910,i3400). +Le(i1950,i3400). +Le(i1980,i3400). +Le(i2020,i3400). +Le(i2050,i3400). +Le(i2090,i3400). +Le(i2120,i3400). +Le(i2160,i3400). +Le(i2190,i3400). +Le(i2200,i3400). +Le(i2230,i3400). +Le(i2270,i3400). +Le(i2300,i3400). +Le(i2340,i3400). +Le(i2370,i3400). +Le(i2410,i3400). +Le(i2420,i3400). +Le(i2440,i3400). +Le(i2480,i3400). +Le(i2510,i3400). +Le(i2550,i3400). +Le(i2580,i3400). +Le(i2620,i3400). +Le(i2640,i3400). +Le(i2660,i3400). +Le(i2730,i3400). +Le(i2760,i3400). +Le(i2800,i3400). +Le(i2830,i3400). +Le(i2860,i3400). +Le(i2870,i3400). +Le(i2940,i3400). +Le(i2970,i3400). +Le(i3010,i3400). +Le(i3040,i3400). +Le(i3080,i3400). +Le(i3120,i3400). +Le(i3150,i3400). +Le(i3220,i3400). +Le(i3260,i3400). +Le(i3290,i3400). +Le(i3300,i3400). +Le(i3330,i3400). +Le(i-30,i3430). +Le(i0,i3430). +Le(i13,i3430). +Le(i26,i3430). +Le(i39,i3430). +Le(i52,i3430). +Le(i60,i3430). +Le(i65,i3430). +Le(i70,i3430). +Le(i78,i3430). +Le(i90,i3430). +Le(i91,i3430). +Le(i104,i3430). +Le(i117,i3430). +Le(i130,i3430). +Le(i143,i3430). +Le(i156,i3430). +Le(i169,i3430). +Le(i182,i3430). +Le(i195,i3430). +Le(i208,i3430). +Le(i221,i3430). +Le(i234,i3430). +Le(i247,i3430). +Le(i260,i3430). +Le(i460,i3430). +Le(i530,i3430). +Le(i600,i3430). +Le(i660,i3430). +Le(i670,i3430). +Le(i710,i3430). +Le(i740,i3430). +Le(i810,i3430). +Le(i850,i3430). +Le(i880,i3430). +Le(i890,i3430). +Le(i920,i3430). +Le(i960,i3430). +Le(i990,i3430). +Le(i1030,i3430). +Le(i1060,i3430). +Le(i1100,i3430). +Le(i1130,i3430). +Le(i1170,i3430). +Le(i1200,i3430). +Le(i1240,i3430). +Le(i1260,i3430). +Le(i1270,i3430). +Le(i1290,i3430). +Le(i1310,i3430). +Le(i1320,i3430). +Le(i1330,i3430). +Le(i1350,i3430). +Le(i1360,i3430). +Le(i1380,i3430). +Le(i1390,i3430). +Le(i1420,i3430). +Le(i1430,i3430). +Le(i1450,i3430). +Le(i1460,i3430). +Le(i1490,i3430). +Le(i1520,i3430). +Le(i1530,i3430). +Le(i1540,i3430). +Le(i1560,i3430). +Le(i1590,i3430). +Le(i1630,i3430). +Le(i1660,i3430). +Le(i1700,i3430). +Le(i1730,i3430). +Le(i1760,i3430). +Le(i1770,i3430). +Le(i1810,i3430). +Le(i1840,i3430). +Le(i1880,i3430). +Le(i1910,i3430). +Le(i1950,i3430). +Le(i1980,i3430). +Le(i2020,i3430). +Le(i2050,i3430). +Le(i2090,i3430). +Le(i2120,i3430). +Le(i2160,i3430). +Le(i2190,i3430). +Le(i2200,i3430). +Le(i2230,i3430). +Le(i2270,i3430). +Le(i2300,i3430). +Le(i2340,i3430). +Le(i2370,i3430). +Le(i2410,i3430). +Le(i2420,i3430). +Le(i2440,i3430). +Le(i2480,i3430). +Le(i2510,i3430). +Le(i2550,i3430). +Le(i2580,i3430). +Le(i2620,i3430). +Le(i2640,i3430). +Le(i2660,i3430). +Le(i2730,i3430). +Le(i2760,i3430). +Le(i2800,i3430). +Le(i2830,i3430). +Le(i2860,i3430). +Le(i2870,i3430). +Le(i2940,i3430). +Le(i2970,i3430). +Le(i3010,i3430). +Le(i3040,i3430). +Le(i3080,i3430). +Le(i3120,i3430). +Le(i3150,i3430). +Le(i3220,i3430). +Le(i3260,i3430). +Le(i3290,i3430). +Le(i3300,i3430). +Le(i3330,i3430). +Le(i3400,i3430). +Le(i-30,i3500). +Le(i0,i3500). +Le(i13,i3500). +Le(i26,i3500). +Le(i39,i3500). +Le(i52,i3500). +Le(i60,i3500). +Le(i65,i3500). +Le(i70,i3500). +Le(i78,i3500). +Le(i90,i3500). +Le(i91,i3500). +Le(i104,i3500). +Le(i117,i3500). +Le(i130,i3500). +Le(i143,i3500). +Le(i156,i3500). +Le(i169,i3500). +Le(i182,i3500). +Le(i195,i3500). +Le(i208,i3500). +Le(i221,i3500). +Le(i234,i3500). +Le(i247,i3500). +Le(i260,i3500). +Le(i460,i3500). +Le(i530,i3500). +Le(i600,i3500). +Le(i660,i3500). +Le(i670,i3500). +Le(i710,i3500). +Le(i740,i3500). +Le(i810,i3500). +Le(i850,i3500). +Le(i880,i3500). +Le(i890,i3500). +Le(i920,i3500). +Le(i960,i3500). 
+Le(i990,i3500). +Le(i1030,i3500). +Le(i1060,i3500). +Le(i1100,i3500). +Le(i1130,i3500). +Le(i1170,i3500). +Le(i1200,i3500). +Le(i1240,i3500). +Le(i1260,i3500). +Le(i1270,i3500). +Le(i1290,i3500). +Le(i1310,i3500). +Le(i1320,i3500). +Le(i1330,i3500). +Le(i1350,i3500). +Le(i1360,i3500). +Le(i1380,i3500). +Le(i1390,i3500). +Le(i1420,i3500). +Le(i1430,i3500). +Le(i1450,i3500). +Le(i1460,i3500). +Le(i1490,i3500). +Le(i1520,i3500). +Le(i1530,i3500). +Le(i1540,i3500). +Le(i1560,i3500). +Le(i1590,i3500). +Le(i1630,i3500). +Le(i1660,i3500). +Le(i1700,i3500). +Le(i1730,i3500). +Le(i1760,i3500). +Le(i1770,i3500). +Le(i1810,i3500). +Le(i1840,i3500). +Le(i1880,i3500). +Le(i1910,i3500). +Le(i1950,i3500). +Le(i1980,i3500). +Le(i2020,i3500). +Le(i2050,i3500). +Le(i2090,i3500). +Le(i2120,i3500). +Le(i2160,i3500). +Le(i2190,i3500). +Le(i2200,i3500). +Le(i2230,i3500). +Le(i2270,i3500). +Le(i2300,i3500). +Le(i2340,i3500). +Le(i2370,i3500). +Le(i2410,i3500). +Le(i2420,i3500). +Le(i2440,i3500). +Le(i2480,i3500). +Le(i2510,i3500). +Le(i2550,i3500). +Le(i2580,i3500). +Le(i2620,i3500). +Le(i2640,i3500). +Le(i2660,i3500). +Le(i2730,i3500). +Le(i2760,i3500). +Le(i2800,i3500). +Le(i2830,i3500). +Le(i2860,i3500). +Le(i2870,i3500). +Le(i2940,i3500). +Le(i2970,i3500). +Le(i3010,i3500). +Le(i3040,i3500). +Le(i3080,i3500). +Le(i3120,i3500). +Le(i3150,i3500). +Le(i3220,i3500). +Le(i3260,i3500). +Le(i3290,i3500). +Le(i3300,i3500). +Le(i3330,i3500). +Le(i3400,i3500). +Le(i3430,i3500). +Le(i-30,i3520). +Le(i0,i3520). +Le(i13,i3520). +Le(i26,i3520). +Le(i39,i3520). +Le(i52,i3520). +Le(i60,i3520). +Le(i65,i3520). +Le(i70,i3520). +Le(i78,i3520). +Le(i90,i3520). +Le(i91,i3520). +Le(i104,i3520). +Le(i117,i3520). +Le(i130,i3520). +Le(i143,i3520). +Le(i156,i3520). +Le(i169,i3520). +Le(i182,i3520). +Le(i195,i3520). +Le(i208,i3520). +Le(i221,i3520). +Le(i234,i3520). +Le(i247,i3520). +Le(i260,i3520). +Le(i460,i3520). +Le(i530,i3520). +Le(i600,i3520). +Le(i660,i3520). +Le(i670,i3520). +Le(i710,i3520). +Le(i740,i3520). +Le(i810,i3520). +Le(i850,i3520). +Le(i880,i3520). +Le(i890,i3520). +Le(i920,i3520). +Le(i960,i3520). +Le(i990,i3520). +Le(i1030,i3520). +Le(i1060,i3520). +Le(i1100,i3520). +Le(i1130,i3520). +Le(i1170,i3520). +Le(i1200,i3520). +Le(i1240,i3520). +Le(i1260,i3520). +Le(i1270,i3520). +Le(i1290,i3520). +Le(i1310,i3520). +Le(i1320,i3520). +Le(i1330,i3520). +Le(i1350,i3520). +Le(i1360,i3520). +Le(i1380,i3520). +Le(i1390,i3520). +Le(i1420,i3520). +Le(i1430,i3520). +Le(i1450,i3520). +Le(i1460,i3520). +Le(i1490,i3520). +Le(i1520,i3520). +Le(i1530,i3520). +Le(i1540,i3520). +Le(i1560,i3520). +Le(i1590,i3520). +Le(i1630,i3520). +Le(i1660,i3520). +Le(i1700,i3520). +Le(i1730,i3520). +Le(i1760,i3520). +Le(i1770,i3520). +Le(i1810,i3520). +Le(i1840,i3520). +Le(i1880,i3520). +Le(i1910,i3520). +Le(i1950,i3520). +Le(i1980,i3520). +Le(i2020,i3520). +Le(i2050,i3520). +Le(i2090,i3520). +Le(i2120,i3520). +Le(i2160,i3520). +Le(i2190,i3520). +Le(i2200,i3520). +Le(i2230,i3520). +Le(i2270,i3520). +Le(i2300,i3520). +Le(i2340,i3520). +Le(i2370,i3520). +Le(i2410,i3520). +Le(i2420,i3520). +Le(i2440,i3520). +Le(i2480,i3520). +Le(i2510,i3520). +Le(i2550,i3520). +Le(i2580,i3520). +Le(i2620,i3520). +Le(i2640,i3520). +Le(i2660,i3520). +Le(i2730,i3520). +Le(i2760,i3520). +Le(i2800,i3520). +Le(i2830,i3520). +Le(i2860,i3520). +Le(i2870,i3520). +Le(i2940,i3520). +Le(i2970,i3520). +Le(i3010,i3520). +Le(i3040,i3520). +Le(i3080,i3520). +Le(i3120,i3520). +Le(i3150,i3520). +Le(i3220,i3520). +Le(i3260,i3520). +Le(i3290,i3520). +Le(i3300,i3520). +Le(i3330,i3520). 
+Le(i3400,i3520). +Le(i3430,i3520). +Le(i3500,i3520). +Le(i-30,i3580). +Le(i0,i3580). +Le(i13,i3580). +Le(i26,i3580). +Le(i39,i3580). +Le(i52,i3580). +Le(i60,i3580). +Le(i65,i3580). +Le(i70,i3580). +Le(i78,i3580). +Le(i90,i3580). +Le(i91,i3580). +Le(i104,i3580). +Le(i117,i3580). +Le(i130,i3580). +Le(i143,i3580). +Le(i156,i3580). +Le(i169,i3580). +Le(i182,i3580). +Le(i195,i3580). +Le(i208,i3580). +Le(i221,i3580). +Le(i234,i3580). +Le(i247,i3580). +Le(i260,i3580). +Le(i460,i3580). +Le(i530,i3580). +Le(i600,i3580). +Le(i660,i3580). +Le(i670,i3580). +Le(i710,i3580). +Le(i740,i3580). +Le(i810,i3580). +Le(i850,i3580). +Le(i880,i3580). +Le(i890,i3580). +Le(i920,i3580). +Le(i960,i3580). +Le(i990,i3580). +Le(i1030,i3580). +Le(i1060,i3580). +Le(i1100,i3580). +Le(i1130,i3580). +Le(i1170,i3580). +Le(i1200,i3580). +Le(i1240,i3580). +Le(i1260,i3580). +Le(i1270,i3580). +Le(i1290,i3580). +Le(i1310,i3580). +Le(i1320,i3580). +Le(i1330,i3580). +Le(i1350,i3580). +Le(i1360,i3580). +Le(i1380,i3580). +Le(i1390,i3580). +Le(i1420,i3580). +Le(i1430,i3580). +Le(i1450,i3580). +Le(i1460,i3580). +Le(i1490,i3580). +Le(i1520,i3580). +Le(i1530,i3580). +Le(i1540,i3580). +Le(i1560,i3580). +Le(i1590,i3580). +Le(i1630,i3580). +Le(i1660,i3580). +Le(i1700,i3580). +Le(i1730,i3580). +Le(i1760,i3580). +Le(i1770,i3580). +Le(i1810,i3580). +Le(i1840,i3580). +Le(i1880,i3580). +Le(i1910,i3580). +Le(i1950,i3580). +Le(i1980,i3580). +Le(i2020,i3580). +Le(i2050,i3580). +Le(i2090,i3580). +Le(i2120,i3580). +Le(i2160,i3580). +Le(i2190,i3580). +Le(i2200,i3580). +Le(i2230,i3580). +Le(i2270,i3580). +Le(i2300,i3580). +Le(i2340,i3580). +Le(i2370,i3580). +Le(i2410,i3580). +Le(i2420,i3580). +Le(i2440,i3580). +Le(i2480,i3580). +Le(i2510,i3580). +Le(i2550,i3580). +Le(i2580,i3580). +Le(i2620,i3580). +Le(i2640,i3580). +Le(i2660,i3580). +Le(i2730,i3580). +Le(i2760,i3580). +Le(i2800,i3580). +Le(i2830,i3580). +Le(i2860,i3580). +Le(i2870,i3580). +Le(i2940,i3580). +Le(i2970,i3580). +Le(i3010,i3580). +Le(i3040,i3580). +Le(i3080,i3580). +Le(i3120,i3580). +Le(i3150,i3580). +Le(i3220,i3580). +Le(i3260,i3580). +Le(i3290,i3580). +Le(i3300,i3580). +Le(i3330,i3580). +Le(i3400,i3580). +Le(i3430,i3580). +Le(i3500,i3580). +Le(i3520,i3580). +Le(i-30,i3610). +Le(i0,i3610). +Le(i13,i3610). +Le(i26,i3610). +Le(i39,i3610). +Le(i52,i3610). +Le(i60,i3610). +Le(i65,i3610). +Le(i70,i3610). +Le(i78,i3610). +Le(i90,i3610). +Le(i91,i3610). +Le(i104,i3610). +Le(i117,i3610). +Le(i130,i3610). +Le(i143,i3610). +Le(i156,i3610). +Le(i169,i3610). +Le(i182,i3610). +Le(i195,i3610). +Le(i208,i3610). +Le(i221,i3610). +Le(i234,i3610). +Le(i247,i3610). +Le(i260,i3610). +Le(i460,i3610). +Le(i530,i3610). +Le(i600,i3610). +Le(i660,i3610). +Le(i670,i3610). +Le(i710,i3610). +Le(i740,i3610). +Le(i810,i3610). +Le(i850,i3610). +Le(i880,i3610). +Le(i890,i3610). +Le(i920,i3610). +Le(i960,i3610). +Le(i990,i3610). +Le(i1030,i3610). +Le(i1060,i3610). +Le(i1100,i3610). +Le(i1130,i3610). +Le(i1170,i3610). +Le(i1200,i3610). +Le(i1240,i3610). +Le(i1260,i3610). +Le(i1270,i3610). +Le(i1290,i3610). +Le(i1310,i3610). +Le(i1320,i3610). +Le(i1330,i3610). +Le(i1350,i3610). +Le(i1360,i3610). +Le(i1380,i3610). +Le(i1390,i3610). +Le(i1420,i3610). +Le(i1430,i3610). +Le(i1450,i3610). +Le(i1460,i3610). +Le(i1490,i3610). +Le(i1520,i3610). +Le(i1530,i3610). +Le(i1540,i3610). +Le(i1560,i3610). +Le(i1590,i3610). +Le(i1630,i3610). +Le(i1660,i3610). +Le(i1700,i3610). +Le(i1730,i3610). +Le(i1760,i3610). +Le(i1770,i3610). +Le(i1810,i3610). +Le(i1840,i3610). +Le(i1880,i3610). +Le(i1910,i3610). +Le(i1950,i3610). +Le(i1980,i3610). 
+Le(i2020,i3610). +Le(i2050,i3610). +Le(i2090,i3610). +Le(i2120,i3610). +Le(i2160,i3610). +Le(i2190,i3610). +Le(i2200,i3610). +Le(i2230,i3610). +Le(i2270,i3610). +Le(i2300,i3610). +Le(i2340,i3610). +Le(i2370,i3610). +Le(i2410,i3610). +Le(i2420,i3610). +Le(i2440,i3610). +Le(i2480,i3610). +Le(i2510,i3610). +Le(i2550,i3610). +Le(i2580,i3610). +Le(i2620,i3610). +Le(i2640,i3610). +Le(i2660,i3610). +Le(i2730,i3610). +Le(i2760,i3610). +Le(i2800,i3610). +Le(i2830,i3610). +Le(i2860,i3610). +Le(i2870,i3610). +Le(i2940,i3610). +Le(i2970,i3610). +Le(i3010,i3610). +Le(i3040,i3610). +Le(i3080,i3610). +Le(i3120,i3610). +Le(i3150,i3610). +Le(i3220,i3610). +Le(i3260,i3610). +Le(i3290,i3610). +Le(i3300,i3610). +Le(i3330,i3610). +Le(i3400,i3610). +Le(i3430,i3610). +Le(i3500,i3610). +Le(i3520,i3610). +Le(i3580,i3610). +Le(i-30,i3650). +Le(i0,i3650). +Le(i13,i3650). +Le(i26,i3650). +Le(i39,i3650). +Le(i52,i3650). +Le(i60,i3650). +Le(i65,i3650). +Le(i70,i3650). +Le(i78,i3650). +Le(i90,i3650). +Le(i91,i3650). +Le(i104,i3650). +Le(i117,i3650). +Le(i130,i3650). +Le(i143,i3650). +Le(i156,i3650). +Le(i169,i3650). +Le(i182,i3650). +Le(i195,i3650). +Le(i208,i3650). +Le(i221,i3650). +Le(i234,i3650). +Le(i247,i3650). +Le(i260,i3650). +Le(i460,i3650). +Le(i530,i3650). +Le(i600,i3650). +Le(i660,i3650). +Le(i670,i3650). +Le(i710,i3650). +Le(i740,i3650). +Le(i810,i3650). +Le(i850,i3650). +Le(i880,i3650). +Le(i890,i3650). +Le(i920,i3650). +Le(i960,i3650). +Le(i990,i3650). +Le(i1030,i3650). +Le(i1060,i3650). +Le(i1100,i3650). +Le(i1130,i3650). +Le(i1170,i3650). +Le(i1200,i3650). +Le(i1240,i3650). +Le(i1260,i3650). +Le(i1270,i3650). +Le(i1290,i3650). +Le(i1310,i3650). +Le(i1320,i3650). +Le(i1330,i3650). +Le(i1350,i3650). +Le(i1360,i3650). +Le(i1380,i3650). +Le(i1390,i3650). +Le(i1420,i3650). +Le(i1430,i3650). +Le(i1450,i3650). +Le(i1460,i3650). +Le(i1490,i3650). +Le(i1520,i3650). +Le(i1530,i3650). +Le(i1540,i3650). +Le(i1560,i3650). +Le(i1590,i3650). +Le(i1630,i3650). +Le(i1660,i3650). +Le(i1700,i3650). +Le(i1730,i3650). +Le(i1760,i3650). +Le(i1770,i3650). +Le(i1810,i3650). +Le(i1840,i3650). +Le(i1880,i3650). +Le(i1910,i3650). +Le(i1950,i3650). +Le(i1980,i3650). +Le(i2020,i3650). +Le(i2050,i3650). +Le(i2090,i3650). +Le(i2120,i3650). +Le(i2160,i3650). +Le(i2190,i3650). +Le(i2200,i3650). +Le(i2230,i3650). +Le(i2270,i3650). +Le(i2300,i3650). +Le(i2340,i3650). +Le(i2370,i3650). +Le(i2410,i3650). +Le(i2420,i3650). +Le(i2440,i3650). +Le(i2480,i3650). +Le(i2510,i3650). +Le(i2550,i3650). +Le(i2580,i3650). +Le(i2620,i3650). +Le(i2640,i3650). +Le(i2660,i3650). +Le(i2730,i3650). +Le(i2760,i3650). +Le(i2800,i3650). +Le(i2830,i3650). +Le(i2860,i3650). +Le(i2870,i3650). +Le(i2940,i3650). +Le(i2970,i3650). +Le(i3010,i3650). +Le(i3040,i3650). +Le(i3080,i3650). +Le(i3120,i3650). +Le(i3150,i3650). +Le(i3220,i3650). +Le(i3260,i3650). +Le(i3290,i3650). +Le(i3300,i3650). +Le(i3330,i3650). +Le(i3400,i3650). +Le(i3430,i3650). +Le(i3500,i3650). +Le(i3520,i3650). +Le(i3580,i3650). +Le(i3610,i3650). +Le(i-30,i3680). +Le(i0,i3680). +Le(i13,i3680). +Le(i26,i3680). +Le(i39,i3680). +Le(i52,i3680). +Le(i60,i3680). +Le(i65,i3680). +Le(i70,i3680). +Le(i78,i3680). +Le(i90,i3680). +Le(i91,i3680). +Le(i104,i3680). +Le(i117,i3680). +Le(i130,i3680). +Le(i143,i3680). +Le(i156,i3680). +Le(i169,i3680). +Le(i182,i3680). +Le(i195,i3680). +Le(i208,i3680). +Le(i221,i3680). +Le(i234,i3680). +Le(i247,i3680). +Le(i260,i3680). +Le(i460,i3680). +Le(i530,i3680). +Le(i600,i3680). +Le(i660,i3680). +Le(i670,i3680). +Le(i710,i3680). +Le(i740,i3680). +Le(i810,i3680). 
+Le(i850,i3680). +Le(i880,i3680). +Le(i890,i3680). +Le(i920,i3680). +Le(i960,i3680). +Le(i990,i3680). +Le(i1030,i3680). +Le(i1060,i3680). +Le(i1100,i3680). +Le(i1130,i3680). +Le(i1170,i3680). +Le(i1200,i3680). +Le(i1240,i3680). +Le(i1260,i3680). +Le(i1270,i3680). +Le(i1290,i3680). +Le(i1310,i3680). +Le(i1320,i3680). +Le(i1330,i3680). +Le(i1350,i3680). +Le(i1360,i3680). +Le(i1380,i3680). +Le(i1390,i3680). +Le(i1420,i3680). +Le(i1430,i3680). +Le(i1450,i3680). +Le(i1460,i3680). +Le(i1490,i3680). +Le(i1520,i3680). +Le(i1530,i3680). +Le(i1540,i3680). +Le(i1560,i3680). +Le(i1590,i3680). +Le(i1630,i3680). +Le(i1660,i3680). +Le(i1700,i3680). +Le(i1730,i3680). +Le(i1760,i3680). +Le(i1770,i3680). +Le(i1810,i3680). +Le(i1840,i3680). +Le(i1880,i3680). +Le(i1910,i3680). +Le(i1950,i3680). +Le(i1980,i3680). +Le(i2020,i3680). +Le(i2050,i3680). +Le(i2090,i3680). +Le(i2120,i3680). +Le(i2160,i3680). +Le(i2190,i3680). +Le(i2200,i3680). +Le(i2230,i3680). +Le(i2270,i3680). +Le(i2300,i3680). +Le(i2340,i3680). +Le(i2370,i3680). +Le(i2410,i3680). +Le(i2420,i3680). +Le(i2440,i3680). +Le(i2480,i3680). +Le(i2510,i3680). +Le(i2550,i3680). +Le(i2580,i3680). +Le(i2620,i3680). +Le(i2640,i3680). +Le(i2660,i3680). +Le(i2730,i3680). +Le(i2760,i3680). +Le(i2800,i3680). +Le(i2830,i3680). +Le(i2860,i3680). +Le(i2870,i3680). +Le(i2940,i3680). +Le(i2970,i3680). +Le(i3010,i3680). +Le(i3040,i3680). +Le(i3080,i3680). +Le(i3120,i3680). +Le(i3150,i3680). +Le(i3220,i3680). +Le(i3260,i3680). +Le(i3290,i3680). +Le(i3300,i3680). +Le(i3330,i3680). +Le(i3400,i3680). +Le(i3430,i3680). +Le(i3500,i3680). +Le(i3520,i3680). +Le(i3580,i3680). +Le(i3610,i3680). +Le(i3650,i3680). +Le(i-30,i3720). +Le(i0,i3720). +Le(i13,i3720). +Le(i26,i3720). +Le(i39,i3720). +Le(i52,i3720). +Le(i60,i3720). +Le(i65,i3720). +Le(i70,i3720). +Le(i78,i3720). +Le(i90,i3720). +Le(i91,i3720). +Le(i104,i3720). +Le(i117,i3720). +Le(i130,i3720). +Le(i143,i3720). +Le(i156,i3720). +Le(i169,i3720). +Le(i182,i3720). +Le(i195,i3720). +Le(i208,i3720). +Le(i221,i3720). +Le(i234,i3720). +Le(i247,i3720). +Le(i260,i3720). +Le(i460,i3720). +Le(i530,i3720). +Le(i600,i3720). +Le(i660,i3720). +Le(i670,i3720). +Le(i710,i3720). +Le(i740,i3720). +Le(i810,i3720). +Le(i850,i3720). +Le(i880,i3720). +Le(i890,i3720). +Le(i920,i3720). +Le(i960,i3720). +Le(i990,i3720). +Le(i1030,i3720). +Le(i1060,i3720). +Le(i1100,i3720). +Le(i1130,i3720). +Le(i1170,i3720). +Le(i1200,i3720). +Le(i1240,i3720). +Le(i1260,i3720). +Le(i1270,i3720). +Le(i1290,i3720). +Le(i1310,i3720). +Le(i1320,i3720). +Le(i1330,i3720). +Le(i1350,i3720). +Le(i1360,i3720). +Le(i1380,i3720). +Le(i1390,i3720). +Le(i1420,i3720). +Le(i1430,i3720). +Le(i1450,i3720). +Le(i1460,i3720). +Le(i1490,i3720). +Le(i1520,i3720). +Le(i1530,i3720). +Le(i1540,i3720). +Le(i1560,i3720). +Le(i1590,i3720). +Le(i1630,i3720). +Le(i1660,i3720). +Le(i1700,i3720). +Le(i1730,i3720). +Le(i1760,i3720). +Le(i1770,i3720). +Le(i1810,i3720). +Le(i1840,i3720). +Le(i1880,i3720). +Le(i1910,i3720). +Le(i1950,i3720). +Le(i1980,i3720). +Le(i2020,i3720). +Le(i2050,i3720). +Le(i2090,i3720). +Le(i2120,i3720). +Le(i2160,i3720). +Le(i2190,i3720). +Le(i2200,i3720). +Le(i2230,i3720). +Le(i2270,i3720). +Le(i2300,i3720). +Le(i2340,i3720). +Le(i2370,i3720). +Le(i2410,i3720). +Le(i2420,i3720). +Le(i2440,i3720). +Le(i2480,i3720). +Le(i2510,i3720). +Le(i2550,i3720). +Le(i2580,i3720). +Le(i2620,i3720). +Le(i2640,i3720). +Le(i2660,i3720). +Le(i2730,i3720). +Le(i2760,i3720). +Le(i2800,i3720). +Le(i2830,i3720). +Le(i2860,i3720). +Le(i2870,i3720). +Le(i2940,i3720). +Le(i2970,i3720). 
+Le(i3010,i3720). +Le(i3040,i3720). +Le(i3080,i3720). +Le(i3120,i3720). +Le(i3150,i3720). +Le(i3220,i3720). +Le(i3260,i3720). +Le(i3290,i3720). +Le(i3300,i3720). +Le(i3330,i3720). +Le(i3400,i3720). +Le(i3430,i3720). +Le(i3500,i3720). +Le(i3520,i3720). +Le(i3580,i3720). +Le(i3610,i3720). +Le(i3650,i3720). +Le(i3680,i3720). +Le(i-30,i3740). +Le(i0,i3740). +Le(i13,i3740). +Le(i26,i3740). +Le(i39,i3740). +Le(i52,i3740). +Le(i60,i3740). +Le(i65,i3740). +Le(i70,i3740). +Le(i78,i3740). +Le(i90,i3740). +Le(i91,i3740). +Le(i104,i3740). +Le(i117,i3740). +Le(i130,i3740). +Le(i143,i3740). +Le(i156,i3740). +Le(i169,i3740). +Le(i182,i3740). +Le(i195,i3740). +Le(i208,i3740). +Le(i221,i3740). +Le(i234,i3740). +Le(i247,i3740). +Le(i260,i3740). +Le(i460,i3740). +Le(i530,i3740). +Le(i600,i3740). +Le(i660,i3740). +Le(i670,i3740). +Le(i710,i3740). +Le(i740,i3740). +Le(i810,i3740). +Le(i850,i3740). +Le(i880,i3740). +Le(i890,i3740). +Le(i920,i3740). +Le(i960,i3740). +Le(i990,i3740). +Le(i1030,i3740). +Le(i1060,i3740). +Le(i1100,i3740). +Le(i1130,i3740). +Le(i1170,i3740). +Le(i1200,i3740). +Le(i1240,i3740). +Le(i1260,i3740). +Le(i1270,i3740). +Le(i1290,i3740). +Le(i1310,i3740). +Le(i1320,i3740). +Le(i1330,i3740). +Le(i1350,i3740). +Le(i1360,i3740). +Le(i1380,i3740). +Le(i1390,i3740). +Le(i1420,i3740). +Le(i1430,i3740). +Le(i1450,i3740). +Le(i1460,i3740). +Le(i1490,i3740). +Le(i1520,i3740). +Le(i1530,i3740). +Le(i1540,i3740). +Le(i1560,i3740). +Le(i1590,i3740). +Le(i1630,i3740). +Le(i1660,i3740). +Le(i1700,i3740). +Le(i1730,i3740). +Le(i1760,i3740). +Le(i1770,i3740). +Le(i1810,i3740). +Le(i1840,i3740). +Le(i1880,i3740). +Le(i1910,i3740). +Le(i1950,i3740). +Le(i1980,i3740). +Le(i2020,i3740). +Le(i2050,i3740). +Le(i2090,i3740). +Le(i2120,i3740). +Le(i2160,i3740). +Le(i2190,i3740). +Le(i2200,i3740). +Le(i2230,i3740). +Le(i2270,i3740). +Le(i2300,i3740). +Le(i2340,i3740). +Le(i2370,i3740). +Le(i2410,i3740). +Le(i2420,i3740). +Le(i2440,i3740). +Le(i2480,i3740). +Le(i2510,i3740). +Le(i2550,i3740). +Le(i2580,i3740). +Le(i2620,i3740). +Le(i2640,i3740). +Le(i2660,i3740). +Le(i2730,i3740). +Le(i2760,i3740). +Le(i2800,i3740). +Le(i2830,i3740). +Le(i2860,i3740). +Le(i2870,i3740). +Le(i2940,i3740). +Le(i2970,i3740). +Le(i3010,i3740). +Le(i3040,i3740). +Le(i3080,i3740). +Le(i3120,i3740). +Le(i3150,i3740). +Le(i3220,i3740). +Le(i3260,i3740). +Le(i3290,i3740). +Le(i3300,i3740). +Le(i3330,i3740). +Le(i3400,i3740). +Le(i3430,i3740). +Le(i3500,i3740). +Le(i3520,i3740). +Le(i3580,i3740). +Le(i3610,i3740). +Le(i3650,i3740). +Le(i3680,i3740). +Le(i3720,i3740). +Le(i-30,i3790). +Le(i0,i3790). +Le(i13,i3790). +Le(i26,i3790). +Le(i39,i3790). +Le(i52,i3790). +Le(i60,i3790). +Le(i65,i3790). +Le(i70,i3790). +Le(i78,i3790). +Le(i90,i3790). +Le(i91,i3790). +Le(i104,i3790). +Le(i117,i3790). +Le(i130,i3790). +Le(i143,i3790). +Le(i156,i3790). +Le(i169,i3790). +Le(i182,i3790). +Le(i195,i3790). +Le(i208,i3790). +Le(i221,i3790). +Le(i234,i3790). +Le(i247,i3790). +Le(i260,i3790). +Le(i460,i3790). +Le(i530,i3790). +Le(i600,i3790). +Le(i660,i3790). +Le(i670,i3790). +Le(i710,i3790). +Le(i740,i3790). +Le(i810,i3790). +Le(i850,i3790). +Le(i880,i3790). +Le(i890,i3790). +Le(i920,i3790). +Le(i960,i3790). +Le(i990,i3790). +Le(i1030,i3790). +Le(i1060,i3790). +Le(i1100,i3790). +Le(i1130,i3790). +Le(i1170,i3790). +Le(i1200,i3790). +Le(i1240,i3790). +Le(i1260,i3790). +Le(i1270,i3790). +Le(i1290,i3790). +Le(i1310,i3790). +Le(i1320,i3790). +Le(i1330,i3790). +Le(i1350,i3790). +Le(i1360,i3790). +Le(i1380,i3790). +Le(i1390,i3790). +Le(i1420,i3790). +Le(i1430,i3790). 
+Le(i1450,i3790). +Le(i1460,i3790). +Le(i1490,i3790). +Le(i1520,i3790). +Le(i1530,i3790). +Le(i1540,i3790). +Le(i1560,i3790). +Le(i1590,i3790). +Le(i1630,i3790). +Le(i1660,i3790). +Le(i1700,i3790). +Le(i1730,i3790). +Le(i1760,i3790). +Le(i1770,i3790). +Le(i1810,i3790). +Le(i1840,i3790). +Le(i1880,i3790). +Le(i1910,i3790). +Le(i1950,i3790). +Le(i1980,i3790). +Le(i2020,i3790). +Le(i2050,i3790). +Le(i2090,i3790). +Le(i2120,i3790). +Le(i2160,i3790). +Le(i2190,i3790). +Le(i2200,i3790). +Le(i2230,i3790). +Le(i2270,i3790). +Le(i2300,i3790). +Le(i2340,i3790). +Le(i2370,i3790). +Le(i2410,i3790). +Le(i2420,i3790). +Le(i2440,i3790). +Le(i2480,i3790). +Le(i2510,i3790). +Le(i2550,i3790). +Le(i2580,i3790). +Le(i2620,i3790). +Le(i2640,i3790). +Le(i2660,i3790). +Le(i2730,i3790). +Le(i2760,i3790). +Le(i2800,i3790). +Le(i2830,i3790). +Le(i2860,i3790). +Le(i2870,i3790). +Le(i2940,i3790). +Le(i2970,i3790). +Le(i3010,i3790). +Le(i3040,i3790). +Le(i3080,i3790). +Le(i3120,i3790). +Le(i3150,i3790). +Le(i3220,i3790). +Le(i3260,i3790). +Le(i3290,i3790). +Le(i3300,i3790). +Le(i3330,i3790). +Le(i3400,i3790). +Le(i3430,i3790). +Le(i3500,i3790). +Le(i3520,i3790). +Le(i3580,i3790). +Le(i3610,i3790). +Le(i3650,i3790). +Le(i3680,i3790). +Le(i3720,i3790). +Le(i3740,i3790). +Le(i-30,i3820). +Le(i0,i3820). +Le(i13,i3820). +Le(i26,i3820). +Le(i39,i3820). +Le(i52,i3820). +Le(i60,i3820). +Le(i65,i3820). +Le(i70,i3820). +Le(i78,i3820). +Le(i90,i3820). +Le(i91,i3820). +Le(i104,i3820). +Le(i117,i3820). +Le(i130,i3820). +Le(i143,i3820). +Le(i156,i3820). +Le(i169,i3820). +Le(i182,i3820). +Le(i195,i3820). +Le(i208,i3820). +Le(i221,i3820). +Le(i234,i3820). +Le(i247,i3820). +Le(i260,i3820). +Le(i460,i3820). +Le(i530,i3820). +Le(i600,i3820). +Le(i660,i3820). +Le(i670,i3820). +Le(i710,i3820). +Le(i740,i3820). +Le(i810,i3820). +Le(i850,i3820). +Le(i880,i3820). +Le(i890,i3820). +Le(i920,i3820). +Le(i960,i3820). +Le(i990,i3820). +Le(i1030,i3820). +Le(i1060,i3820). +Le(i1100,i3820). +Le(i1130,i3820). +Le(i1170,i3820). +Le(i1200,i3820). +Le(i1240,i3820). +Le(i1260,i3820). +Le(i1270,i3820). +Le(i1290,i3820). +Le(i1310,i3820). +Le(i1320,i3820). +Le(i1330,i3820). +Le(i1350,i3820). +Le(i1360,i3820). +Le(i1380,i3820). +Le(i1390,i3820). +Le(i1420,i3820). +Le(i1430,i3820). +Le(i1450,i3820). +Le(i1460,i3820). +Le(i1490,i3820). +Le(i1520,i3820). +Le(i1530,i3820). +Le(i1540,i3820). +Le(i1560,i3820). +Le(i1590,i3820). +Le(i1630,i3820). +Le(i1660,i3820). +Le(i1700,i3820). +Le(i1730,i3820). +Le(i1760,i3820). +Le(i1770,i3820). +Le(i1810,i3820). +Le(i1840,i3820). +Le(i1880,i3820). +Le(i1910,i3820). +Le(i1950,i3820). +Le(i1980,i3820). +Le(i2020,i3820). +Le(i2050,i3820). +Le(i2090,i3820). +Le(i2120,i3820). +Le(i2160,i3820). +Le(i2190,i3820). +Le(i2200,i3820). +Le(i2230,i3820). +Le(i2270,i3820). +Le(i2300,i3820). +Le(i2340,i3820). +Le(i2370,i3820). +Le(i2410,i3820). +Le(i2420,i3820). +Le(i2440,i3820). +Le(i2480,i3820). +Le(i2510,i3820). +Le(i2550,i3820). +Le(i2580,i3820). +Le(i2620,i3820). +Le(i2640,i3820). +Le(i2660,i3820). +Le(i2730,i3820). +Le(i2760,i3820). +Le(i2800,i3820). +Le(i2830,i3820). +Le(i2860,i3820). +Le(i2870,i3820). +Le(i2940,i3820). +Le(i2970,i3820). +Le(i3010,i3820). +Le(i3040,i3820). +Le(i3080,i3820). +Le(i3120,i3820). +Le(i3150,i3820). +Le(i3220,i3820). +Le(i3260,i3820). +Le(i3290,i3820). +Le(i3300,i3820). +Le(i3330,i3820). +Le(i3400,i3820). +Le(i3430,i3820). +Le(i3500,i3820). +Le(i3520,i3820). +Le(i3580,i3820). +Le(i3610,i3820). +Le(i3650,i3820). +Le(i3680,i3820). +Le(i3720,i3820). +Le(i3740,i3820). +Le(i3790,i3820). +Le(i-30,i3860). 
+Le(i0,i3860). +Le(i13,i3860). +Le(i26,i3860). +Le(i39,i3860). +Le(i52,i3860). +Le(i60,i3860). +Le(i65,i3860). +Le(i70,i3860). +Le(i78,i3860). +Le(i90,i3860). +Le(i91,i3860). +Le(i104,i3860). +Le(i117,i3860). +Le(i130,i3860). +Le(i143,i3860). +Le(i156,i3860). +Le(i169,i3860). +Le(i182,i3860). +Le(i195,i3860). +Le(i208,i3860). +Le(i221,i3860). +Le(i234,i3860). +Le(i247,i3860). +Le(i260,i3860). +Le(i460,i3860). +Le(i530,i3860). +Le(i600,i3860). +Le(i660,i3860). +Le(i670,i3860). +Le(i710,i3860). +Le(i740,i3860). +Le(i810,i3860). +Le(i850,i3860). +Le(i880,i3860). +Le(i890,i3860). +Le(i920,i3860). +Le(i960,i3860). +Le(i990,i3860). +Le(i1030,i3860). +Le(i1060,i3860). +Le(i1100,i3860). +Le(i1130,i3860). +Le(i1170,i3860). +Le(i1200,i3860). +Le(i1240,i3860). +Le(i1260,i3860). +Le(i1270,i3860). +Le(i1290,i3860). +Le(i1310,i3860). +Le(i1320,i3860). +Le(i1330,i3860). +Le(i1350,i3860). +Le(i1360,i3860). +Le(i1380,i3860). +Le(i1390,i3860). +Le(i1420,i3860). +Le(i1430,i3860). +Le(i1450,i3860). +Le(i1460,i3860). +Le(i1490,i3860). +Le(i1520,i3860). +Le(i1530,i3860). +Le(i1540,i3860). +Le(i1560,i3860). +Le(i1590,i3860). +Le(i1630,i3860). +Le(i1660,i3860). +Le(i1700,i3860). +Le(i1730,i3860). +Le(i1760,i3860). +Le(i1770,i3860). +Le(i1810,i3860). +Le(i1840,i3860). +Le(i1880,i3860). +Le(i1910,i3860). +Le(i1950,i3860). +Le(i1980,i3860). +Le(i2020,i3860). +Le(i2050,i3860). +Le(i2090,i3860). +Le(i2120,i3860). +Le(i2160,i3860). +Le(i2190,i3860). +Le(i2200,i3860). +Le(i2230,i3860). +Le(i2270,i3860). +Le(i2300,i3860). +Le(i2340,i3860). +Le(i2370,i3860). +Le(i2410,i3860). +Le(i2420,i3860). +Le(i2440,i3860). +Le(i2480,i3860). +Le(i2510,i3860). +Le(i2550,i3860). +Le(i2580,i3860). +Le(i2620,i3860). +Le(i2640,i3860). +Le(i2660,i3860). +Le(i2730,i3860). +Le(i2760,i3860). +Le(i2800,i3860). +Le(i2830,i3860). +Le(i2860,i3860). +Le(i2870,i3860). +Le(i2940,i3860). +Le(i2970,i3860). +Le(i3010,i3860). +Le(i3040,i3860). +Le(i3080,i3860). +Le(i3120,i3860). +Le(i3150,i3860). +Le(i3220,i3860). +Le(i3260,i3860). +Le(i3290,i3860). +Le(i3300,i3860). +Le(i3330,i3860). +Le(i3400,i3860). +Le(i3430,i3860). +Le(i3500,i3860). +Le(i3520,i3860). +Le(i3580,i3860). +Le(i3610,i3860). +Le(i3650,i3860). +Le(i3680,i3860). +Le(i3720,i3860). +Le(i3740,i3860). +Le(i3790,i3860). +Le(i3820,i3860). +Le(i-30,i3960). +Le(i0,i3960). +Le(i13,i3960). +Le(i26,i3960). +Le(i39,i3960). +Le(i52,i3960). +Le(i60,i3960). +Le(i65,i3960). +Le(i70,i3960). +Le(i78,i3960). +Le(i90,i3960). +Le(i91,i3960). +Le(i104,i3960). +Le(i117,i3960). +Le(i130,i3960). +Le(i143,i3960). +Le(i156,i3960). +Le(i169,i3960). +Le(i182,i3960). +Le(i195,i3960). +Le(i208,i3960). +Le(i221,i3960). +Le(i234,i3960). +Le(i247,i3960). +Le(i260,i3960). +Le(i460,i3960). +Le(i530,i3960). +Le(i600,i3960). +Le(i660,i3960). +Le(i670,i3960). +Le(i710,i3960). +Le(i740,i3960). +Le(i810,i3960). +Le(i850,i3960). +Le(i880,i3960). +Le(i890,i3960). +Le(i920,i3960). +Le(i960,i3960). +Le(i990,i3960). +Le(i1030,i3960). +Le(i1060,i3960). +Le(i1100,i3960). +Le(i1130,i3960). +Le(i1170,i3960). +Le(i1200,i3960). +Le(i1240,i3960). +Le(i1260,i3960). +Le(i1270,i3960). +Le(i1290,i3960). +Le(i1310,i3960). +Le(i1320,i3960). +Le(i1330,i3960). +Le(i1350,i3960). +Le(i1360,i3960). +Le(i1380,i3960). +Le(i1390,i3960). +Le(i1420,i3960). +Le(i1430,i3960). +Le(i1450,i3960). +Le(i1460,i3960). +Le(i1490,i3960). +Le(i1520,i3960). +Le(i1530,i3960). +Le(i1540,i3960). +Le(i1560,i3960). +Le(i1590,i3960). +Le(i1630,i3960). +Le(i1660,i3960). +Le(i1700,i3960). +Le(i1730,i3960). +Le(i1760,i3960). +Le(i1770,i3960). +Le(i1810,i3960). +Le(i1840,i3960). 
+Le(i1880,i3960). +Le(i1910,i3960). +Le(i1950,i3960). +Le(i1980,i3960). +Le(i2020,i3960). +Le(i2050,i3960). +Le(i2090,i3960). +Le(i2120,i3960). +Le(i2160,i3960). +Le(i2190,i3960). +Le(i2200,i3960). +Le(i2230,i3960). +Le(i2270,i3960). +Le(i2300,i3960). +Le(i2340,i3960). +Le(i2370,i3960). +Le(i2410,i3960). +Le(i2420,i3960). +Le(i2440,i3960). +Le(i2480,i3960). +Le(i2510,i3960). +Le(i2550,i3960). +Le(i2580,i3960). +Le(i2620,i3960). +Le(i2640,i3960). +Le(i2660,i3960). +Le(i2730,i3960). +Le(i2760,i3960). +Le(i2800,i3960). +Le(i2830,i3960). +Le(i2860,i3960). +Le(i2870,i3960). +Le(i2940,i3960). +Le(i2970,i3960). +Le(i3010,i3960). +Le(i3040,i3960). +Le(i3080,i3960). +Le(i3120,i3960). +Le(i3150,i3960). +Le(i3220,i3960). +Le(i3260,i3960). +Le(i3290,i3960). +Le(i3300,i3960). +Le(i3330,i3960). +Le(i3400,i3960). +Le(i3430,i3960). +Le(i3500,i3960). +Le(i3520,i3960). +Le(i3580,i3960). +Le(i3610,i3960). +Le(i3650,i3960). +Le(i3680,i3960). +Le(i3720,i3960). +Le(i3740,i3960). +Le(i3790,i3960). +Le(i3820,i3960). +Le(i3860,i3960). +Le(i-30,i4040). +Le(i0,i4040). +Le(i13,i4040). +Le(i26,i4040). +Le(i39,i4040). +Le(i52,i4040). +Le(i60,i4040). +Le(i65,i4040). +Le(i70,i4040). +Le(i78,i4040). +Le(i90,i4040). +Le(i91,i4040). +Le(i104,i4040). +Le(i117,i4040). +Le(i130,i4040). +Le(i143,i4040). +Le(i156,i4040). +Le(i169,i4040). +Le(i182,i4040). +Le(i195,i4040). +Le(i208,i4040). +Le(i221,i4040). +Le(i234,i4040). +Le(i247,i4040). +Le(i260,i4040). +Le(i460,i4040). +Le(i530,i4040). +Le(i600,i4040). +Le(i660,i4040). +Le(i670,i4040). +Le(i710,i4040). +Le(i740,i4040). +Le(i810,i4040). +Le(i850,i4040). +Le(i880,i4040). +Le(i890,i4040). +Le(i920,i4040). +Le(i960,i4040). +Le(i990,i4040). +Le(i1030,i4040). +Le(i1060,i4040). +Le(i1100,i4040). +Le(i1130,i4040). +Le(i1170,i4040). +Le(i1200,i4040). +Le(i1240,i4040). +Le(i1260,i4040). +Le(i1270,i4040). +Le(i1290,i4040). +Le(i1310,i4040). +Le(i1320,i4040). +Le(i1330,i4040). +Le(i1350,i4040). +Le(i1360,i4040). +Le(i1380,i4040). +Le(i1390,i4040). +Le(i1420,i4040). +Le(i1430,i4040). +Le(i1450,i4040). +Le(i1460,i4040). +Le(i1490,i4040). +Le(i1520,i4040). +Le(i1530,i4040). +Le(i1540,i4040). +Le(i1560,i4040). +Le(i1590,i4040). +Le(i1630,i4040). +Le(i1660,i4040). +Le(i1700,i4040). +Le(i1730,i4040). +Le(i1760,i4040). +Le(i1770,i4040). +Le(i1810,i4040). +Le(i1840,i4040). +Le(i1880,i4040). +Le(i1910,i4040). +Le(i1950,i4040). +Le(i1980,i4040). +Le(i2020,i4040). +Le(i2050,i4040). +Le(i2090,i4040). +Le(i2120,i4040). +Le(i2160,i4040). +Le(i2190,i4040). +Le(i2200,i4040). +Le(i2230,i4040). +Le(i2270,i4040). +Le(i2300,i4040). +Le(i2340,i4040). +Le(i2370,i4040). +Le(i2410,i4040). +Le(i2420,i4040). +Le(i2440,i4040). +Le(i2480,i4040). +Le(i2510,i4040). +Le(i2550,i4040). +Le(i2580,i4040). +Le(i2620,i4040). +Le(i2640,i4040). +Le(i2660,i4040). +Le(i2730,i4040). +Le(i2760,i4040). +Le(i2800,i4040). +Le(i2830,i4040). +Le(i2860,i4040). +Le(i2870,i4040). +Le(i2940,i4040). +Le(i2970,i4040). +Le(i3010,i4040). +Le(i3040,i4040). +Le(i3080,i4040). +Le(i3120,i4040). +Le(i3150,i4040). +Le(i3220,i4040). +Le(i3260,i4040). +Le(i3290,i4040). +Le(i3300,i4040). +Le(i3330,i4040). +Le(i3400,i4040). +Le(i3430,i4040). +Le(i3500,i4040). +Le(i3520,i4040). +Le(i3580,i4040). +Le(i3610,i4040). +Le(i3650,i4040). +Le(i3680,i4040). +Le(i3720,i4040). +Le(i3740,i4040). +Le(i3790,i4040). +Le(i3820,i4040). +Le(i3860,i4040). +Le(i3960,i4040). +Le(i-30,i4140). +Le(i0,i4140). +Le(i13,i4140). +Le(i26,i4140). +Le(i39,i4140). +Le(i52,i4140). +Le(i60,i4140). +Le(i65,i4140). +Le(i70,i4140). +Le(i78,i4140). +Le(i90,i4140). +Le(i91,i4140). 
+Le(i104,i4140). +Le(i117,i4140). +Le(i130,i4140). +Le(i143,i4140). +Le(i156,i4140). +Le(i169,i4140). +Le(i182,i4140). +Le(i195,i4140). +Le(i208,i4140). +Le(i221,i4140). +Le(i234,i4140). +Le(i247,i4140). +Le(i260,i4140). +Le(i460,i4140). +Le(i530,i4140). +Le(i600,i4140). +Le(i660,i4140). +Le(i670,i4140). +Le(i710,i4140). +Le(i740,i4140). +Le(i810,i4140). +Le(i850,i4140). +Le(i880,i4140). +Le(i890,i4140). +Le(i920,i4140). +Le(i960,i4140). +Le(i990,i4140). +Le(i1030,i4140). +Le(i1060,i4140). +Le(i1100,i4140). +Le(i1130,i4140). +Le(i1170,i4140). +Le(i1200,i4140). +Le(i1240,i4140). +Le(i1260,i4140). +Le(i1270,i4140). +Le(i1290,i4140). +Le(i1310,i4140). +Le(i1320,i4140). +Le(i1330,i4140). +Le(i1350,i4140). +Le(i1360,i4140). +Le(i1380,i4140). +Le(i1390,i4140). +Le(i1420,i4140). +Le(i1430,i4140). +Le(i1450,i4140). +Le(i1460,i4140). +Le(i1490,i4140). +Le(i1520,i4140). +Le(i1530,i4140). +Le(i1540,i4140). +Le(i1560,i4140). +Le(i1590,i4140). +Le(i1630,i4140). +Le(i1660,i4140). +Le(i1700,i4140). +Le(i1730,i4140). +Le(i1760,i4140). +Le(i1770,i4140). +Le(i1810,i4140). +Le(i1840,i4140). +Le(i1880,i4140). +Le(i1910,i4140). +Le(i1950,i4140). +Le(i1980,i4140). +Le(i2020,i4140). +Le(i2050,i4140). +Le(i2090,i4140). +Le(i2120,i4140). +Le(i2160,i4140). +Le(i2190,i4140). +Le(i2200,i4140). +Le(i2230,i4140). +Le(i2270,i4140). +Le(i2300,i4140). +Le(i2340,i4140). +Le(i2370,i4140). +Le(i2410,i4140). +Le(i2420,i4140). +Le(i2440,i4140). +Le(i2480,i4140). +Le(i2510,i4140). +Le(i2550,i4140). +Le(i2580,i4140). +Le(i2620,i4140). +Le(i2640,i4140). +Le(i2660,i4140). +Le(i2730,i4140). +Le(i2760,i4140). +Le(i2800,i4140). +Le(i2830,i4140). +Le(i2860,i4140). +Le(i2870,i4140). +Le(i2940,i4140). +Le(i2970,i4140). +Le(i3010,i4140). +Le(i3040,i4140). +Le(i3080,i4140). +Le(i3120,i4140). +Le(i3150,i4140). +Le(i3220,i4140). +Le(i3260,i4140). +Le(i3290,i4140). +Le(i3300,i4140). +Le(i3330,i4140). +Le(i3400,i4140). +Le(i3430,i4140). +Le(i3500,i4140). +Le(i3520,i4140). +Le(i3580,i4140). +Le(i3610,i4140). +Le(i3650,i4140). +Le(i3680,i4140). +Le(i3720,i4140). +Le(i3740,i4140). +Le(i3790,i4140). +Le(i3820,i4140). +Le(i3860,i4140). +Le(i3960,i4140). +Le(i4040,i4140). +Le(i-30,i4180). +Le(i0,i4180). +Le(i13,i4180). +Le(i26,i4180). +Le(i39,i4180). +Le(i52,i4180). +Le(i60,i4180). +Le(i65,i4180). +Le(i70,i4180). +Le(i78,i4180). +Le(i90,i4180). +Le(i91,i4180). +Le(i104,i4180). +Le(i117,i4180). +Le(i130,i4180). +Le(i143,i4180). +Le(i156,i4180). +Le(i169,i4180). +Le(i182,i4180). +Le(i195,i4180). +Le(i208,i4180). +Le(i221,i4180). +Le(i234,i4180). +Le(i247,i4180). +Le(i260,i4180). +Le(i460,i4180). +Le(i530,i4180). +Le(i600,i4180). +Le(i660,i4180). +Le(i670,i4180). +Le(i710,i4180). +Le(i740,i4180). +Le(i810,i4180). +Le(i850,i4180). +Le(i880,i4180). +Le(i890,i4180). +Le(i920,i4180). +Le(i960,i4180). +Le(i990,i4180). +Le(i1030,i4180). +Le(i1060,i4180). +Le(i1100,i4180). +Le(i1130,i4180). +Le(i1170,i4180). +Le(i1200,i4180). +Le(i1240,i4180). +Le(i1260,i4180). +Le(i1270,i4180). +Le(i1290,i4180). +Le(i1310,i4180). +Le(i1320,i4180). +Le(i1330,i4180). +Le(i1350,i4180). +Le(i1360,i4180). +Le(i1380,i4180). +Le(i1390,i4180). +Le(i1420,i4180). +Le(i1430,i4180). +Le(i1450,i4180). +Le(i1460,i4180). +Le(i1490,i4180). +Le(i1520,i4180). +Le(i1530,i4180). +Le(i1540,i4180). +Le(i1560,i4180). +Le(i1590,i4180). +Le(i1630,i4180). +Le(i1660,i4180). +Le(i1700,i4180). +Le(i1730,i4180). +Le(i1760,i4180). +Le(i1770,i4180). +Le(i1810,i4180). +Le(i1840,i4180). +Le(i1880,i4180). +Le(i1910,i4180). +Le(i1950,i4180). +Le(i1980,i4180). +Le(i2020,i4180). +Le(i2050,i4180). 
+Le(i2090,i4180). +Le(i2120,i4180). +Le(i2160,i4180). +Le(i2190,i4180). +Le(i2200,i4180). +Le(i2230,i4180). +Le(i2270,i4180). +Le(i2300,i4180). +Le(i2340,i4180). +Le(i2370,i4180). +Le(i2410,i4180). +Le(i2420,i4180). +Le(i2440,i4180). +Le(i2480,i4180). +Le(i2510,i4180). +Le(i2550,i4180). +Le(i2580,i4180). +Le(i2620,i4180). +Le(i2640,i4180). +Le(i2660,i4180). +Le(i2730,i4180). +Le(i2760,i4180). +Le(i2800,i4180). +Le(i2830,i4180). +Le(i2860,i4180). +Le(i2870,i4180). +Le(i2940,i4180). +Le(i2970,i4180). +Le(i3010,i4180). +Le(i3040,i4180). +Le(i3080,i4180). +Le(i3120,i4180). +Le(i3150,i4180). +Le(i3220,i4180). +Le(i3260,i4180). +Le(i3290,i4180). +Le(i3300,i4180). +Le(i3330,i4180). +Le(i3400,i4180). +Le(i3430,i4180). +Le(i3500,i4180). +Le(i3520,i4180). +Le(i3580,i4180). +Le(i3610,i4180). +Le(i3650,i4180). +Le(i3680,i4180). +Le(i3720,i4180). +Le(i3740,i4180). +Le(i3790,i4180). +Le(i3820,i4180). +Le(i3860,i4180). +Le(i3960,i4180). +Le(i4040,i4180). +Le(i4140,i4180). +Le(i-30,i4400). +Le(i0,i4400). +Le(i13,i4400). +Le(i26,i4400). +Le(i39,i4400). +Le(i52,i4400). +Le(i60,i4400). +Le(i65,i4400). +Le(i70,i4400). +Le(i78,i4400). +Le(i90,i4400). +Le(i91,i4400). +Le(i104,i4400). +Le(i117,i4400). +Le(i130,i4400). +Le(i143,i4400). +Le(i156,i4400). +Le(i169,i4400). +Le(i182,i4400). +Le(i195,i4400). +Le(i208,i4400). +Le(i221,i4400). +Le(i234,i4400). +Le(i247,i4400). +Le(i260,i4400). +Le(i460,i4400). +Le(i530,i4400). +Le(i600,i4400). +Le(i660,i4400). +Le(i670,i4400). +Le(i710,i4400). +Le(i740,i4400). +Le(i810,i4400). +Le(i850,i4400). +Le(i880,i4400). +Le(i890,i4400). +Le(i920,i4400). +Le(i960,i4400). +Le(i990,i4400). +Le(i1030,i4400). +Le(i1060,i4400). +Le(i1100,i4400). +Le(i1130,i4400). +Le(i1170,i4400). +Le(i1200,i4400). +Le(i1240,i4400). +Le(i1260,i4400). +Le(i1270,i4400). +Le(i1290,i4400). +Le(i1310,i4400). +Le(i1320,i4400). +Le(i1330,i4400). +Le(i1350,i4400). +Le(i1360,i4400). +Le(i1380,i4400). +Le(i1390,i4400). +Le(i1420,i4400). +Le(i1430,i4400). +Le(i1450,i4400). +Le(i1460,i4400). +Le(i1490,i4400). +Le(i1520,i4400). +Le(i1530,i4400). +Le(i1540,i4400). +Le(i1560,i4400). +Le(i1590,i4400). +Le(i1630,i4400). +Le(i1660,i4400). +Le(i1700,i4400). +Le(i1730,i4400). +Le(i1760,i4400). +Le(i1770,i4400). +Le(i1810,i4400). +Le(i1840,i4400). +Le(i1880,i4400). +Le(i1910,i4400). +Le(i1950,i4400). +Le(i1980,i4400). +Le(i2020,i4400). +Le(i2050,i4400). +Le(i2090,i4400). +Le(i2120,i4400). +Le(i2160,i4400). +Le(i2190,i4400). +Le(i2200,i4400). +Le(i2230,i4400). +Le(i2270,i4400). +Le(i2300,i4400). +Le(i2340,i4400). +Le(i2370,i4400). +Le(i2410,i4400). +Le(i2420,i4400). +Le(i2440,i4400). +Le(i2480,i4400). +Le(i2510,i4400). +Le(i2550,i4400). +Le(i2580,i4400). +Le(i2620,i4400). +Le(i2640,i4400). +Le(i2660,i4400). +Le(i2730,i4400). +Le(i2760,i4400). +Le(i2800,i4400). +Le(i2830,i4400). +Le(i2860,i4400). +Le(i2870,i4400). +Le(i2940,i4400). +Le(i2970,i4400). +Le(i3010,i4400). +Le(i3040,i4400). +Le(i3080,i4400). +Le(i3120,i4400). +Le(i3150,i4400). +Le(i3220,i4400). +Le(i3260,i4400). +Le(i3290,i4400). +Le(i3300,i4400). +Le(i3330,i4400). +Le(i3400,i4400). +Le(i3430,i4400). +Le(i3500,i4400). +Le(i3520,i4400). +Le(i3580,i4400). +Le(i3610,i4400). +Le(i3650,i4400). +Le(i3680,i4400). +Le(i3720,i4400). +Le(i3740,i4400). +Le(i3790,i4400). +Le(i3820,i4400). +Le(i3860,i4400). +Le(i3960,i4400). +Le(i4040,i4400). +Le(i4140,i4400). +Le(i4180,i4400). +Le(i-30,i4620). +Le(i0,i4620). +Le(i13,i4620). +Le(i26,i4620). +Le(i39,i4620). +Le(i52,i4620). +Le(i60,i4620). +Le(i65,i4620). +Le(i70,i4620). +Le(i78,i4620). +Le(i90,i4620). +Le(i91,i4620). 
+Le(i104,i4620). +Le(i117,i4620). +Le(i130,i4620). +Le(i143,i4620). +Le(i156,i4620). +Le(i169,i4620). +Le(i182,i4620). +Le(i195,i4620). +Le(i208,i4620). +Le(i221,i4620). +Le(i234,i4620). +Le(i247,i4620). +Le(i260,i4620). +Le(i460,i4620). +Le(i530,i4620). +Le(i600,i4620). +Le(i660,i4620). +Le(i670,i4620). +Le(i710,i4620). +Le(i740,i4620). +Le(i810,i4620). +Le(i850,i4620). +Le(i880,i4620). +Le(i890,i4620). +Le(i920,i4620). +Le(i960,i4620). +Le(i990,i4620). +Le(i1030,i4620). +Le(i1060,i4620). +Le(i1100,i4620). +Le(i1130,i4620). +Le(i1170,i4620). +Le(i1200,i4620). +Le(i1240,i4620). +Le(i1260,i4620). +Le(i1270,i4620). +Le(i1290,i4620). +Le(i1310,i4620). +Le(i1320,i4620). +Le(i1330,i4620). +Le(i1350,i4620). +Le(i1360,i4620). +Le(i1380,i4620). +Le(i1390,i4620). +Le(i1420,i4620). +Le(i1430,i4620). +Le(i1450,i4620). +Le(i1460,i4620). +Le(i1490,i4620). +Le(i1520,i4620). +Le(i1530,i4620). +Le(i1540,i4620). +Le(i1560,i4620). +Le(i1590,i4620). +Le(i1630,i4620). +Le(i1660,i4620). +Le(i1700,i4620). +Le(i1730,i4620). +Le(i1760,i4620). +Le(i1770,i4620). +Le(i1810,i4620). +Le(i1840,i4620). +Le(i1880,i4620). +Le(i1910,i4620). +Le(i1950,i4620). +Le(i1980,i4620). +Le(i2020,i4620). +Le(i2050,i4620). +Le(i2090,i4620). +Le(i2120,i4620). +Le(i2160,i4620). +Le(i2190,i4620). +Le(i2200,i4620). +Le(i2230,i4620). +Le(i2270,i4620). +Le(i2300,i4620). +Le(i2340,i4620). +Le(i2370,i4620). +Le(i2410,i4620). +Le(i2420,i4620). +Le(i2440,i4620). +Le(i2480,i4620). +Le(i2510,i4620). +Le(i2550,i4620). +Le(i2580,i4620). +Le(i2620,i4620). +Le(i2640,i4620). +Le(i2660,i4620). +Le(i2730,i4620). +Le(i2760,i4620). +Le(i2800,i4620). +Le(i2830,i4620). +Le(i2860,i4620). +Le(i2870,i4620). +Le(i2940,i4620). +Le(i2970,i4620). +Le(i3010,i4620). +Le(i3040,i4620). +Le(i3080,i4620). +Le(i3120,i4620). +Le(i3150,i4620). +Le(i3220,i4620). +Le(i3260,i4620). +Le(i3290,i4620). +Le(i3300,i4620). +Le(i3330,i4620). +Le(i3400,i4620). +Le(i3430,i4620). +Le(i3500,i4620). +Le(i3520,i4620). +Le(i3580,i4620). +Le(i3610,i4620). +Le(i3650,i4620). +Le(i3680,i4620). +Le(i3720,i4620). +Le(i3740,i4620). +Le(i3790,i4620). +Le(i3820,i4620). +Le(i3860,i4620). +Le(i3960,i4620). +Le(i4040,i4620). +Le(i4140,i4620). +Le(i4180,i4620). +Le(i4400,i4620). +Le(i-30,i4840). +Le(i0,i4840). +Le(i13,i4840). +Le(i26,i4840). +Le(i39,i4840). +Le(i52,i4840). +Le(i60,i4840). +Le(i65,i4840). +Le(i70,i4840). +Le(i78,i4840). +Le(i90,i4840). +Le(i91,i4840). +Le(i104,i4840). +Le(i117,i4840). +Le(i130,i4840). +Le(i143,i4840). +Le(i156,i4840). +Le(i169,i4840). +Le(i182,i4840). +Le(i195,i4840). +Le(i208,i4840). +Le(i221,i4840). +Le(i234,i4840). +Le(i247,i4840). +Le(i260,i4840). +Le(i460,i4840). +Le(i530,i4840). +Le(i600,i4840). +Le(i660,i4840). +Le(i670,i4840). +Le(i710,i4840). +Le(i740,i4840). +Le(i810,i4840). +Le(i850,i4840). +Le(i880,i4840). +Le(i890,i4840). +Le(i920,i4840). +Le(i960,i4840). +Le(i990,i4840). +Le(i1030,i4840). +Le(i1060,i4840). +Le(i1100,i4840). +Le(i1130,i4840). +Le(i1170,i4840). +Le(i1200,i4840). +Le(i1240,i4840). +Le(i1260,i4840). +Le(i1270,i4840). +Le(i1290,i4840). +Le(i1310,i4840). +Le(i1320,i4840). +Le(i1330,i4840). +Le(i1350,i4840). +Le(i1360,i4840). +Le(i1380,i4840). +Le(i1390,i4840). +Le(i1420,i4840). +Le(i1430,i4840). +Le(i1450,i4840). +Le(i1460,i4840). +Le(i1490,i4840). +Le(i1520,i4840). +Le(i1530,i4840). +Le(i1540,i4840). +Le(i1560,i4840). +Le(i1590,i4840). +Le(i1630,i4840). +Le(i1660,i4840). +Le(i1700,i4840). +Le(i1730,i4840). +Le(i1760,i4840). +Le(i1770,i4840). +Le(i1810,i4840). +Le(i1840,i4840). +Le(i1880,i4840). +Le(i1910,i4840). +Le(i1950,i4840). 
+Le(i1980,i4840). +Le(i2020,i4840). +Le(i2050,i4840). +Le(i2090,i4840). +Le(i2120,i4840). +Le(i2160,i4840). +Le(i2190,i4840). +Le(i2200,i4840). +Le(i2230,i4840). +Le(i2270,i4840). +Le(i2300,i4840). +Le(i2340,i4840). +Le(i2370,i4840). +Le(i2410,i4840). +Le(i2420,i4840). +Le(i2440,i4840). +Le(i2480,i4840). +Le(i2510,i4840). +Le(i2550,i4840). +Le(i2580,i4840). +Le(i2620,i4840). +Le(i2640,i4840). +Le(i2660,i4840). +Le(i2730,i4840). +Le(i2760,i4840). +Le(i2800,i4840). +Le(i2830,i4840). +Le(i2860,i4840). +Le(i2870,i4840). +Le(i2940,i4840). +Le(i2970,i4840). +Le(i3010,i4840). +Le(i3040,i4840). +Le(i3080,i4840). +Le(i3120,i4840). +Le(i3150,i4840). +Le(i3220,i4840). +Le(i3260,i4840). +Le(i3290,i4840). +Le(i3300,i4840). +Le(i3330,i4840). +Le(i3400,i4840). +Le(i3430,i4840). +Le(i3500,i4840). +Le(i3520,i4840). +Le(i3580,i4840). +Le(i3610,i4840). +Le(i3650,i4840). +Le(i3680,i4840). +Le(i3720,i4840). +Le(i3740,i4840). +Le(i3790,i4840). +Le(i3820,i4840). +Le(i3860,i4840). +Le(i3960,i4840). +Le(i4040,i4840). +Le(i4140,i4840). +Le(i4180,i4840). +Le(i4400,i4840). +Le(i4620,i4840). +Le(i-30,i5060). +Le(i0,i5060). +Le(i13,i5060). +Le(i26,i5060). +Le(i39,i5060). +Le(i52,i5060). +Le(i60,i5060). +Le(i65,i5060). +Le(i70,i5060). +Le(i78,i5060). +Le(i90,i5060). +Le(i91,i5060). +Le(i104,i5060). +Le(i117,i5060). +Le(i130,i5060). +Le(i143,i5060). +Le(i156,i5060). +Le(i169,i5060). +Le(i182,i5060). +Le(i195,i5060). +Le(i208,i5060). +Le(i221,i5060). +Le(i234,i5060). +Le(i247,i5060). +Le(i260,i5060). +Le(i460,i5060). +Le(i530,i5060). +Le(i600,i5060). +Le(i660,i5060). +Le(i670,i5060). +Le(i710,i5060). +Le(i740,i5060). +Le(i810,i5060). +Le(i850,i5060). +Le(i880,i5060). +Le(i890,i5060). +Le(i920,i5060). +Le(i960,i5060). +Le(i990,i5060). +Le(i1030,i5060). +Le(i1060,i5060). +Le(i1100,i5060). +Le(i1130,i5060). +Le(i1170,i5060). +Le(i1200,i5060). +Le(i1240,i5060). +Le(i1260,i5060). +Le(i1270,i5060). +Le(i1290,i5060). +Le(i1310,i5060). +Le(i1320,i5060). +Le(i1330,i5060). +Le(i1350,i5060). +Le(i1360,i5060). +Le(i1380,i5060). +Le(i1390,i5060). +Le(i1420,i5060). +Le(i1430,i5060). +Le(i1450,i5060). +Le(i1460,i5060). +Le(i1490,i5060). +Le(i1520,i5060). +Le(i1530,i5060). +Le(i1540,i5060). +Le(i1560,i5060). +Le(i1590,i5060). +Le(i1630,i5060). +Le(i1660,i5060). +Le(i1700,i5060). +Le(i1730,i5060). +Le(i1760,i5060). +Le(i1770,i5060). +Le(i1810,i5060). +Le(i1840,i5060). +Le(i1880,i5060). +Le(i1910,i5060). +Le(i1950,i5060). +Le(i1980,i5060). +Le(i2020,i5060). +Le(i2050,i5060). +Le(i2090,i5060). +Le(i2120,i5060). +Le(i2160,i5060). +Le(i2190,i5060). +Le(i2200,i5060). +Le(i2230,i5060). +Le(i2270,i5060). +Le(i2300,i5060). +Le(i2340,i5060). +Le(i2370,i5060). +Le(i2410,i5060). +Le(i2420,i5060). +Le(i2440,i5060). +Le(i2480,i5060). +Le(i2510,i5060). +Le(i2550,i5060). +Le(i2580,i5060). +Le(i2620,i5060). +Le(i2640,i5060). +Le(i2660,i5060). +Le(i2730,i5060). +Le(i2760,i5060). +Le(i2800,i5060). +Le(i2830,i5060). +Le(i2860,i5060). +Le(i2870,i5060). +Le(i2940,i5060). +Le(i2970,i5060). +Le(i3010,i5060). +Le(i3040,i5060). +Le(i3080,i5060). +Le(i3120,i5060). +Le(i3150,i5060). +Le(i3220,i5060). +Le(i3260,i5060). +Le(i3290,i5060). +Le(i3300,i5060). +Le(i3330,i5060). +Le(i3400,i5060). +Le(i3430,i5060). +Le(i3500,i5060). +Le(i3520,i5060). +Le(i3580,i5060). +Le(i3610,i5060). +Le(i3650,i5060). +Le(i3680,i5060). +Le(i3720,i5060). +Le(i3740,i5060). +Le(i3790,i5060). +Le(i3820,i5060). +Le(i3860,i5060). +Le(i3960,i5060). +Le(i4040,i5060). +Le(i4140,i5060). +Le(i4180,i5060). +Le(i4400,i5060). +Le(i4620,i5060). +Le(i4840,i5060). +Le(i-30,i5280). +Le(i0,i5280). 
+Le(i13,i5280). +Le(i26,i5280). +Le(i39,i5280). +Le(i52,i5280). +Le(i60,i5280). +Le(i65,i5280). +Le(i70,i5280). +Le(i78,i5280). +Le(i90,i5280). +Le(i91,i5280). +Le(i104,i5280). +Le(i117,i5280). +Le(i130,i5280). +Le(i143,i5280). +Le(i156,i5280). +Le(i169,i5280). +Le(i182,i5280). +Le(i195,i5280). +Le(i208,i5280). +Le(i221,i5280). +Le(i234,i5280). +Le(i247,i5280). +Le(i260,i5280). +Le(i460,i5280). +Le(i530,i5280). +Le(i600,i5280). +Le(i660,i5280). +Le(i670,i5280). +Le(i710,i5280). +Le(i740,i5280). +Le(i810,i5280). +Le(i850,i5280). +Le(i880,i5280). +Le(i890,i5280). +Le(i920,i5280). +Le(i960,i5280). +Le(i990,i5280). +Le(i1030,i5280). +Le(i1060,i5280). +Le(i1100,i5280). +Le(i1130,i5280). +Le(i1170,i5280). +Le(i1200,i5280). +Le(i1240,i5280). +Le(i1260,i5280). +Le(i1270,i5280). +Le(i1290,i5280). +Le(i1310,i5280). +Le(i1320,i5280). +Le(i1330,i5280). +Le(i1350,i5280). +Le(i1360,i5280). +Le(i1380,i5280). +Le(i1390,i5280). +Le(i1420,i5280). +Le(i1430,i5280). +Le(i1450,i5280). +Le(i1460,i5280). +Le(i1490,i5280). +Le(i1520,i5280). +Le(i1530,i5280). +Le(i1540,i5280). +Le(i1560,i5280). +Le(i1590,i5280). +Le(i1630,i5280). +Le(i1660,i5280). +Le(i1700,i5280). +Le(i1730,i5280). +Le(i1760,i5280). +Le(i1770,i5280). +Le(i1810,i5280). +Le(i1840,i5280). +Le(i1880,i5280). +Le(i1910,i5280). +Le(i1950,i5280). +Le(i1980,i5280). +Le(i2020,i5280). +Le(i2050,i5280). +Le(i2090,i5280). +Le(i2120,i5280). +Le(i2160,i5280). +Le(i2190,i5280). +Le(i2200,i5280). +Le(i2230,i5280). +Le(i2270,i5280). +Le(i2300,i5280). +Le(i2340,i5280). +Le(i2370,i5280). +Le(i2410,i5280). +Le(i2420,i5280). +Le(i2440,i5280). +Le(i2480,i5280). +Le(i2510,i5280). +Le(i2550,i5280). +Le(i2580,i5280). +Le(i2620,i5280). +Le(i2640,i5280). +Le(i2660,i5280). +Le(i2730,i5280). +Le(i2760,i5280). +Le(i2800,i5280). +Le(i2830,i5280). +Le(i2860,i5280). +Le(i2870,i5280). +Le(i2940,i5280). +Le(i2970,i5280). +Le(i3010,i5280). +Le(i3040,i5280). +Le(i3080,i5280). +Le(i3120,i5280). +Le(i3150,i5280). +Le(i3220,i5280). +Le(i3260,i5280). +Le(i3290,i5280). +Le(i3300,i5280). +Le(i3330,i5280). +Le(i3400,i5280). +Le(i3430,i5280). +Le(i3500,i5280). +Le(i3520,i5280). +Le(i3580,i5280). +Le(i3610,i5280). +Le(i3650,i5280). +Le(i3680,i5280). +Le(i3720,i5280). +Le(i3740,i5280). +Le(i3790,i5280). +Le(i3820,i5280). +Le(i3860,i5280). +Le(i3960,i5280). +Le(i4040,i5280). +Le(i4140,i5280). +Le(i4180,i5280). +Le(i4400,i5280). +Le(i4620,i5280). +Le(i4840,i5280). +Le(i5060,i5280). +Le(i-30,i5500). +Le(i0,i5500). +Le(i13,i5500). +Le(i26,i5500). +Le(i39,i5500). +Le(i52,i5500). +Le(i60,i5500). +Le(i65,i5500). +Le(i70,i5500). +Le(i78,i5500). +Le(i90,i5500). +Le(i91,i5500). +Le(i104,i5500). +Le(i117,i5500). +Le(i130,i5500). +Le(i143,i5500). +Le(i156,i5500). +Le(i169,i5500). +Le(i182,i5500). +Le(i195,i5500). +Le(i208,i5500). +Le(i221,i5500). +Le(i234,i5500). +Le(i247,i5500). +Le(i260,i5500). +Le(i460,i5500). +Le(i530,i5500). +Le(i600,i5500). +Le(i660,i5500). +Le(i670,i5500). +Le(i710,i5500). +Le(i740,i5500). +Le(i810,i5500). +Le(i850,i5500). +Le(i880,i5500). +Le(i890,i5500). +Le(i920,i5500). +Le(i960,i5500). +Le(i990,i5500). +Le(i1030,i5500). +Le(i1060,i5500). +Le(i1100,i5500). +Le(i1130,i5500). +Le(i1170,i5500). +Le(i1200,i5500). +Le(i1240,i5500). +Le(i1260,i5500). +Le(i1270,i5500). +Le(i1290,i5500). +Le(i1310,i5500). +Le(i1320,i5500). +Le(i1330,i5500). +Le(i1350,i5500). +Le(i1360,i5500). +Le(i1380,i5500). +Le(i1390,i5500). +Le(i1420,i5500). +Le(i1430,i5500). +Le(i1450,i5500). +Le(i1460,i5500). +Le(i1490,i5500). +Le(i1520,i5500). +Le(i1530,i5500). +Le(i1540,i5500). +Le(i1560,i5500). 
+Le(i1590,i5500). +Le(i1630,i5500). +Le(i1660,i5500). +Le(i1700,i5500). +Le(i1730,i5500). +Le(i1760,i5500). +Le(i1770,i5500). +Le(i1810,i5500). +Le(i1840,i5500). +Le(i1880,i5500). +Le(i1910,i5500). +Le(i1950,i5500). +Le(i1980,i5500). +Le(i2020,i5500). +Le(i2050,i5500). +Le(i2090,i5500). +Le(i2120,i5500). +Le(i2160,i5500). +Le(i2190,i5500). +Le(i2200,i5500). +Le(i2230,i5500). +Le(i2270,i5500). +Le(i2300,i5500). +Le(i2340,i5500). +Le(i2370,i5500). +Le(i2410,i5500). +Le(i2420,i5500). +Le(i2440,i5500). +Le(i2480,i5500). +Le(i2510,i5500). +Le(i2550,i5500). +Le(i2580,i5500). +Le(i2620,i5500). +Le(i2640,i5500). +Le(i2660,i5500). +Le(i2730,i5500). +Le(i2760,i5500). +Le(i2800,i5500). +Le(i2830,i5500). +Le(i2860,i5500). +Le(i2870,i5500). +Le(i2940,i5500). +Le(i2970,i5500). +Le(i3010,i5500). +Le(i3040,i5500). +Le(i3080,i5500). +Le(i3120,i5500). +Le(i3150,i5500). +Le(i3220,i5500). +Le(i3260,i5500). +Le(i3290,i5500). +Le(i3300,i5500). +Le(i3330,i5500). +Le(i3400,i5500). +Le(i3430,i5500). +Le(i3500,i5500). +Le(i3520,i5500). +Le(i3580,i5500). +Le(i3610,i5500). +Le(i3650,i5500). +Le(i3680,i5500). +Le(i3720,i5500). +Le(i3740,i5500). +Le(i3790,i5500). +Le(i3820,i5500). +Le(i3860,i5500). +Le(i3960,i5500). +Le(i4040,i5500). +Le(i4140,i5500). +Le(i4180,i5500). +Le(i4400,i5500). +Le(i4620,i5500). +Le(i4840,i5500). +Le(i5060,i5500). +Le(i5280,i5500). +Le(i-30,i5720). +Le(i0,i5720). +Le(i13,i5720). +Le(i26,i5720). +Le(i39,i5720). +Le(i52,i5720). +Le(i60,i5720). +Le(i65,i5720). +Le(i70,i5720). +Le(i78,i5720). +Le(i90,i5720). +Le(i91,i5720). +Le(i104,i5720). +Le(i117,i5720). +Le(i130,i5720). +Le(i143,i5720). +Le(i156,i5720). +Le(i169,i5720). +Le(i182,i5720). +Le(i195,i5720). +Le(i208,i5720). +Le(i221,i5720). +Le(i234,i5720). +Le(i247,i5720). +Le(i260,i5720). +Le(i460,i5720). +Le(i530,i5720). +Le(i600,i5720). +Le(i660,i5720). +Le(i670,i5720). +Le(i710,i5720). +Le(i740,i5720). +Le(i810,i5720). +Le(i850,i5720). +Le(i880,i5720). +Le(i890,i5720). +Le(i920,i5720). +Le(i960,i5720). +Le(i990,i5720). +Le(i1030,i5720). +Le(i1060,i5720). +Le(i1100,i5720). +Le(i1130,i5720). +Le(i1170,i5720). +Le(i1200,i5720). +Le(i1240,i5720). +Le(i1260,i5720). +Le(i1270,i5720). +Le(i1290,i5720). +Le(i1310,i5720). +Le(i1320,i5720). +Le(i1330,i5720). +Le(i1350,i5720). +Le(i1360,i5720). +Le(i1380,i5720). +Le(i1390,i5720). +Le(i1420,i5720). +Le(i1430,i5720). +Le(i1450,i5720). +Le(i1460,i5720). +Le(i1490,i5720). +Le(i1520,i5720). +Le(i1530,i5720). +Le(i1540,i5720). +Le(i1560,i5720). +Le(i1590,i5720). +Le(i1630,i5720). +Le(i1660,i5720). +Le(i1700,i5720). +Le(i1730,i5720). +Le(i1760,i5720). +Le(i1770,i5720). +Le(i1810,i5720). +Le(i1840,i5720). +Le(i1880,i5720). +Le(i1910,i5720). +Le(i1950,i5720). +Le(i1980,i5720). +Le(i2020,i5720). +Le(i2050,i5720). +Le(i2090,i5720). +Le(i2120,i5720). +Le(i2160,i5720). +Le(i2190,i5720). +Le(i2200,i5720). +Le(i2230,i5720). +Le(i2270,i5720). +Le(i2300,i5720). +Le(i2340,i5720). +Le(i2370,i5720). +Le(i2410,i5720). +Le(i2420,i5720). +Le(i2440,i5720). +Le(i2480,i5720). +Le(i2510,i5720). +Le(i2550,i5720). +Le(i2580,i5720). +Le(i2620,i5720). +Le(i2640,i5720). +Le(i2660,i5720). +Le(i2730,i5720). +Le(i2760,i5720). +Le(i2800,i5720). +Le(i2830,i5720). +Le(i2860,i5720). +Le(i2870,i5720). +Le(i2940,i5720). +Le(i2970,i5720). +Le(i3010,i5720). +Le(i3040,i5720). +Le(i3080,i5720). +Le(i3120,i5720). +Le(i3150,i5720). +Le(i3220,i5720). +Le(i3260,i5720). +Le(i3290,i5720). +Le(i3300,i5720). +Le(i3330,i5720). +Le(i3400,i5720). +Le(i3430,i5720). +Le(i3500,i5720). +Le(i3520,i5720). +Le(i3580,i5720). +Le(i3610,i5720). +Le(i3650,i5720). 
+Le(i3680,i5720). +Le(i3720,i5720). +Le(i3740,i5720). +Le(i3790,i5720). +Le(i3820,i5720). +Le(i3860,i5720). +Le(i3960,i5720). +Le(i4040,i5720). +Le(i4140,i5720). +Le(i4180,i5720). +Le(i4400,i5720). +Le(i4620,i5720). +Le(i4840,i5720). +Le(i5060,i5720). +Le(i5280,i5720). +Le(i5500,i5720). +Le(i-30,i5940). +Le(i0,i5940). +Le(i13,i5940). +Le(i26,i5940). +Le(i39,i5940). +Le(i52,i5940). +Le(i60,i5940). +Le(i65,i5940). +Le(i70,i5940). +Le(i78,i5940). +Le(i90,i5940). +Le(i91,i5940). +Le(i104,i5940). +Le(i117,i5940). +Le(i130,i5940). +Le(i143,i5940). +Le(i156,i5940). +Le(i169,i5940). +Le(i182,i5940). +Le(i195,i5940). +Le(i208,i5940). +Le(i221,i5940). +Le(i234,i5940). +Le(i247,i5940). +Le(i260,i5940). +Le(i460,i5940). +Le(i530,i5940). +Le(i600,i5940). +Le(i660,i5940). +Le(i670,i5940). +Le(i710,i5940). +Le(i740,i5940). +Le(i810,i5940). +Le(i850,i5940). +Le(i880,i5940). +Le(i890,i5940). +Le(i920,i5940). +Le(i960,i5940). +Le(i990,i5940). +Le(i1030,i5940). +Le(i1060,i5940). +Le(i1100,i5940). +Le(i1130,i5940). +Le(i1170,i5940). +Le(i1200,i5940). +Le(i1240,i5940). +Le(i1260,i5940). +Le(i1270,i5940). +Le(i1290,i5940). +Le(i1310,i5940). +Le(i1320,i5940). +Le(i1330,i5940). +Le(i1350,i5940). +Le(i1360,i5940). +Le(i1380,i5940). +Le(i1390,i5940). +Le(i1420,i5940). +Le(i1430,i5940). +Le(i1450,i5940). +Le(i1460,i5940). +Le(i1490,i5940). +Le(i1520,i5940). +Le(i1530,i5940). +Le(i1540,i5940). +Le(i1560,i5940). +Le(i1590,i5940). +Le(i1630,i5940). +Le(i1660,i5940). +Le(i1700,i5940). +Le(i1730,i5940). +Le(i1760,i5940). +Le(i1770,i5940). +Le(i1810,i5940). +Le(i1840,i5940). +Le(i1880,i5940). +Le(i1910,i5940). +Le(i1950,i5940). +Le(i1980,i5940). +Le(i2020,i5940). +Le(i2050,i5940). +Le(i2090,i5940). +Le(i2120,i5940). +Le(i2160,i5940). +Le(i2190,i5940). +Le(i2200,i5940). +Le(i2230,i5940). +Le(i2270,i5940). +Le(i2300,i5940). +Le(i2340,i5940). +Le(i2370,i5940). +Le(i2410,i5940). +Le(i2420,i5940). +Le(i2440,i5940). +Le(i2480,i5940). +Le(i2510,i5940). +Le(i2550,i5940). +Le(i2580,i5940). +Le(i2620,i5940). +Le(i2640,i5940). +Le(i2660,i5940). +Le(i2730,i5940). +Le(i2760,i5940). +Le(i2800,i5940). +Le(i2830,i5940). +Le(i2860,i5940). +Le(i2870,i5940). +Le(i2940,i5940). +Le(i2970,i5940). +Le(i3010,i5940). +Le(i3040,i5940). +Le(i3080,i5940). +Le(i3120,i5940). +Le(i3150,i5940). +Le(i3220,i5940). +Le(i3260,i5940). +Le(i3290,i5940). +Le(i3300,i5940). +Le(i3330,i5940). +Le(i3400,i5940). +Le(i3430,i5940). +Le(i3500,i5940). +Le(i3520,i5940). +Le(i3580,i5940). +Le(i3610,i5940). +Le(i3650,i5940). +Le(i3680,i5940). +Le(i3720,i5940). +Le(i3740,i5940). +Le(i3790,i5940). +Le(i3820,i5940). +Le(i3860,i5940). +Le(i3960,i5940). +Le(i4040,i5940). +Le(i4140,i5940). +Le(i4180,i5940). +Le(i4400,i5940). +Le(i4620,i5940). +Le(i4840,i5940). +Le(i5060,i5940). +Le(i5280,i5940). +Le(i5500,i5940). +Le(i5720,i5940). +Le(i-30,i6160). +Le(i0,i6160). +Le(i13,i6160). +Le(i26,i6160). +Le(i39,i6160). +Le(i52,i6160). +Le(i60,i6160). +Le(i65,i6160). +Le(i70,i6160). +Le(i78,i6160). +Le(i90,i6160). +Le(i91,i6160). +Le(i104,i6160). +Le(i117,i6160). +Le(i130,i6160). +Le(i143,i6160). +Le(i156,i6160). +Le(i169,i6160). +Le(i182,i6160). +Le(i195,i6160). +Le(i208,i6160). +Le(i221,i6160). +Le(i234,i6160). +Le(i247,i6160). +Le(i260,i6160). +Le(i460,i6160). +Le(i530,i6160). +Le(i600,i6160). +Le(i660,i6160). +Le(i670,i6160). +Le(i710,i6160). +Le(i740,i6160). +Le(i810,i6160). +Le(i850,i6160). +Le(i880,i6160). +Le(i890,i6160). +Le(i920,i6160). +Le(i960,i6160). +Le(i990,i6160). +Le(i1030,i6160). +Le(i1060,i6160). +Le(i1100,i6160). +Le(i1130,i6160). +Le(i1170,i6160). +Le(i1200,i6160). 
+Le(i1240,i6160). +Le(i1260,i6160). +Le(i1270,i6160). +Le(i1290,i6160). +Le(i1310,i6160). +Le(i1320,i6160). +Le(i1330,i6160). +Le(i1350,i6160). +Le(i1360,i6160). +Le(i1380,i6160). +Le(i1390,i6160). +Le(i1420,i6160). +Le(i1430,i6160). +Le(i1450,i6160). +Le(i1460,i6160). +Le(i1490,i6160). +Le(i1520,i6160). +Le(i1530,i6160). +Le(i1540,i6160). +Le(i1560,i6160). +Le(i1590,i6160). +Le(i1630,i6160). +Le(i1660,i6160). +Le(i1700,i6160). +Le(i1730,i6160). +Le(i1760,i6160). +Le(i1770,i6160). +Le(i1810,i6160). +Le(i1840,i6160). +Le(i1880,i6160). +Le(i1910,i6160). +Le(i1950,i6160). +Le(i1980,i6160). +Le(i2020,i6160). +Le(i2050,i6160). +Le(i2090,i6160). +Le(i2120,i6160). +Le(i2160,i6160). +Le(i2190,i6160). +Le(i2200,i6160). +Le(i2230,i6160). +Le(i2270,i6160). +Le(i2300,i6160). +Le(i2340,i6160). +Le(i2370,i6160). +Le(i2410,i6160). +Le(i2420,i6160). +Le(i2440,i6160). +Le(i2480,i6160). +Le(i2510,i6160). +Le(i2550,i6160). +Le(i2580,i6160). +Le(i2620,i6160). +Le(i2640,i6160). +Le(i2660,i6160). +Le(i2730,i6160). +Le(i2760,i6160). +Le(i2800,i6160). +Le(i2830,i6160). +Le(i2860,i6160). +Le(i2870,i6160). +Le(i2940,i6160). +Le(i2970,i6160). +Le(i3010,i6160). +Le(i3040,i6160). +Le(i3080,i6160). +Le(i3120,i6160). +Le(i3150,i6160). +Le(i3220,i6160). +Le(i3260,i6160). +Le(i3290,i6160). +Le(i3300,i6160). +Le(i3330,i6160). +Le(i3400,i6160). +Le(i3430,i6160). +Le(i3500,i6160). +Le(i3520,i6160). +Le(i3580,i6160). +Le(i3610,i6160). +Le(i3650,i6160). +Le(i3680,i6160). +Le(i3720,i6160). +Le(i3740,i6160). +Le(i3790,i6160). +Le(i3820,i6160). +Le(i3860,i6160). +Le(i3960,i6160). +Le(i4040,i6160). +Le(i4140,i6160). +Le(i4180,i6160). +Le(i4400,i6160). +Le(i4620,i6160). +Le(i4840,i6160). +Le(i5060,i6160). +Le(i5280,i6160). +Le(i5500,i6160). +Le(i5720,i6160). +Le(i5940,i6160). +Le(i-30,i6380). +Le(i0,i6380). +Le(i13,i6380). +Le(i26,i6380). +Le(i39,i6380). +Le(i52,i6380). +Le(i60,i6380). +Le(i65,i6380). +Le(i70,i6380). +Le(i78,i6380). +Le(i90,i6380). +Le(i91,i6380). +Le(i104,i6380). +Le(i117,i6380). +Le(i130,i6380). +Le(i143,i6380). +Le(i156,i6380). +Le(i169,i6380). +Le(i182,i6380). +Le(i195,i6380). +Le(i208,i6380). +Le(i221,i6380). +Le(i234,i6380). +Le(i247,i6380). +Le(i260,i6380). +Le(i460,i6380). +Le(i530,i6380). +Le(i600,i6380). +Le(i660,i6380). +Le(i670,i6380). +Le(i710,i6380). +Le(i740,i6380). +Le(i810,i6380). +Le(i850,i6380). +Le(i880,i6380). +Le(i890,i6380). +Le(i920,i6380). +Le(i960,i6380). +Le(i990,i6380). +Le(i1030,i6380). +Le(i1060,i6380). +Le(i1100,i6380). +Le(i1130,i6380). +Le(i1170,i6380). +Le(i1200,i6380). +Le(i1240,i6380). +Le(i1260,i6380). +Le(i1270,i6380). +Le(i1290,i6380). +Le(i1310,i6380). +Le(i1320,i6380). +Le(i1330,i6380). +Le(i1350,i6380). +Le(i1360,i6380). +Le(i1380,i6380). +Le(i1390,i6380). +Le(i1420,i6380). +Le(i1430,i6380). +Le(i1450,i6380). +Le(i1460,i6380). +Le(i1490,i6380). +Le(i1520,i6380). +Le(i1530,i6380). +Le(i1540,i6380). +Le(i1560,i6380). +Le(i1590,i6380). +Le(i1630,i6380). +Le(i1660,i6380). +Le(i1700,i6380). +Le(i1730,i6380). +Le(i1760,i6380). +Le(i1770,i6380). +Le(i1810,i6380). +Le(i1840,i6380). +Le(i1880,i6380). +Le(i1910,i6380). +Le(i1950,i6380). +Le(i1980,i6380). +Le(i2020,i6380). +Le(i2050,i6380). +Le(i2090,i6380). +Le(i2120,i6380). +Le(i2160,i6380). +Le(i2190,i6380). +Le(i2200,i6380). +Le(i2230,i6380). +Le(i2270,i6380). +Le(i2300,i6380). +Le(i2340,i6380). +Le(i2370,i6380). +Le(i2410,i6380). +Le(i2420,i6380). +Le(i2440,i6380). +Le(i2480,i6380). +Le(i2510,i6380). +Le(i2550,i6380). +Le(i2580,i6380). +Le(i2620,i6380). +Le(i2640,i6380). +Le(i2660,i6380). +Le(i2730,i6380). +Le(i2760,i6380). 
+Le(i2800,i6380). +Le(i2830,i6380). +Le(i2860,i6380). +Le(i2870,i6380). +Le(i2940,i6380). +Le(i2970,i6380). +Le(i3010,i6380). +Le(i3040,i6380). +Le(i3080,i6380). +Le(i3120,i6380). +Le(i3150,i6380). +Le(i3220,i6380). +Le(i3260,i6380). +Le(i3290,i6380). +Le(i3300,i6380). +Le(i3330,i6380). +Le(i3400,i6380). +Le(i3430,i6380). +Le(i3500,i6380). +Le(i3520,i6380). +Le(i3580,i6380). +Le(i3610,i6380). +Le(i3650,i6380). +Le(i3680,i6380). +Le(i3720,i6380). +Le(i3740,i6380). +Le(i3790,i6380). +Le(i3820,i6380). +Le(i3860,i6380). +Le(i3960,i6380). +Le(i4040,i6380). +Le(i4140,i6380). +Le(i4180,i6380). +Le(i4400,i6380). +Le(i4620,i6380). +Le(i4840,i6380). +Le(i5060,i6380). +Le(i5280,i6380). +Le(i5500,i6380). +Le(i5720,i6380). +Le(i5940,i6380). +Le(i6160,i6380). +Le(i-30,i6600). +Le(i0,i6600). +Le(i13,i6600). +Le(i26,i6600). +Le(i39,i6600). +Le(i52,i6600). +Le(i60,i6600). +Le(i65,i6600). +Le(i70,i6600). +Le(i78,i6600). +Le(i90,i6600). +Le(i91,i6600). +Le(i104,i6600). +Le(i117,i6600). +Le(i130,i6600). +Le(i143,i6600). +Le(i156,i6600). +Le(i169,i6600). +Le(i182,i6600). +Le(i195,i6600). +Le(i208,i6600). +Le(i221,i6600). +Le(i234,i6600). +Le(i247,i6600). +Le(i260,i6600). +Le(i460,i6600). +Le(i530,i6600). +Le(i600,i6600). +Le(i660,i6600). +Le(i670,i6600). +Le(i710,i6600). +Le(i740,i6600). +Le(i810,i6600). +Le(i850,i6600). +Le(i880,i6600). +Le(i890,i6600). +Le(i920,i6600). +Le(i960,i6600). +Le(i990,i6600). +Le(i1030,i6600). +Le(i1060,i6600). +Le(i1100,i6600). +Le(i1130,i6600). +Le(i1170,i6600). +Le(i1200,i6600). +Le(i1240,i6600). +Le(i1260,i6600). +Le(i1270,i6600). +Le(i1290,i6600). +Le(i1310,i6600). +Le(i1320,i6600). +Le(i1330,i6600). +Le(i1350,i6600). +Le(i1360,i6600). +Le(i1380,i6600). +Le(i1390,i6600). +Le(i1420,i6600). +Le(i1430,i6600). +Le(i1450,i6600). +Le(i1460,i6600). +Le(i1490,i6600). +Le(i1520,i6600). +Le(i1530,i6600). +Le(i1540,i6600). +Le(i1560,i6600). +Le(i1590,i6600). +Le(i1630,i6600). +Le(i1660,i6600). +Le(i1700,i6600). +Le(i1730,i6600). +Le(i1760,i6600). +Le(i1770,i6600). +Le(i1810,i6600). +Le(i1840,i6600). +Le(i1880,i6600). +Le(i1910,i6600). +Le(i1950,i6600). +Le(i1980,i6600). +Le(i2020,i6600). +Le(i2050,i6600). +Le(i2090,i6600). +Le(i2120,i6600). +Le(i2160,i6600). +Le(i2190,i6600). +Le(i2200,i6600). +Le(i2230,i6600). +Le(i2270,i6600). +Le(i2300,i6600). +Le(i2340,i6600). +Le(i2370,i6600). +Le(i2410,i6600). +Le(i2420,i6600). +Le(i2440,i6600). +Le(i2480,i6600). +Le(i2510,i6600). +Le(i2550,i6600). +Le(i2580,i6600). +Le(i2620,i6600). +Le(i2640,i6600). +Le(i2660,i6600). +Le(i2730,i6600). +Le(i2760,i6600). +Le(i2800,i6600). +Le(i2830,i6600). +Le(i2860,i6600). +Le(i2870,i6600). +Le(i2940,i6600). +Le(i2970,i6600). +Le(i3010,i6600). +Le(i3040,i6600). +Le(i3080,i6600). +Le(i3120,i6600). +Le(i3150,i6600). +Le(i3220,i6600). +Le(i3260,i6600). +Le(i3290,i6600). +Le(i3300,i6600). +Le(i3330,i6600). +Le(i3400,i6600). +Le(i3430,i6600). +Le(i3500,i6600). +Le(i3520,i6600). +Le(i3580,i6600). +Le(i3610,i6600). +Le(i3650,i6600). +Le(i3680,i6600). +Le(i3720,i6600). +Le(i3740,i6600). +Le(i3790,i6600). +Le(i3820,i6600). +Le(i3860,i6600). +Le(i3960,i6600). +Le(i4040,i6600). +Le(i4140,i6600). +Le(i4180,i6600). +Le(i4400,i6600). +Le(i4620,i6600). +Le(i4840,i6600). +Le(i5060,i6600). +Le(i5280,i6600). +Le(i5500,i6600). +Le(i5720,i6600). +Le(i5940,i6600). +Le(i6160,i6600). +Le(i6380,i6600). +Le(i-30,i6820). +Le(i0,i6820). +Le(i13,i6820). +Le(i26,i6820). +Le(i39,i6820). +Le(i52,i6820). +Le(i60,i6820). +Le(i65,i6820). +Le(i70,i6820). +Le(i78,i6820). +Le(i90,i6820). +Le(i91,i6820). +Le(i104,i6820). +Le(i117,i6820). 
+Le(i130,i6820). +Le(i143,i6820). +Le(i156,i6820). +Le(i169,i6820). +Le(i182,i6820). +Le(i195,i6820). +Le(i208,i6820). +Le(i221,i6820). +Le(i234,i6820). +Le(i247,i6820). +Le(i260,i6820). +Le(i460,i6820). +Le(i530,i6820). +Le(i600,i6820). +Le(i660,i6820). +Le(i670,i6820). +Le(i710,i6820). +Le(i740,i6820). +Le(i810,i6820). +Le(i850,i6820). +Le(i880,i6820). +Le(i890,i6820). +Le(i920,i6820). +Le(i960,i6820). +Le(i990,i6820). +Le(i1030,i6820). +Le(i1060,i6820). +Le(i1100,i6820). +Le(i1130,i6820). +Le(i1170,i6820). +Le(i1200,i6820). +Le(i1240,i6820). +Le(i1260,i6820). +Le(i1270,i6820). +Le(i1290,i6820). +Le(i1310,i6820). +Le(i1320,i6820). +Le(i1330,i6820). +Le(i1350,i6820). +Le(i1360,i6820). +Le(i1380,i6820). +Le(i1390,i6820). +Le(i1420,i6820). +Le(i1430,i6820). +Le(i1450,i6820). +Le(i1460,i6820). +Le(i1490,i6820). +Le(i1520,i6820). +Le(i1530,i6820). +Le(i1540,i6820). +Le(i1560,i6820). +Le(i1590,i6820). +Le(i1630,i6820). +Le(i1660,i6820). +Le(i1700,i6820). +Le(i1730,i6820). +Le(i1760,i6820). +Le(i1770,i6820). +Le(i1810,i6820). +Le(i1840,i6820). +Le(i1880,i6820). +Le(i1910,i6820). +Le(i1950,i6820). +Le(i1980,i6820). +Le(i2020,i6820). +Le(i2050,i6820). +Le(i2090,i6820). +Le(i2120,i6820). +Le(i2160,i6820). +Le(i2190,i6820). +Le(i2200,i6820). +Le(i2230,i6820). +Le(i2270,i6820). +Le(i2300,i6820). +Le(i2340,i6820). +Le(i2370,i6820). +Le(i2410,i6820). +Le(i2420,i6820). +Le(i2440,i6820). +Le(i2480,i6820). +Le(i2510,i6820). +Le(i2550,i6820). +Le(i2580,i6820). +Le(i2620,i6820). +Le(i2640,i6820). +Le(i2660,i6820). +Le(i2730,i6820). +Le(i2760,i6820). +Le(i2800,i6820). +Le(i2830,i6820). +Le(i2860,i6820). +Le(i2870,i6820). +Le(i2940,i6820). +Le(i2970,i6820). +Le(i3010,i6820). +Le(i3040,i6820). +Le(i3080,i6820). +Le(i3120,i6820). +Le(i3150,i6820). +Le(i3220,i6820). +Le(i3260,i6820). +Le(i3290,i6820). +Le(i3300,i6820). +Le(i3330,i6820). +Le(i3400,i6820). +Le(i3430,i6820). +Le(i3500,i6820). +Le(i3520,i6820). +Le(i3580,i6820). +Le(i3610,i6820). +Le(i3650,i6820). +Le(i3680,i6820). +Le(i3720,i6820). +Le(i3740,i6820). +Le(i3790,i6820). +Le(i3820,i6820). +Le(i3860,i6820). +Le(i3960,i6820). +Le(i4040,i6820). +Le(i4140,i6820). +Le(i4180,i6820). +Le(i4400,i6820). +Le(i4620,i6820). +Le(i4840,i6820). +Le(i5060,i6820). +Le(i5280,i6820). +Le(i5500,i6820). +Le(i5720,i6820). +Le(i5940,i6820). +Le(i6160,i6820). +Le(i6380,i6820). +Le(i6600,i6820). +Le(i-30,i7040). +Le(i0,i7040). +Le(i13,i7040). +Le(i26,i7040). +Le(i39,i7040). +Le(i52,i7040). +Le(i60,i7040). +Le(i65,i7040). +Le(i70,i7040). +Le(i78,i7040). +Le(i90,i7040). +Le(i91,i7040). +Le(i104,i7040). +Le(i117,i7040). +Le(i130,i7040). +Le(i143,i7040). +Le(i156,i7040). +Le(i169,i7040). +Le(i182,i7040). +Le(i195,i7040). +Le(i208,i7040). +Le(i221,i7040). +Le(i234,i7040). +Le(i247,i7040). +Le(i260,i7040). +Le(i460,i7040). +Le(i530,i7040). +Le(i600,i7040). +Le(i660,i7040). +Le(i670,i7040). +Le(i710,i7040). +Le(i740,i7040). +Le(i810,i7040). +Le(i850,i7040). +Le(i880,i7040). +Le(i890,i7040). +Le(i920,i7040). +Le(i960,i7040). +Le(i990,i7040). +Le(i1030,i7040). +Le(i1060,i7040). +Le(i1100,i7040). +Le(i1130,i7040). +Le(i1170,i7040). +Le(i1200,i7040). +Le(i1240,i7040). +Le(i1260,i7040). +Le(i1270,i7040). +Le(i1290,i7040). +Le(i1310,i7040). +Le(i1320,i7040). +Le(i1330,i7040). +Le(i1350,i7040). +Le(i1360,i7040). +Le(i1380,i7040). +Le(i1390,i7040). +Le(i1420,i7040). +Le(i1430,i7040). +Le(i1450,i7040). +Le(i1460,i7040). +Le(i1490,i7040). +Le(i1520,i7040). +Le(i1530,i7040). +Le(i1540,i7040). +Le(i1560,i7040). +Le(i1590,i7040). +Le(i1630,i7040). +Le(i1660,i7040). +Le(i1700,i7040). 
+Le(i1730,i7040). +Le(i1760,i7040). +Le(i1770,i7040). +Le(i1810,i7040). +Le(i1840,i7040). +Le(i1880,i7040). +Le(i1910,i7040). +Le(i1950,i7040). +Le(i1980,i7040). +Le(i2020,i7040). +Le(i2050,i7040). +Le(i2090,i7040). +Le(i2120,i7040). +Le(i2160,i7040). +Le(i2190,i7040). +Le(i2200,i7040). +Le(i2230,i7040). +Le(i2270,i7040). +Le(i2300,i7040). +Le(i2340,i7040). +Le(i2370,i7040). +Le(i2410,i7040). +Le(i2420,i7040). +Le(i2440,i7040). +Le(i2480,i7040). +Le(i2510,i7040). +Le(i2550,i7040). +Le(i2580,i7040). +Le(i2620,i7040). +Le(i2640,i7040). +Le(i2660,i7040). +Le(i2730,i7040). +Le(i2760,i7040). +Le(i2800,i7040). +Le(i2830,i7040). +Le(i2860,i7040). +Le(i2870,i7040). +Le(i2940,i7040). +Le(i2970,i7040). +Le(i3010,i7040). +Le(i3040,i7040). +Le(i3080,i7040). +Le(i3120,i7040). +Le(i3150,i7040). +Le(i3220,i7040). +Le(i3260,i7040). +Le(i3290,i7040). +Le(i3300,i7040). +Le(i3330,i7040). +Le(i3400,i7040). +Le(i3430,i7040). +Le(i3500,i7040). +Le(i3520,i7040). +Le(i3580,i7040). +Le(i3610,i7040). +Le(i3650,i7040). +Le(i3680,i7040). +Le(i3720,i7040). +Le(i3740,i7040). +Le(i3790,i7040). +Le(i3820,i7040). +Le(i3860,i7040). +Le(i3960,i7040). +Le(i4040,i7040). +Le(i4140,i7040). +Le(i4180,i7040). +Le(i4400,i7040). +Le(i4620,i7040). +Le(i4840,i7040). +Le(i5060,i7040). +Le(i5280,i7040). +Le(i5500,i7040). +Le(i5720,i7040). +Le(i5940,i7040). +Le(i6160,i7040). +Le(i6380,i7040). +Le(i6600,i7040). +Le(i6820,i7040). +Le(i-30,i7260). +Le(i0,i7260). +Le(i13,i7260). +Le(i26,i7260). +Le(i39,i7260). +Le(i52,i7260). +Le(i60,i7260). +Le(i65,i7260). +Le(i70,i7260). +Le(i78,i7260). +Le(i90,i7260). +Le(i91,i7260). +Le(i104,i7260). +Le(i117,i7260). +Le(i130,i7260). +Le(i143,i7260). +Le(i156,i7260). +Le(i169,i7260). +Le(i182,i7260). +Le(i195,i7260). +Le(i208,i7260). +Le(i221,i7260). +Le(i234,i7260). +Le(i247,i7260). +Le(i260,i7260). +Le(i460,i7260). +Le(i530,i7260). +Le(i600,i7260). +Le(i660,i7260). +Le(i670,i7260). +Le(i710,i7260). +Le(i740,i7260). +Le(i810,i7260). +Le(i850,i7260). +Le(i880,i7260). +Le(i890,i7260). +Le(i920,i7260). +Le(i960,i7260). +Le(i990,i7260). +Le(i1030,i7260). +Le(i1060,i7260). +Le(i1100,i7260). +Le(i1130,i7260). +Le(i1170,i7260). +Le(i1200,i7260). +Le(i1240,i7260). +Le(i1260,i7260). +Le(i1270,i7260). +Le(i1290,i7260). +Le(i1310,i7260). +Le(i1320,i7260). +Le(i1330,i7260). +Le(i1350,i7260). +Le(i1360,i7260). +Le(i1380,i7260). +Le(i1390,i7260). +Le(i1420,i7260). +Le(i1430,i7260). +Le(i1450,i7260). +Le(i1460,i7260). +Le(i1490,i7260). +Le(i1520,i7260). +Le(i1530,i7260). +Le(i1540,i7260). +Le(i1560,i7260). +Le(i1590,i7260). +Le(i1630,i7260). +Le(i1660,i7260). +Le(i1700,i7260). +Le(i1730,i7260). +Le(i1760,i7260). +Le(i1770,i7260). +Le(i1810,i7260). +Le(i1840,i7260). +Le(i1880,i7260). +Le(i1910,i7260). +Le(i1950,i7260). +Le(i1980,i7260). +Le(i2020,i7260). +Le(i2050,i7260). +Le(i2090,i7260). +Le(i2120,i7260). +Le(i2160,i7260). +Le(i2190,i7260). +Le(i2200,i7260). +Le(i2230,i7260). +Le(i2270,i7260). +Le(i2300,i7260). +Le(i2340,i7260). +Le(i2370,i7260). +Le(i2410,i7260). +Le(i2420,i7260). +Le(i2440,i7260). +Le(i2480,i7260). +Le(i2510,i7260). +Le(i2550,i7260). +Le(i2580,i7260). +Le(i2620,i7260). +Le(i2640,i7260). +Le(i2660,i7260). +Le(i2730,i7260). +Le(i2760,i7260). +Le(i2800,i7260). +Le(i2830,i7260). +Le(i2860,i7260). +Le(i2870,i7260). +Le(i2940,i7260). +Le(i2970,i7260). +Le(i3010,i7260). +Le(i3040,i7260). +Le(i3080,i7260). +Le(i3120,i7260). +Le(i3150,i7260). +Le(i3220,i7260). +Le(i3260,i7260). +Le(i3290,i7260). +Le(i3300,i7260). +Le(i3330,i7260). +Le(i3400,i7260). +Le(i3430,i7260). +Le(i3500,i7260). +Le(i3520,i7260). 
+Le(i3580,i7260). +Le(i3610,i7260). +Le(i3650,i7260). +Le(i3680,i7260). +Le(i3720,i7260). +Le(i3740,i7260). +Le(i3790,i7260). +Le(i3820,i7260). +Le(i3860,i7260). +Le(i3960,i7260). +Le(i4040,i7260). +Le(i4140,i7260). +Le(i4180,i7260). +Le(i4400,i7260). +Le(i4620,i7260). +Le(i4840,i7260). +Le(i5060,i7260). +Le(i5280,i7260). +Le(i5500,i7260). +Le(i5720,i7260). +Le(i5940,i7260). +Le(i6160,i7260). +Le(i6380,i7260). +Le(i6600,i7260). +Le(i6820,i7260). +Le(i7040,i7260). +Le(i-30,i7480). +Le(i0,i7480). +Le(i13,i7480). +Le(i26,i7480). +Le(i39,i7480). +Le(i52,i7480). +Le(i60,i7480). +Le(i65,i7480). +Le(i70,i7480). +Le(i78,i7480). +Le(i90,i7480). +Le(i91,i7480). +Le(i104,i7480). +Le(i117,i7480). +Le(i130,i7480). +Le(i143,i7480). +Le(i156,i7480). +Le(i169,i7480). +Le(i182,i7480). +Le(i195,i7480). +Le(i208,i7480). +Le(i221,i7480). +Le(i234,i7480). +Le(i247,i7480). +Le(i260,i7480). +Le(i460,i7480). +Le(i530,i7480). +Le(i600,i7480). +Le(i660,i7480). +Le(i670,i7480). +Le(i710,i7480). +Le(i740,i7480). +Le(i810,i7480). +Le(i850,i7480). +Le(i880,i7480). +Le(i890,i7480). +Le(i920,i7480). +Le(i960,i7480). +Le(i990,i7480). +Le(i1030,i7480). +Le(i1060,i7480). +Le(i1100,i7480). +Le(i1130,i7480). +Le(i1170,i7480). +Le(i1200,i7480). +Le(i1240,i7480). +Le(i1260,i7480). +Le(i1270,i7480). +Le(i1290,i7480). +Le(i1310,i7480). +Le(i1320,i7480). +Le(i1330,i7480). +Le(i1350,i7480). +Le(i1360,i7480). +Le(i1380,i7480). +Le(i1390,i7480). +Le(i1420,i7480). +Le(i1430,i7480). +Le(i1450,i7480). +Le(i1460,i7480). +Le(i1490,i7480). +Le(i1520,i7480). +Le(i1530,i7480). +Le(i1540,i7480). +Le(i1560,i7480). +Le(i1590,i7480). +Le(i1630,i7480). +Le(i1660,i7480). +Le(i1700,i7480). +Le(i1730,i7480). +Le(i1760,i7480). +Le(i1770,i7480). +Le(i1810,i7480). +Le(i1840,i7480). +Le(i1880,i7480). +Le(i1910,i7480). +Le(i1950,i7480). +Le(i1980,i7480). +Le(i2020,i7480). +Le(i2050,i7480). +Le(i2090,i7480). +Le(i2120,i7480). +Le(i2160,i7480). +Le(i2190,i7480). +Le(i2200,i7480). +Le(i2230,i7480). +Le(i2270,i7480). +Le(i2300,i7480). +Le(i2340,i7480). +Le(i2370,i7480). +Le(i2410,i7480). +Le(i2420,i7480). +Le(i2440,i7480). +Le(i2480,i7480). +Le(i2510,i7480). +Le(i2550,i7480). +Le(i2580,i7480). +Le(i2620,i7480). +Le(i2640,i7480). +Le(i2660,i7480). +Le(i2730,i7480). +Le(i2760,i7480). +Le(i2800,i7480). +Le(i2830,i7480). +Le(i2860,i7480). +Le(i2870,i7480). +Le(i2940,i7480). +Le(i2970,i7480). +Le(i3010,i7480). +Le(i3040,i7480). +Le(i3080,i7480). +Le(i3120,i7480). +Le(i3150,i7480). +Le(i3220,i7480). +Le(i3260,i7480). +Le(i3290,i7480). +Le(i3300,i7480). +Le(i3330,i7480). +Le(i3400,i7480). +Le(i3430,i7480). +Le(i3500,i7480). +Le(i3520,i7480). +Le(i3580,i7480). +Le(i3610,i7480). +Le(i3650,i7480). +Le(i3680,i7480). +Le(i3720,i7480). +Le(i3740,i7480). +Le(i3790,i7480). +Le(i3820,i7480). +Le(i3860,i7480). +Le(i3960,i7480). +Le(i4040,i7480). +Le(i4140,i7480). +Le(i4180,i7480). +Le(i4400,i7480). +Le(i4620,i7480). +Le(i4840,i7480). +Le(i5060,i7480). +Le(i5280,i7480). +Le(i5500,i7480). +Le(i5720,i7480). +Le(i5940,i7480). +Le(i6160,i7480). +Le(i6380,i7480). +Le(i6600,i7480). +Le(i6820,i7480). +Le(i7040,i7480). +Le(i7260,i7480). +Le(i-30,i7700). +Le(i0,i7700). +Le(i13,i7700). +Le(i26,i7700). +Le(i39,i7700). +Le(i52,i7700). +Le(i60,i7700). +Le(i65,i7700). +Le(i70,i7700). +Le(i78,i7700). +Le(i90,i7700). +Le(i91,i7700). +Le(i104,i7700). +Le(i117,i7700). +Le(i130,i7700). +Le(i143,i7700). +Le(i156,i7700). +Le(i169,i7700). +Le(i182,i7700). +Le(i195,i7700). +Le(i208,i7700). +Le(i221,i7700). +Le(i234,i7700). +Le(i247,i7700). +Le(i260,i7700). +Le(i460,i7700). +Le(i530,i7700). 
+Le(i600,i7700). +Le(i660,i7700). +Le(i670,i7700). +Le(i710,i7700). +Le(i740,i7700). +Le(i810,i7700). +Le(i850,i7700). +Le(i880,i7700). +Le(i890,i7700). +Le(i920,i7700). +Le(i960,i7700). +Le(i990,i7700). +Le(i1030,i7700). +Le(i1060,i7700). +Le(i1100,i7700). +Le(i1130,i7700). +Le(i1170,i7700). +Le(i1200,i7700). +Le(i1240,i7700). +Le(i1260,i7700). +Le(i1270,i7700). +Le(i1290,i7700). +Le(i1310,i7700). +Le(i1320,i7700). +Le(i1330,i7700). +Le(i1350,i7700). +Le(i1360,i7700). +Le(i1380,i7700). +Le(i1390,i7700). +Le(i1420,i7700). +Le(i1430,i7700). +Le(i1450,i7700). +Le(i1460,i7700). +Le(i1490,i7700). +Le(i1520,i7700). +Le(i1530,i7700). +Le(i1540,i7700). +Le(i1560,i7700). +Le(i1590,i7700). +Le(i1630,i7700). +Le(i1660,i7700). +Le(i1700,i7700). +Le(i1730,i7700). +Le(i1760,i7700). +Le(i1770,i7700). +Le(i1810,i7700). +Le(i1840,i7700). +Le(i1880,i7700). +Le(i1910,i7700). +Le(i1950,i7700). +Le(i1980,i7700). +Le(i2020,i7700). +Le(i2050,i7700). +Le(i2090,i7700). +Le(i2120,i7700). +Le(i2160,i7700). +Le(i2190,i7700). +Le(i2200,i7700). +Le(i2230,i7700). +Le(i2270,i7700). +Le(i2300,i7700). +Le(i2340,i7700). +Le(i2370,i7700). +Le(i2410,i7700). +Le(i2420,i7700). +Le(i2440,i7700). +Le(i2480,i7700). +Le(i2510,i7700). +Le(i2550,i7700). +Le(i2580,i7700). +Le(i2620,i7700). +Le(i2640,i7700). +Le(i2660,i7700). +Le(i2730,i7700). +Le(i2760,i7700). +Le(i2800,i7700). +Le(i2830,i7700). +Le(i2860,i7700). +Le(i2870,i7700). +Le(i2940,i7700). +Le(i2970,i7700). +Le(i3010,i7700). +Le(i3040,i7700). +Le(i3080,i7700). +Le(i3120,i7700). +Le(i3150,i7700). +Le(i3220,i7700). +Le(i3260,i7700). +Le(i3290,i7700). +Le(i3300,i7700). +Le(i3330,i7700). +Le(i3400,i7700). +Le(i3430,i7700). +Le(i3500,i7700). +Le(i3520,i7700). +Le(i3580,i7700). +Le(i3610,i7700). +Le(i3650,i7700). +Le(i3680,i7700). +Le(i3720,i7700). +Le(i3740,i7700). +Le(i3790,i7700). +Le(i3820,i7700). +Le(i3860,i7700). +Le(i3960,i7700). +Le(i4040,i7700). +Le(i4140,i7700). +Le(i4180,i7700). +Le(i4400,i7700). +Le(i4620,i7700). +Le(i4840,i7700). +Le(i5060,i7700). +Le(i5280,i7700). +Le(i5500,i7700). +Le(i5720,i7700). +Le(i5940,i7700). +Le(i6160,i7700). +Le(i6380,i7700). +Le(i6600,i7700). +Le(i6820,i7700). +Le(i7040,i7700). +Le(i7260,i7700). +Le(i7480,i7700). +Le(i-30,i7920). +Le(i0,i7920). +Le(i13,i7920). +Le(i26,i7920). +Le(i39,i7920). +Le(i52,i7920). +Le(i60,i7920). +Le(i65,i7920). +Le(i70,i7920). +Le(i78,i7920). +Le(i90,i7920). +Le(i91,i7920). +Le(i104,i7920). +Le(i117,i7920). +Le(i130,i7920). +Le(i143,i7920). +Le(i156,i7920). +Le(i169,i7920). +Le(i182,i7920). +Le(i195,i7920). +Le(i208,i7920). +Le(i221,i7920). +Le(i234,i7920). +Le(i247,i7920). +Le(i260,i7920). +Le(i460,i7920). +Le(i530,i7920). +Le(i600,i7920). +Le(i660,i7920). +Le(i670,i7920). +Le(i710,i7920). +Le(i740,i7920). +Le(i810,i7920). +Le(i850,i7920). +Le(i880,i7920). +Le(i890,i7920). +Le(i920,i7920). +Le(i960,i7920). +Le(i990,i7920). +Le(i1030,i7920). +Le(i1060,i7920). +Le(i1100,i7920). +Le(i1130,i7920). +Le(i1170,i7920). +Le(i1200,i7920). +Le(i1240,i7920). +Le(i1260,i7920). +Le(i1270,i7920). +Le(i1290,i7920). +Le(i1310,i7920). +Le(i1320,i7920). +Le(i1330,i7920). +Le(i1350,i7920). +Le(i1360,i7920). +Le(i1380,i7920). +Le(i1390,i7920). +Le(i1420,i7920). +Le(i1430,i7920). +Le(i1450,i7920). +Le(i1460,i7920). +Le(i1490,i7920). +Le(i1520,i7920). +Le(i1530,i7920). +Le(i1540,i7920). +Le(i1560,i7920). +Le(i1590,i7920). +Le(i1630,i7920). +Le(i1660,i7920). +Le(i1700,i7920). +Le(i1730,i7920). +Le(i1760,i7920). +Le(i1770,i7920). +Le(i1810,i7920). +Le(i1840,i7920). +Le(i1880,i7920). +Le(i1910,i7920). +Le(i1950,i7920). +Le(i1980,i7920). 
+Le(i2020,i7920). +Le(i2050,i7920). +Le(i2090,i7920). +Le(i2120,i7920). +Le(i2160,i7920). +Le(i2190,i7920). +Le(i2200,i7920). +Le(i2230,i7920). +Le(i2270,i7920). +Le(i2300,i7920). +Le(i2340,i7920). +Le(i2370,i7920). +Le(i2410,i7920). +Le(i2420,i7920). +Le(i2440,i7920). +Le(i2480,i7920). +Le(i2510,i7920). +Le(i2550,i7920). +Le(i2580,i7920). +Le(i2620,i7920). +Le(i2640,i7920). +Le(i2660,i7920). +Le(i2730,i7920). +Le(i2760,i7920). +Le(i2800,i7920). +Le(i2830,i7920). +Le(i2860,i7920). +Le(i2870,i7920). +Le(i2940,i7920). +Le(i2970,i7920). +Le(i3010,i7920). +Le(i3040,i7920). +Le(i3080,i7920). +Le(i3120,i7920). +Le(i3150,i7920). +Le(i3220,i7920). +Le(i3260,i7920). +Le(i3290,i7920). +Le(i3300,i7920). +Le(i3330,i7920). +Le(i3400,i7920). +Le(i3430,i7920). +Le(i3500,i7920). +Le(i3520,i7920). +Le(i3580,i7920). +Le(i3610,i7920). +Le(i3650,i7920). +Le(i3680,i7920). +Le(i3720,i7920). +Le(i3740,i7920). +Le(i3790,i7920). +Le(i3820,i7920). +Le(i3860,i7920). +Le(i3960,i7920). +Le(i4040,i7920). +Le(i4140,i7920). +Le(i4180,i7920). +Le(i4400,i7920). +Le(i4620,i7920). +Le(i4840,i7920). +Le(i5060,i7920). +Le(i5280,i7920). +Le(i5500,i7920). +Le(i5720,i7920). +Le(i5940,i7920). +Le(i6160,i7920). +Le(i6380,i7920). +Le(i6600,i7920). +Le(i6820,i7920). +Le(i7040,i7920). +Le(i7260,i7920). +Le(i7480,i7920). +Le(i7700,i7920). +Le(i-30,i8140). +Le(i0,i8140). +Le(i13,i8140). +Le(i26,i8140). +Le(i39,i8140). +Le(i52,i8140). +Le(i60,i8140). +Le(i65,i8140). +Le(i70,i8140). +Le(i78,i8140). +Le(i90,i8140). +Le(i91,i8140). +Le(i104,i8140). +Le(i117,i8140). +Le(i130,i8140). +Le(i143,i8140). +Le(i156,i8140). +Le(i169,i8140). +Le(i182,i8140). +Le(i195,i8140). +Le(i208,i8140). +Le(i221,i8140). +Le(i234,i8140). +Le(i247,i8140). +Le(i260,i8140). +Le(i460,i8140). +Le(i530,i8140). +Le(i600,i8140). +Le(i660,i8140). +Le(i670,i8140). +Le(i710,i8140). +Le(i740,i8140). +Le(i810,i8140). +Le(i850,i8140). +Le(i880,i8140). +Le(i890,i8140). +Le(i920,i8140). +Le(i960,i8140). +Le(i990,i8140). +Le(i1030,i8140). +Le(i1060,i8140). +Le(i1100,i8140). +Le(i1130,i8140). +Le(i1170,i8140). +Le(i1200,i8140). +Le(i1240,i8140). +Le(i1260,i8140). +Le(i1270,i8140). +Le(i1290,i8140). +Le(i1310,i8140). +Le(i1320,i8140). +Le(i1330,i8140). +Le(i1350,i8140). +Le(i1360,i8140). +Le(i1380,i8140). +Le(i1390,i8140). +Le(i1420,i8140). +Le(i1430,i8140). +Le(i1450,i8140). +Le(i1460,i8140). +Le(i1490,i8140). +Le(i1520,i8140). +Le(i1530,i8140). +Le(i1540,i8140). +Le(i1560,i8140). +Le(i1590,i8140). +Le(i1630,i8140). +Le(i1660,i8140). +Le(i1700,i8140). +Le(i1730,i8140). +Le(i1760,i8140). +Le(i1770,i8140). +Le(i1810,i8140). +Le(i1840,i8140). +Le(i1880,i8140). +Le(i1910,i8140). +Le(i1950,i8140). +Le(i1980,i8140). +Le(i2020,i8140). +Le(i2050,i8140). +Le(i2090,i8140). +Le(i2120,i8140). +Le(i2160,i8140). +Le(i2190,i8140). +Le(i2200,i8140). +Le(i2230,i8140). +Le(i2270,i8140). +Le(i2300,i8140). +Le(i2340,i8140). +Le(i2370,i8140). +Le(i2410,i8140). +Le(i2420,i8140). +Le(i2440,i8140). +Le(i2480,i8140). +Le(i2510,i8140). +Le(i2550,i8140). +Le(i2580,i8140). +Le(i2620,i8140). +Le(i2640,i8140). +Le(i2660,i8140). +Le(i2730,i8140). +Le(i2760,i8140). +Le(i2800,i8140). +Le(i2830,i8140). +Le(i2860,i8140). +Le(i2870,i8140). +Le(i2940,i8140). +Le(i2970,i8140). +Le(i3010,i8140). +Le(i3040,i8140). +Le(i3080,i8140). +Le(i3120,i8140). +Le(i3150,i8140). +Le(i3220,i8140). +Le(i3260,i8140). +Le(i3290,i8140). +Le(i3300,i8140). +Le(i3330,i8140). +Le(i3400,i8140). +Le(i3430,i8140). +Le(i3500,i8140). +Le(i3520,i8140). +Le(i3580,i8140). +Le(i3610,i8140). +Le(i3650,i8140). +Le(i3680,i8140). +Le(i3720,i8140). 
+Le(i3740,i8140). +Le(i3790,i8140). +Le(i3820,i8140). +Le(i3860,i8140). +Le(i3960,i8140). +Le(i4040,i8140). +Le(i4140,i8140). +Le(i4180,i8140). +Le(i4400,i8140). +Le(i4620,i8140). +Le(i4840,i8140). +Le(i5060,i8140). +Le(i5280,i8140). +Le(i5500,i8140). +Le(i5720,i8140). +Le(i5940,i8140). +Le(i6160,i8140). +Le(i6380,i8140). +Le(i6600,i8140). +Le(i6820,i8140). +Le(i7040,i8140). +Le(i7260,i8140). +Le(i7480,i8140). +Le(i7700,i8140). +Le(i7920,i8140). +Le(i-30,i8360). +Le(i0,i8360). +Le(i13,i8360). +Le(i26,i8360). +Le(i39,i8360). +Le(i52,i8360). +Le(i60,i8360). +Le(i65,i8360). +Le(i70,i8360). +Le(i78,i8360). +Le(i90,i8360). +Le(i91,i8360). +Le(i104,i8360). +Le(i117,i8360). +Le(i130,i8360). +Le(i143,i8360). +Le(i156,i8360). +Le(i169,i8360). +Le(i182,i8360). +Le(i195,i8360). +Le(i208,i8360). +Le(i221,i8360). +Le(i234,i8360). +Le(i247,i8360). +Le(i260,i8360). +Le(i460,i8360). +Le(i530,i8360). +Le(i600,i8360). +Le(i660,i8360). +Le(i670,i8360). +Le(i710,i8360). +Le(i740,i8360). +Le(i810,i8360). +Le(i850,i8360). +Le(i880,i8360). +Le(i890,i8360). +Le(i920,i8360). +Le(i960,i8360). +Le(i990,i8360). +Le(i1030,i8360). +Le(i1060,i8360). +Le(i1100,i8360). +Le(i1130,i8360). +Le(i1170,i8360). +Le(i1200,i8360). +Le(i1240,i8360). +Le(i1260,i8360). +Le(i1270,i8360). +Le(i1290,i8360). +Le(i1310,i8360). +Le(i1320,i8360). +Le(i1330,i8360). +Le(i1350,i8360). +Le(i1360,i8360). +Le(i1380,i8360). +Le(i1390,i8360). +Le(i1420,i8360). +Le(i1430,i8360). +Le(i1450,i8360). +Le(i1460,i8360). +Le(i1490,i8360). +Le(i1520,i8360). +Le(i1530,i8360). +Le(i1540,i8360). +Le(i1560,i8360). +Le(i1590,i8360). +Le(i1630,i8360). +Le(i1660,i8360). +Le(i1700,i8360). +Le(i1730,i8360). +Le(i1760,i8360). +Le(i1770,i8360). +Le(i1810,i8360). +Le(i1840,i8360). +Le(i1880,i8360). +Le(i1910,i8360). +Le(i1950,i8360). +Le(i1980,i8360). +Le(i2020,i8360). +Le(i2050,i8360). +Le(i2090,i8360). +Le(i2120,i8360). +Le(i2160,i8360). +Le(i2190,i8360). +Le(i2200,i8360). +Le(i2230,i8360). +Le(i2270,i8360). +Le(i2300,i8360). +Le(i2340,i8360). +Le(i2370,i8360). +Le(i2410,i8360). +Le(i2420,i8360). +Le(i2440,i8360). +Le(i2480,i8360). +Le(i2510,i8360). +Le(i2550,i8360). +Le(i2580,i8360). +Le(i2620,i8360). +Le(i2640,i8360). +Le(i2660,i8360). +Le(i2730,i8360). +Le(i2760,i8360). +Le(i2800,i8360). +Le(i2830,i8360). +Le(i2860,i8360). +Le(i2870,i8360). +Le(i2940,i8360). +Le(i2970,i8360). +Le(i3010,i8360). +Le(i3040,i8360). +Le(i3080,i8360). +Le(i3120,i8360). +Le(i3150,i8360). +Le(i3220,i8360). +Le(i3260,i8360). +Le(i3290,i8360). +Le(i3300,i8360). +Le(i3330,i8360). +Le(i3400,i8360). +Le(i3430,i8360). +Le(i3500,i8360). +Le(i3520,i8360). +Le(i3580,i8360). +Le(i3610,i8360). +Le(i3650,i8360). +Le(i3680,i8360). +Le(i3720,i8360). +Le(i3740,i8360). +Le(i3790,i8360). +Le(i3820,i8360). +Le(i3860,i8360). +Le(i3960,i8360). +Le(i4040,i8360). +Le(i4140,i8360). +Le(i4180,i8360). +Le(i4400,i8360). +Le(i4620,i8360). +Le(i4840,i8360). +Le(i5060,i8360). +Le(i5280,i8360). +Le(i5500,i8360). +Le(i5720,i8360). +Le(i5940,i8360). +Le(i6160,i8360). +Le(i6380,i8360). +Le(i6600,i8360). +Le(i6820,i8360). +Le(i7040,i8360). +Le(i7260,i8360). +Le(i7480,i8360). +Le(i7700,i8360). +Le(i7920,i8360). +Le(i8140,i8360). +Le(i-30,i8580). +Le(i0,i8580). +Le(i13,i8580). +Le(i26,i8580). +Le(i39,i8580). +Le(i52,i8580). +Le(i60,i8580). +Le(i65,i8580). +Le(i70,i8580). +Le(i78,i8580). +Le(i90,i8580). +Le(i91,i8580). +Le(i104,i8580). +Le(i117,i8580). +Le(i130,i8580). +Le(i143,i8580). +Le(i156,i8580). +Le(i169,i8580). +Le(i182,i8580). +Le(i195,i8580). +Le(i208,i8580). +Le(i221,i8580). +Le(i234,i8580). +Le(i247,i8580). 
+Le(i260,i8580). +Le(i460,i8580). +Le(i530,i8580). +Le(i600,i8580). +Le(i660,i8580). +Le(i670,i8580). +Le(i710,i8580). +Le(i740,i8580). +Le(i810,i8580). +Le(i850,i8580). +Le(i880,i8580). +Le(i890,i8580). +Le(i920,i8580). +Le(i960,i8580). +Le(i990,i8580). +Le(i1030,i8580). +Le(i1060,i8580). +Le(i1100,i8580). +Le(i1130,i8580). +Le(i1170,i8580). +Le(i1200,i8580). +Le(i1240,i8580). +Le(i1260,i8580). +Le(i1270,i8580). +Le(i1290,i8580). +Le(i1310,i8580). +Le(i1320,i8580). +Le(i1330,i8580). +Le(i1350,i8580). +Le(i1360,i8580). +Le(i1380,i8580). +Le(i1390,i8580). +Le(i1420,i8580). +Le(i1430,i8580). +Le(i1450,i8580). +Le(i1460,i8580). +Le(i1490,i8580). +Le(i1520,i8580). +Le(i1530,i8580). +Le(i1540,i8580). +Le(i1560,i8580). +Le(i1590,i8580). +Le(i1630,i8580). +Le(i1660,i8580). +Le(i1700,i8580). +Le(i1730,i8580). +Le(i1760,i8580). +Le(i1770,i8580). +Le(i1810,i8580). +Le(i1840,i8580). +Le(i1880,i8580). +Le(i1910,i8580). +Le(i1950,i8580). +Le(i1980,i8580). +Le(i2020,i8580). +Le(i2050,i8580). +Le(i2090,i8580). +Le(i2120,i8580). +Le(i2160,i8580). +Le(i2190,i8580). +Le(i2200,i8580). +Le(i2230,i8580). +Le(i2270,i8580). +Le(i2300,i8580). +Le(i2340,i8580). +Le(i2370,i8580). +Le(i2410,i8580). +Le(i2420,i8580). +Le(i2440,i8580). +Le(i2480,i8580). +Le(i2510,i8580). +Le(i2550,i8580). +Le(i2580,i8580). +Le(i2620,i8580). +Le(i2640,i8580). +Le(i2660,i8580). +Le(i2730,i8580). +Le(i2760,i8580). +Le(i2800,i8580). +Le(i2830,i8580). +Le(i2860,i8580). +Le(i2870,i8580). +Le(i2940,i8580). +Le(i2970,i8580). +Le(i3010,i8580). +Le(i3040,i8580). +Le(i3080,i8580). +Le(i3120,i8580). +Le(i3150,i8580). +Le(i3220,i8580). +Le(i3260,i8580). +Le(i3290,i8580). +Le(i3300,i8580). +Le(i3330,i8580). +Le(i3400,i8580). +Le(i3430,i8580). +Le(i3500,i8580). +Le(i3520,i8580). +Le(i3580,i8580). +Le(i3610,i8580). +Le(i3650,i8580). +Le(i3680,i8580). +Le(i3720,i8580). +Le(i3740,i8580). +Le(i3790,i8580). +Le(i3820,i8580). +Le(i3860,i8580). +Le(i3960,i8580). +Le(i4040,i8580). +Le(i4140,i8580). +Le(i4180,i8580). +Le(i4400,i8580). +Le(i4620,i8580). +Le(i4840,i8580). +Le(i5060,i8580). +Le(i5280,i8580). +Le(i5500,i8580). +Le(i5720,i8580). +Le(i5940,i8580). +Le(i6160,i8580). +Le(i6380,i8580). +Le(i6600,i8580). +Le(i6820,i8580). +Le(i7040,i8580). +Le(i7260,i8580). +Le(i7480,i8580). +Le(i7700,i8580). +Le(i7920,i8580). +Le(i8140,i8580). +Le(i8360,i8580). +Le(cr0_0,i-30). +Le(cr0_0,i0). +Le(cr0_0,i13). +Le(cr0_0,i26). +Le(cr0_0,i39). +Le(cr0_0,i52). +Le(cr0_0,i60). +Le(cr0_0,i65). +Le(cr0_0,i70). +Le(cr0_0,i78). +Le(cr0_0,i90). +Le(cr0_0,i91). +Le(cr0_0,i104). +Le(cr0_0,i117). +Le(cr0_0,i130). +Le(cr0_0,i143). +Le(cr0_0,i156). +Le(cr0_0,i169). +Le(cr0_0,i182). +Le(cr0_0,i195). +Le(cr0_0,i208). +Le(cr0_0,i221). +Le(cr0_0,i234). +Le(cr0_0,i247). +Le(cr0_0,i260). +Le(cr0_0,i460). +Le(cr0_0,i530). +Le(cr0_0,i600). +Le(cr0_0,i660). +Le(cr0_0,i670). +Le(cr0_0,i710). +Le(cr0_0,i740). +Le(cr0_0,i810). +Le(cr0_0,i850). +Le(cr0_0,i880). +Le(cr0_0,i890). +Le(cr0_0,i920). +Le(cr0_0,i960). +Le(cr0_0,i990). +Le(cr0_0,i1030). +Le(cr0_0,i1060). +Le(cr0_0,i1100). +Le(cr0_0,i1130). +Le(cr0_0,i1170). +Le(cr0_0,i1200). +Le(cr0_0,i1240). +Le(cr0_0,i1260). +Le(cr0_0,i1270). +Le(cr0_0,i1290). +Le(cr0_0,i1310). +Le(cr0_0,i1320). +Le(cr0_0,i1330). +Le(cr0_0,i1350). +Le(cr0_0,i1360). +Le(cr0_0,i1380). +Le(cr0_0,i1390). +Le(cr0_0,i1420). +Le(cr0_0,i1430). +Le(cr0_0,i1450). +Le(cr0_0,i1460). +Le(cr0_0,i1490). +Le(cr0_0,i1520). +Le(cr0_0,i1530). +Le(cr0_0,i1540). +Le(cr0_0,i1560). +Le(cr0_0,i1590). +Le(cr0_0,i1630). +Le(cr0_0,i1660). +Le(cr0_0,i1700). +Le(cr0_0,i1730). +Le(cr0_0,i1760). 
+Le(cr0_0,i1770). +Le(cr0_0,i1810). +Le(cr0_0,i1840). +Le(cr0_0,i1880). +Le(cr0_0,i1910). +Le(cr0_0,i1950). +Le(cr0_0,i1980). +Le(cr0_0,i2020). +Le(cr0_0,i2050). +Le(cr0_0,i2090). +Le(cr0_0,i2120). +Le(cr0_0,i2160). +Le(cr0_0,i2190). +Le(cr0_0,i2200). +Le(cr0_0,i2230). +Le(cr0_0,i2270). +Le(cr0_0,i2300). +Le(cr0_0,i2340). +Le(cr0_0,i2370). +Le(cr0_0,i2410). +Le(cr0_0,i2420). +Le(cr0_0,i2440). +Le(cr0_0,i2480). +Le(cr0_0,i2510). +Le(cr0_0,i2550). +Le(cr0_0,i2580). +Le(cr0_0,i2620). +Le(cr0_0,i2640). +Le(cr0_0,i2660). +Le(cr0_0,i2730). +Le(cr0_0,i2760). +Le(cr0_0,i2800). +Le(cr0_0,i2830). +Le(cr0_0,i2860). +Le(cr0_0,i2870). +Le(cr0_0,i2940). +Le(cr0_0,i2970). +Le(cr0_0,i3010). +Le(cr0_0,i3040). +Le(cr0_0,i3080). +Le(cr0_0,i3120). +Le(cr0_0,i3150). +Le(cr0_0,i3220). +Le(cr0_0,i3260). +Le(cr0_0,i3290). +Le(cr0_0,i3300). +Le(cr0_0,i3330). +Le(cr0_0,i3400). +Le(cr0_0,i3430). +Le(cr0_0,i3500). +Le(cr0_0,i3520). +Le(cr0_0,i3580). +Le(cr0_0,i3610). +Le(cr0_0,i3650). +Le(cr0_0,i3680). +Le(cr0_0,i3720). +Le(cr0_0,i3740). +Le(cr0_0,i3790). +Le(cr0_0,i3820). +Le(cr0_0,i3860). +Le(cr0_0,i3960). +Le(cr0_0,i4040). +Le(cr0_0,i4140). +Le(cr0_0,i4180). +Le(cr0_0,i4400). +Le(cr0_0,i4620). +Le(cr0_0,i4840). +Le(cr0_0,i5060). +Le(cr0_0,i5280). +Le(cr0_0,i5500). +Le(cr0_0,i5720). +Le(cr0_0,i5940). +Le(cr0_0,i6160). +Le(cr0_0,i6380). +Le(cr0_0,i6600). +Le(cr0_0,i6820). +Le(cr0_0,i7040). +Le(cr0_0,i7260). +Le(cr0_0,i7480). +Le(cr0_0,i7700). +Le(cr0_0,i7920). +Le(cr0_0,i8140). +Le(cr0_0,i8360). +Le(cr0_0,i8580). +Eq(i-30,i-30). +Eq(i0,i0). +Le(i0,cr2_0). +Le(cr2_0,i13). +Le(i-30,cr2_0). +Le(cr2_0,i26). +Le(cr2_0,i39). +Le(cr2_0,i52). +Le(cr2_0,i60). +Le(cr2_0,i65). +Le(cr2_0,i70). +Le(cr2_0,i78). +Le(cr2_0,i90). +Le(cr2_0,i91). +Le(cr2_0,i104). +Le(cr2_0,i117). +Le(cr2_0,i130). +Le(cr2_0,i143). +Le(cr2_0,i156). +Le(cr2_0,i169). +Le(cr2_0,i182). +Le(cr2_0,i195). +Le(cr2_0,i208). +Le(cr2_0,i221). +Le(cr2_0,i234). +Le(cr2_0,i247). +Le(cr2_0,i260). +Le(cr2_0,i460). +Le(cr2_0,i530). +Le(cr2_0,i600). +Le(cr2_0,i660). +Le(cr2_0,i670). +Le(cr2_0,i710). +Le(cr2_0,i740). +Le(cr2_0,i810). +Le(cr2_0,i850). +Le(cr2_0,i880). +Le(cr2_0,i890). +Le(cr2_0,i920). +Le(cr2_0,i960). +Le(cr2_0,i990). +Le(cr2_0,i1030). +Le(cr2_0,i1060). +Le(cr2_0,i1100). +Le(cr2_0,i1130). +Le(cr2_0,i1170). +Le(cr2_0,i1200). +Le(cr2_0,i1240). +Le(cr2_0,i1260). +Le(cr2_0,i1270). +Le(cr2_0,i1290). +Le(cr2_0,i1310). +Le(cr2_0,i1320). +Le(cr2_0,i1330). +Le(cr2_0,i1350). +Le(cr2_0,i1360). +Le(cr2_0,i1380). +Le(cr2_0,i1390). +Le(cr2_0,i1420). +Le(cr2_0,i1430). +Le(cr2_0,i1450). +Le(cr2_0,i1460). +Le(cr2_0,i1490). +Le(cr2_0,i1520). +Le(cr2_0,i1530). +Le(cr2_0,i1540). +Le(cr2_0,i1560). +Le(cr2_0,i1590). +Le(cr2_0,i1630). +Le(cr2_0,i1660). +Le(cr2_0,i1700). +Le(cr2_0,i1730). +Le(cr2_0,i1760). +Le(cr2_0,i1770). +Le(cr2_0,i1810). +Le(cr2_0,i1840). +Le(cr2_0,i1880). +Le(cr2_0,i1910). +Le(cr2_0,i1950). +Le(cr2_0,i1980). +Le(cr2_0,i2020). +Le(cr2_0,i2050). +Le(cr2_0,i2090). +Le(cr2_0,i2120). +Le(cr2_0,i2160). +Le(cr2_0,i2190). +Le(cr2_0,i2200). +Le(cr2_0,i2230). +Le(cr2_0,i2270). +Le(cr2_0,i2300). +Le(cr2_0,i2340). +Le(cr2_0,i2370). +Le(cr2_0,i2410). +Le(cr2_0,i2420). +Le(cr2_0,i2440). +Le(cr2_0,i2480). +Le(cr2_0,i2510). +Le(cr2_0,i2550). +Le(cr2_0,i2580). +Le(cr2_0,i2620). +Le(cr2_0,i2640). +Le(cr2_0,i2660). +Le(cr2_0,i2730). +Le(cr2_0,i2760). +Le(cr2_0,i2800). +Le(cr2_0,i2830). +Le(cr2_0,i2860). +Le(cr2_0,i2870). +Le(cr2_0,i2940). +Le(cr2_0,i2970). +Le(cr2_0,i3010). +Le(cr2_0,i3040). +Le(cr2_0,i3080). +Le(cr2_0,i3120). +Le(cr2_0,i3150). +Le(cr2_0,i3220). 
+Le(cr2_0,i3260). +Le(cr2_0,i3290). +Le(cr2_0,i3300). +Le(cr2_0,i3330). +Le(cr2_0,i3400). +Le(cr2_0,i3430). +Le(cr2_0,i3500). +Le(cr2_0,i3520). +Le(cr2_0,i3580). +Le(cr2_0,i3610). +Le(cr2_0,i3650). +Le(cr2_0,i3680). +Le(cr2_0,i3720). +Le(cr2_0,i3740). +Le(cr2_0,i3790). +Le(cr2_0,i3820). +Le(cr2_0,i3860). +Le(cr2_0,i3960). +Le(cr2_0,i4040). +Le(cr2_0,i4140). +Le(cr2_0,i4180). +Le(cr2_0,i4400). +Le(cr2_0,i4620). +Le(cr2_0,i4840). +Le(cr2_0,i5060). +Le(cr2_0,i5280). +Le(cr2_0,i5500). +Le(cr2_0,i5720). +Le(cr2_0,i5940). +Le(cr2_0,i6160). +Le(cr2_0,i6380). +Le(cr2_0,i6600). +Le(cr2_0,i6820). +Le(cr2_0,i7040). +Le(cr2_0,i7260). +Le(cr2_0,i7480). +Le(cr2_0,i7700). +Le(cr2_0,i7920). +Le(cr2_0,i8140). +Le(cr2_0,i8360). +Le(cr2_0,i8580). +Eq(i13,i13). +Le(i13,cr3_0). +Le(cr3_0,i26). +Le(i-30,cr3_0). +Le(i0,cr3_0). +Le(cr3_0,i39). +Le(cr3_0,i52). +Le(cr3_0,i60). +Le(cr3_0,i65). +Le(cr3_0,i70). +Le(cr3_0,i78). +Le(cr3_0,i90). +Le(cr3_0,i91). +Le(cr3_0,i104). +Le(cr3_0,i117). +Le(cr3_0,i130). +Le(cr3_0,i143). +Le(cr3_0,i156). +Le(cr3_0,i169). +Le(cr3_0,i182). +Le(cr3_0,i195). +Le(cr3_0,i208). +Le(cr3_0,i221). +Le(cr3_0,i234). +Le(cr3_0,i247). +Le(cr3_0,i260). +Le(cr3_0,i460). +Le(cr3_0,i530). +Le(cr3_0,i600). +Le(cr3_0,i660). +Le(cr3_0,i670). +Le(cr3_0,i710). +Le(cr3_0,i740). +Le(cr3_0,i810). +Le(cr3_0,i850). +Le(cr3_0,i880). +Le(cr3_0,i890). +Le(cr3_0,i920). +Le(cr3_0,i960). +Le(cr3_0,i990). +Le(cr3_0,i1030). +Le(cr3_0,i1060). +Le(cr3_0,i1100). +Le(cr3_0,i1130). +Le(cr3_0,i1170). +Le(cr3_0,i1200). +Le(cr3_0,i1240). +Le(cr3_0,i1260). +Le(cr3_0,i1270). +Le(cr3_0,i1290). +Le(cr3_0,i1310). +Le(cr3_0,i1320). +Le(cr3_0,i1330). +Le(cr3_0,i1350). +Le(cr3_0,i1360). +Le(cr3_0,i1380). +Le(cr3_0,i1390). +Le(cr3_0,i1420). +Le(cr3_0,i1430). +Le(cr3_0,i1450). +Le(cr3_0,i1460). +Le(cr3_0,i1490). +Le(cr3_0,i1520). +Le(cr3_0,i1530). +Le(cr3_0,i1540). +Le(cr3_0,i1560). +Le(cr3_0,i1590). +Le(cr3_0,i1630). +Le(cr3_0,i1660). +Le(cr3_0,i1700). +Le(cr3_0,i1730). +Le(cr3_0,i1760). +Le(cr3_0,i1770). +Le(cr3_0,i1810). +Le(cr3_0,i1840). +Le(cr3_0,i1880). +Le(cr3_0,i1910). +Le(cr3_0,i1950). +Le(cr3_0,i1980). +Le(cr3_0,i2020). +Le(cr3_0,i2050). +Le(cr3_0,i2090). +Le(cr3_0,i2120). +Le(cr3_0,i2160). +Le(cr3_0,i2190). +Le(cr3_0,i2200). +Le(cr3_0,i2230). +Le(cr3_0,i2270). +Le(cr3_0,i2300). +Le(cr3_0,i2340). +Le(cr3_0,i2370). +Le(cr3_0,i2410). +Le(cr3_0,i2420). +Le(cr3_0,i2440). +Le(cr3_0,i2480). +Le(cr3_0,i2510). +Le(cr3_0,i2550). +Le(cr3_0,i2580). +Le(cr3_0,i2620). +Le(cr3_0,i2640). +Le(cr3_0,i2660). +Le(cr3_0,i2730). +Le(cr3_0,i2760). +Le(cr3_0,i2800). +Le(cr3_0,i2830). +Le(cr3_0,i2860). +Le(cr3_0,i2870). +Le(cr3_0,i2940). +Le(cr3_0,i2970). +Le(cr3_0,i3010). +Le(cr3_0,i3040). +Le(cr3_0,i3080). +Le(cr3_0,i3120). +Le(cr3_0,i3150). +Le(cr3_0,i3220). +Le(cr3_0,i3260). +Le(cr3_0,i3290). +Le(cr3_0,i3300). +Le(cr3_0,i3330). +Le(cr3_0,i3400). +Le(cr3_0,i3430). +Le(cr3_0,i3500). +Le(cr3_0,i3520). +Le(cr3_0,i3580). +Le(cr3_0,i3610). +Le(cr3_0,i3650). +Le(cr3_0,i3680). +Le(cr3_0,i3720). +Le(cr3_0,i3740). +Le(cr3_0,i3790). +Le(cr3_0,i3820). +Le(cr3_0,i3860). +Le(cr3_0,i3960). +Le(cr3_0,i4040). +Le(cr3_0,i4140). +Le(cr3_0,i4180). +Le(cr3_0,i4400). +Le(cr3_0,i4620). +Le(cr3_0,i4840). +Le(cr3_0,i5060). +Le(cr3_0,i5280). +Le(cr3_0,i5500). +Le(cr3_0,i5720). +Le(cr3_0,i5940). +Le(cr3_0,i6160). +Le(cr3_0,i6380). +Le(cr3_0,i6600). +Le(cr3_0,i6820). +Le(cr3_0,i7040). +Le(cr3_0,i7260). +Le(cr3_0,i7480). +Le(cr3_0,i7700). +Le(cr3_0,i7920). +Le(cr3_0,i8140). +Le(cr3_0,i8360). +Le(cr3_0,i8580). +Eq(i26,i26). +Le(i26,cr4_0). +Le(cr4_0,i39). +Le(i-30,cr4_0). 
+Le(i0,cr4_0). +Le(i13,cr4_0). +Le(cr4_0,i52). +Le(cr4_0,i60). +Le(cr4_0,i65). +Le(cr4_0,i70). +Le(cr4_0,i78). +Le(cr4_0,i90). +Le(cr4_0,i91). +Le(cr4_0,i104). +Le(cr4_0,i117). +Le(cr4_0,i130). +Le(cr4_0,i143). +Le(cr4_0,i156). +Le(cr4_0,i169). +Le(cr4_0,i182). +Le(cr4_0,i195). +Le(cr4_0,i208). +Le(cr4_0,i221). +Le(cr4_0,i234). +Le(cr4_0,i247). +Le(cr4_0,i260). +Le(cr4_0,i460). +Le(cr4_0,i530). +Le(cr4_0,i600). +Le(cr4_0,i660). +Le(cr4_0,i670). +Le(cr4_0,i710). +Le(cr4_0,i740). +Le(cr4_0,i810). +Le(cr4_0,i850). +Le(cr4_0,i880). +Le(cr4_0,i890). +Le(cr4_0,i920). +Le(cr4_0,i960). +Le(cr4_0,i990). +Le(cr4_0,i1030). +Le(cr4_0,i1060). +Le(cr4_0,i1100). +Le(cr4_0,i1130). +Le(cr4_0,i1170). +Le(cr4_0,i1200). +Le(cr4_0,i1240). +Le(cr4_0,i1260). +Le(cr4_0,i1270). +Le(cr4_0,i1290). +Le(cr4_0,i1310). +Le(cr4_0,i1320). +Le(cr4_0,i1330). +Le(cr4_0,i1350). +Le(cr4_0,i1360). +Le(cr4_0,i1380). +Le(cr4_0,i1390). +Le(cr4_0,i1420). +Le(cr4_0,i1430). +Le(cr4_0,i1450). +Le(cr4_0,i1460). +Le(cr4_0,i1490). +Le(cr4_0,i1520). +Le(cr4_0,i1530). +Le(cr4_0,i1540). +Le(cr4_0,i1560). +Le(cr4_0,i1590). +Le(cr4_0,i1630). +Le(cr4_0,i1660). +Le(cr4_0,i1700). +Le(cr4_0,i1730). +Le(cr4_0,i1760). +Le(cr4_0,i1770). +Le(cr4_0,i1810). +Le(cr4_0,i1840). +Le(cr4_0,i1880). +Le(cr4_0,i1910). +Le(cr4_0,i1950). +Le(cr4_0,i1980). +Le(cr4_0,i2020). +Le(cr4_0,i2050). +Le(cr4_0,i2090). +Le(cr4_0,i2120). +Le(cr4_0,i2160). +Le(cr4_0,i2190). +Le(cr4_0,i2200). +Le(cr4_0,i2230). +Le(cr4_0,i2270). +Le(cr4_0,i2300). +Le(cr4_0,i2340). +Le(cr4_0,i2370). +Le(cr4_0,i2410). +Le(cr4_0,i2420). +Le(cr4_0,i2440). +Le(cr4_0,i2480). +Le(cr4_0,i2510). +Le(cr4_0,i2550). +Le(cr4_0,i2580). +Le(cr4_0,i2620). +Le(cr4_0,i2640). +Le(cr4_0,i2660). +Le(cr4_0,i2730). +Le(cr4_0,i2760). +Le(cr4_0,i2800). +Le(cr4_0,i2830). +Le(cr4_0,i2860). +Le(cr4_0,i2870). +Le(cr4_0,i2940). +Le(cr4_0,i2970). +Le(cr4_0,i3010). +Le(cr4_0,i3040). +Le(cr4_0,i3080). +Le(cr4_0,i3120). +Le(cr4_0,i3150). +Le(cr4_0,i3220). +Le(cr4_0,i3260). +Le(cr4_0,i3290). +Le(cr4_0,i3300). +Le(cr4_0,i3330). +Le(cr4_0,i3400). +Le(cr4_0,i3430). +Le(cr4_0,i3500). +Le(cr4_0,i3520). +Le(cr4_0,i3580). +Le(cr4_0,i3610). +Le(cr4_0,i3650). +Le(cr4_0,i3680). +Le(cr4_0,i3720). +Le(cr4_0,i3740). +Le(cr4_0,i3790). +Le(cr4_0,i3820). +Le(cr4_0,i3860). +Le(cr4_0,i3960). +Le(cr4_0,i4040). +Le(cr4_0,i4140). +Le(cr4_0,i4180). +Le(cr4_0,i4400). +Le(cr4_0,i4620). +Le(cr4_0,i4840). +Le(cr4_0,i5060). +Le(cr4_0,i5280). +Le(cr4_0,i5500). +Le(cr4_0,i5720). +Le(cr4_0,i5940). +Le(cr4_0,i6160). +Le(cr4_0,i6380). +Le(cr4_0,i6600). +Le(cr4_0,i6820). +Le(cr4_0,i7040). +Le(cr4_0,i7260). +Le(cr4_0,i7480). +Le(cr4_0,i7700). +Le(cr4_0,i7920). +Le(cr4_0,i8140). +Le(cr4_0,i8360). +Le(cr4_0,i8580). +Eq(i39,i39). +Le(i39,cr5_0). +Le(cr5_0,i52). +Le(i-30,cr5_0). +Le(i0,cr5_0). +Le(i13,cr5_0). +Le(i26,cr5_0). +Le(cr5_0,i60). +Le(cr5_0,i65). +Le(cr5_0,i70). +Le(cr5_0,i78). +Le(cr5_0,i90). +Le(cr5_0,i91). +Le(cr5_0,i104). +Le(cr5_0,i117). +Le(cr5_0,i130). +Le(cr5_0,i143). +Le(cr5_0,i156). +Le(cr5_0,i169). +Le(cr5_0,i182). +Le(cr5_0,i195). +Le(cr5_0,i208). +Le(cr5_0,i221). +Le(cr5_0,i234). +Le(cr5_0,i247). +Le(cr5_0,i260). +Le(cr5_0,i460). +Le(cr5_0,i530). +Le(cr5_0,i600). +Le(cr5_0,i660). +Le(cr5_0,i670). +Le(cr5_0,i710). +Le(cr5_0,i740). +Le(cr5_0,i810). +Le(cr5_0,i850). +Le(cr5_0,i880). +Le(cr5_0,i890). +Le(cr5_0,i920). +Le(cr5_0,i960). +Le(cr5_0,i990). +Le(cr5_0,i1030). +Le(cr5_0,i1060). +Le(cr5_0,i1100). +Le(cr5_0,i1130). +Le(cr5_0,i1170). +Le(cr5_0,i1200). +Le(cr5_0,i1240). +Le(cr5_0,i1260). +Le(cr5_0,i1270). +Le(cr5_0,i1290). +Le(cr5_0,i1310). 
+Le(cr5_0,i1320). +Le(cr5_0,i1330). +Le(cr5_0,i1350). +Le(cr5_0,i1360). +Le(cr5_0,i1380). +Le(cr5_0,i1390). +Le(cr5_0,i1420). +Le(cr5_0,i1430). +Le(cr5_0,i1450). +Le(cr5_0,i1460). +Le(cr5_0,i1490). +Le(cr5_0,i1520). +Le(cr5_0,i1530). +Le(cr5_0,i1540). +Le(cr5_0,i1560). +Le(cr5_0,i1590). +Le(cr5_0,i1630). +Le(cr5_0,i1660). +Le(cr5_0,i1700). +Le(cr5_0,i1730). +Le(cr5_0,i1760). +Le(cr5_0,i1770). +Le(cr5_0,i1810). +Le(cr5_0,i1840). +Le(cr5_0,i1880). +Le(cr5_0,i1910). +Le(cr5_0,i1950). +Le(cr5_0,i1980). +Le(cr5_0,i2020). +Le(cr5_0,i2050). +Le(cr5_0,i2090). +Le(cr5_0,i2120). +Le(cr5_0,i2160). +Le(cr5_0,i2190). +Le(cr5_0,i2200). +Le(cr5_0,i2230). +Le(cr5_0,i2270). +Le(cr5_0,i2300). +Le(cr5_0,i2340). +Le(cr5_0,i2370). +Le(cr5_0,i2410). +Le(cr5_0,i2420). +Le(cr5_0,i2440). +Le(cr5_0,i2480). +Le(cr5_0,i2510). +Le(cr5_0,i2550). +Le(cr5_0,i2580). +Le(cr5_0,i2620). +Le(cr5_0,i2640). +Le(cr5_0,i2660). +Le(cr5_0,i2730). +Le(cr5_0,i2760). +Le(cr5_0,i2800). +Le(cr5_0,i2830). +Le(cr5_0,i2860). +Le(cr5_0,i2870). +Le(cr5_0,i2940). +Le(cr5_0,i2970). +Le(cr5_0,i3010). +Le(cr5_0,i3040). +Le(cr5_0,i3080). +Le(cr5_0,i3120). +Le(cr5_0,i3150). +Le(cr5_0,i3220). +Le(cr5_0,i3260). +Le(cr5_0,i3290). +Le(cr5_0,i3300). +Le(cr5_0,i3330). +Le(cr5_0,i3400). +Le(cr5_0,i3430). +Le(cr5_0,i3500). +Le(cr5_0,i3520). +Le(cr5_0,i3580). +Le(cr5_0,i3610). +Le(cr5_0,i3650). +Le(cr5_0,i3680). +Le(cr5_0,i3720). +Le(cr5_0,i3740). +Le(cr5_0,i3790). +Le(cr5_0,i3820). +Le(cr5_0,i3860). +Le(cr5_0,i3960). +Le(cr5_0,i4040). +Le(cr5_0,i4140). +Le(cr5_0,i4180). +Le(cr5_0,i4400). +Le(cr5_0,i4620). +Le(cr5_0,i4840). +Le(cr5_0,i5060). +Le(cr5_0,i5280). +Le(cr5_0,i5500). +Le(cr5_0,i5720). +Le(cr5_0,i5940). +Le(cr5_0,i6160). +Le(cr5_0,i6380). +Le(cr5_0,i6600). +Le(cr5_0,i6820). +Le(cr5_0,i7040). +Le(cr5_0,i7260). +Le(cr5_0,i7480). +Le(cr5_0,i7700). +Le(cr5_0,i7920). +Le(cr5_0,i8140). +Le(cr5_0,i8360). +Le(cr5_0,i8580). +Eq(i52,i52). +Le(i52,cr6_0). +Le(cr6_0,i60). +Le(i-30,cr6_0). +Le(i0,cr6_0). +Le(i13,cr6_0). +Le(i26,cr6_0). +Le(i39,cr6_0). +Le(cr6_0,i65). +Le(cr6_0,i70). +Le(cr6_0,i78). +Le(cr6_0,i90). +Le(cr6_0,i91). +Le(cr6_0,i104). +Le(cr6_0,i117). +Le(cr6_0,i130). +Le(cr6_0,i143). +Le(cr6_0,i156). +Le(cr6_0,i169). +Le(cr6_0,i182). +Le(cr6_0,i195). +Le(cr6_0,i208). +Le(cr6_0,i221). +Le(cr6_0,i234). +Le(cr6_0,i247). +Le(cr6_0,i260). +Le(cr6_0,i460). +Le(cr6_0,i530). +Le(cr6_0,i600). +Le(cr6_0,i660). +Le(cr6_0,i670). +Le(cr6_0,i710). +Le(cr6_0,i740). +Le(cr6_0,i810). +Le(cr6_0,i850). +Le(cr6_0,i880). +Le(cr6_0,i890). +Le(cr6_0,i920). +Le(cr6_0,i960). +Le(cr6_0,i990). +Le(cr6_0,i1030). +Le(cr6_0,i1060). +Le(cr6_0,i1100). +Le(cr6_0,i1130). +Le(cr6_0,i1170). +Le(cr6_0,i1200). +Le(cr6_0,i1240). +Le(cr6_0,i1260). +Le(cr6_0,i1270). +Le(cr6_0,i1290). +Le(cr6_0,i1310). +Le(cr6_0,i1320). +Le(cr6_0,i1330). +Le(cr6_0,i1350). +Le(cr6_0,i1360). +Le(cr6_0,i1380). +Le(cr6_0,i1390). +Le(cr6_0,i1420). +Le(cr6_0,i1430). +Le(cr6_0,i1450). +Le(cr6_0,i1460). +Le(cr6_0,i1490). +Le(cr6_0,i1520). +Le(cr6_0,i1530). +Le(cr6_0,i1540). +Le(cr6_0,i1560). +Le(cr6_0,i1590). +Le(cr6_0,i1630). +Le(cr6_0,i1660). +Le(cr6_0,i1700). +Le(cr6_0,i1730). +Le(cr6_0,i1760). +Le(cr6_0,i1770). +Le(cr6_0,i1810). +Le(cr6_0,i1840). +Le(cr6_0,i1880). +Le(cr6_0,i1910). +Le(cr6_0,i1950). +Le(cr6_0,i1980). +Le(cr6_0,i2020). +Le(cr6_0,i2050). +Le(cr6_0,i2090). +Le(cr6_0,i2120). +Le(cr6_0,i2160). +Le(cr6_0,i2190). +Le(cr6_0,i2200). +Le(cr6_0,i2230). +Le(cr6_0,i2270). +Le(cr6_0,i2300). +Le(cr6_0,i2340). +Le(cr6_0,i2370). +Le(cr6_0,i2410). +Le(cr6_0,i2420). +Le(cr6_0,i2440). +Le(cr6_0,i2480). 
+Le(cr6_0,i2510). +Le(cr6_0,i2550). +Le(cr6_0,i2580). +Le(cr6_0,i2620). +Le(cr6_0,i2640). +Le(cr6_0,i2660). +Le(cr6_0,i2730). +Le(cr6_0,i2760). +Le(cr6_0,i2800). +Le(cr6_0,i2830). +Le(cr6_0,i2860). +Le(cr6_0,i2870). +Le(cr6_0,i2940). +Le(cr6_0,i2970). +Le(cr6_0,i3010). +Le(cr6_0,i3040). +Le(cr6_0,i3080). +Le(cr6_0,i3120). +Le(cr6_0,i3150). +Le(cr6_0,i3220). +Le(cr6_0,i3260). +Le(cr6_0,i3290). +Le(cr6_0,i3300). +Le(cr6_0,i3330). +Le(cr6_0,i3400). +Le(cr6_0,i3430). +Le(cr6_0,i3500). +Le(cr6_0,i3520). +Le(cr6_0,i3580). +Le(cr6_0,i3610). +Le(cr6_0,i3650). +Le(cr6_0,i3680). +Le(cr6_0,i3720). +Le(cr6_0,i3740). +Le(cr6_0,i3790). +Le(cr6_0,i3820). +Le(cr6_0,i3860). +Le(cr6_0,i3960). +Le(cr6_0,i4040). +Le(cr6_0,i4140). +Le(cr6_0,i4180). +Le(cr6_0,i4400). +Le(cr6_0,i4620). +Le(cr6_0,i4840). +Le(cr6_0,i5060). +Le(cr6_0,i5280). +Le(cr6_0,i5500). +Le(cr6_0,i5720). +Le(cr6_0,i5940). +Le(cr6_0,i6160). +Le(cr6_0,i6380). +Le(cr6_0,i6600). +Le(cr6_0,i6820). +Le(cr6_0,i7040). +Le(cr6_0,i7260). +Le(cr6_0,i7480). +Le(cr6_0,i7700). +Le(cr6_0,i7920). +Le(cr6_0,i8140). +Le(cr6_0,i8360). +Le(cr6_0,i8580). +Eq(i60,i60). +Le(i60,cr7_0). +Le(cr7_0,i65). +Le(i-30,cr7_0). +Le(i0,cr7_0). +Le(i13,cr7_0). +Le(i26,cr7_0). +Le(i39,cr7_0). +Le(i52,cr7_0). +Le(cr7_0,i70). +Le(cr7_0,i78). +Le(cr7_0,i90). +Le(cr7_0,i91). +Le(cr7_0,i104). +Le(cr7_0,i117). +Le(cr7_0,i130). +Le(cr7_0,i143). +Le(cr7_0,i156). +Le(cr7_0,i169). +Le(cr7_0,i182). +Le(cr7_0,i195). +Le(cr7_0,i208). +Le(cr7_0,i221). +Le(cr7_0,i234). +Le(cr7_0,i247). +Le(cr7_0,i260). +Le(cr7_0,i460). +Le(cr7_0,i530). +Le(cr7_0,i600). +Le(cr7_0,i660). +Le(cr7_0,i670). +Le(cr7_0,i710). +Le(cr7_0,i740). +Le(cr7_0,i810). +Le(cr7_0,i850). +Le(cr7_0,i880). +Le(cr7_0,i890). +Le(cr7_0,i920). +Le(cr7_0,i960). +Le(cr7_0,i990). +Le(cr7_0,i1030). +Le(cr7_0,i1060). +Le(cr7_0,i1100). +Le(cr7_0,i1130). +Le(cr7_0,i1170). +Le(cr7_0,i1200). +Le(cr7_0,i1240). +Le(cr7_0,i1260). +Le(cr7_0,i1270). +Le(cr7_0,i1290). +Le(cr7_0,i1310). +Le(cr7_0,i1320). +Le(cr7_0,i1330). +Le(cr7_0,i1350). +Le(cr7_0,i1360). +Le(cr7_0,i1380). +Le(cr7_0,i1390). +Le(cr7_0,i1420). +Le(cr7_0,i1430). +Le(cr7_0,i1450). +Le(cr7_0,i1460). +Le(cr7_0,i1490). +Le(cr7_0,i1520). +Le(cr7_0,i1530). +Le(cr7_0,i1540). +Le(cr7_0,i1560). +Le(cr7_0,i1590). +Le(cr7_0,i1630). +Le(cr7_0,i1660). +Le(cr7_0,i1700). +Le(cr7_0,i1730). +Le(cr7_0,i1760). +Le(cr7_0,i1770). +Le(cr7_0,i1810). +Le(cr7_0,i1840). +Le(cr7_0,i1880). +Le(cr7_0,i1910). +Le(cr7_0,i1950). +Le(cr7_0,i1980). +Le(cr7_0,i2020). +Le(cr7_0,i2050). +Le(cr7_0,i2090). +Le(cr7_0,i2120). +Le(cr7_0,i2160). +Le(cr7_0,i2190). +Le(cr7_0,i2200). +Le(cr7_0,i2230). +Le(cr7_0,i2270). +Le(cr7_0,i2300). +Le(cr7_0,i2340). +Le(cr7_0,i2370). +Le(cr7_0,i2410). +Le(cr7_0,i2420). +Le(cr7_0,i2440). +Le(cr7_0,i2480). +Le(cr7_0,i2510). +Le(cr7_0,i2550). +Le(cr7_0,i2580). +Le(cr7_0,i2620). +Le(cr7_0,i2640). +Le(cr7_0,i2660). +Le(cr7_0,i2730). +Le(cr7_0,i2760). +Le(cr7_0,i2800). +Le(cr7_0,i2830). +Le(cr7_0,i2860). +Le(cr7_0,i2870). +Le(cr7_0,i2940). +Le(cr7_0,i2970). +Le(cr7_0,i3010). +Le(cr7_0,i3040). +Le(cr7_0,i3080). +Le(cr7_0,i3120). +Le(cr7_0,i3150). +Le(cr7_0,i3220). +Le(cr7_0,i3260). +Le(cr7_0,i3290). +Le(cr7_0,i3300). +Le(cr7_0,i3330). +Le(cr7_0,i3400). +Le(cr7_0,i3430). +Le(cr7_0,i3500). +Le(cr7_0,i3520). +Le(cr7_0,i3580). +Le(cr7_0,i3610). +Le(cr7_0,i3650). +Le(cr7_0,i3680). +Le(cr7_0,i3720). +Le(cr7_0,i3740). +Le(cr7_0,i3790). +Le(cr7_0,i3820). +Le(cr7_0,i3860). +Le(cr7_0,i3960). +Le(cr7_0,i4040). +Le(cr7_0,i4140). +Le(cr7_0,i4180). +Le(cr7_0,i4400). +Le(cr7_0,i4620). +Le(cr7_0,i4840). 
+Le(cr7_0,i5060). +Le(cr7_0,i5280). +Le(cr7_0,i5500). +Le(cr7_0,i5720). +Le(cr7_0,i5940). +Le(cr7_0,i6160). +Le(cr7_0,i6380). +Le(cr7_0,i6600). +Le(cr7_0,i6820). +Le(cr7_0,i7040). +Le(cr7_0,i7260). +Le(cr7_0,i7480). +Le(cr7_0,i7700). +Le(cr7_0,i7920). +Le(cr7_0,i8140). +Le(cr7_0,i8360). +Le(cr7_0,i8580). +Eq(i65,i65). +Le(i65,cr8_0). +Le(cr8_0,i70). +Le(i-30,cr8_0). +Le(i0,cr8_0). +Le(i13,cr8_0). +Le(i26,cr8_0). +Le(i39,cr8_0). +Le(i52,cr8_0). +Le(i60,cr8_0). +Le(cr8_0,i78). +Le(cr8_0,i90). +Le(cr8_0,i91). +Le(cr8_0,i104). +Le(cr8_0,i117). +Le(cr8_0,i130). +Le(cr8_0,i143). +Le(cr8_0,i156). +Le(cr8_0,i169). +Le(cr8_0,i182). +Le(cr8_0,i195). +Le(cr8_0,i208). +Le(cr8_0,i221). +Le(cr8_0,i234). +Le(cr8_0,i247). +Le(cr8_0,i260). +Le(cr8_0,i460). +Le(cr8_0,i530). +Le(cr8_0,i600). +Le(cr8_0,i660). +Le(cr8_0,i670). +Le(cr8_0,i710). +Le(cr8_0,i740). +Le(cr8_0,i810). +Le(cr8_0,i850). +Le(cr8_0,i880). +Le(cr8_0,i890). +Le(cr8_0,i920). +Le(cr8_0,i960). +Le(cr8_0,i990). +Le(cr8_0,i1030). +Le(cr8_0,i1060). +Le(cr8_0,i1100). +Le(cr8_0,i1130). +Le(cr8_0,i1170). +Le(cr8_0,i1200). +Le(cr8_0,i1240). +Le(cr8_0,i1260). +Le(cr8_0,i1270). +Le(cr8_0,i1290). +Le(cr8_0,i1310). +Le(cr8_0,i1320). +Le(cr8_0,i1330). +Le(cr8_0,i1350). +Le(cr8_0,i1360). +Le(cr8_0,i1380). +Le(cr8_0,i1390). +Le(cr8_0,i1420). +Le(cr8_0,i1430). +Le(cr8_0,i1450). +Le(cr8_0,i1460). +Le(cr8_0,i1490). +Le(cr8_0,i1520). +Le(cr8_0,i1530). +Le(cr8_0,i1540). +Le(cr8_0,i1560). +Le(cr8_0,i1590). +Le(cr8_0,i1630). +Le(cr8_0,i1660). +Le(cr8_0,i1700). +Le(cr8_0,i1730). +Le(cr8_0,i1760). +Le(cr8_0,i1770). +Le(cr8_0,i1810). +Le(cr8_0,i1840). +Le(cr8_0,i1880). +Le(cr8_0,i1910). +Le(cr8_0,i1950). +Le(cr8_0,i1980). +Le(cr8_0,i2020). +Le(cr8_0,i2050). +Le(cr8_0,i2090). +Le(cr8_0,i2120). +Le(cr8_0,i2160). +Le(cr8_0,i2190). +Le(cr8_0,i2200). +Le(cr8_0,i2230). +Le(cr8_0,i2270). +Le(cr8_0,i2300). +Le(cr8_0,i2340). +Le(cr8_0,i2370). +Le(cr8_0,i2410). +Le(cr8_0,i2420). +Le(cr8_0,i2440). +Le(cr8_0,i2480). +Le(cr8_0,i2510). +Le(cr8_0,i2550). +Le(cr8_0,i2580). +Le(cr8_0,i2620). +Le(cr8_0,i2640). +Le(cr8_0,i2660). +Le(cr8_0,i2730). +Le(cr8_0,i2760). +Le(cr8_0,i2800). +Le(cr8_0,i2830). +Le(cr8_0,i2860). +Le(cr8_0,i2870). +Le(cr8_0,i2940). +Le(cr8_0,i2970). +Le(cr8_0,i3010). +Le(cr8_0,i3040). +Le(cr8_0,i3080). +Le(cr8_0,i3120). +Le(cr8_0,i3150). +Le(cr8_0,i3220). +Le(cr8_0,i3260). +Le(cr8_0,i3290). +Le(cr8_0,i3300). +Le(cr8_0,i3330). +Le(cr8_0,i3400). +Le(cr8_0,i3430). +Le(cr8_0,i3500). +Le(cr8_0,i3520). +Le(cr8_0,i3580). +Le(cr8_0,i3610). +Le(cr8_0,i3650). +Le(cr8_0,i3680). +Le(cr8_0,i3720). +Le(cr8_0,i3740). +Le(cr8_0,i3790). +Le(cr8_0,i3820). +Le(cr8_0,i3860). +Le(cr8_0,i3960). +Le(cr8_0,i4040). +Le(cr8_0,i4140). +Le(cr8_0,i4180). +Le(cr8_0,i4400). +Le(cr8_0,i4620). +Le(cr8_0,i4840). +Le(cr8_0,i5060). +Le(cr8_0,i5280). +Le(cr8_0,i5500). +Le(cr8_0,i5720). +Le(cr8_0,i5940). +Le(cr8_0,i6160). +Le(cr8_0,i6380). +Le(cr8_0,i6600). +Le(cr8_0,i6820). +Le(cr8_0,i7040). +Le(cr8_0,i7260). +Le(cr8_0,i7480). +Le(cr8_0,i7700). +Le(cr8_0,i7920). +Le(cr8_0,i8140). +Le(cr8_0,i8360). +Le(cr8_0,i8580). +Eq(i70,i70). +Le(i70,cr9_0). +Le(cr9_0,i78). +Le(i-30,cr9_0). +Le(i0,cr9_0). +Le(i13,cr9_0). +Le(i26,cr9_0). +Le(i39,cr9_0). +Le(i52,cr9_0). +Le(i60,cr9_0). +Le(i65,cr9_0). +Le(cr9_0,i90). +Le(cr9_0,i91). +Le(cr9_0,i104). +Le(cr9_0,i117). +Le(cr9_0,i130). +Le(cr9_0,i143). +Le(cr9_0,i156). +Le(cr9_0,i169). +Le(cr9_0,i182). +Le(cr9_0,i195). +Le(cr9_0,i208). +Le(cr9_0,i221). +Le(cr9_0,i234). +Le(cr9_0,i247). +Le(cr9_0,i260). +Le(cr9_0,i460). +Le(cr9_0,i530). +Le(cr9_0,i600). +Le(cr9_0,i660). 
+Le(cr9_0,i670). +Le(cr9_0,i710). +Le(cr9_0,i740). +Le(cr9_0,i810). +Le(cr9_0,i850). +Le(cr9_0,i880). +Le(cr9_0,i890). +Le(cr9_0,i920). +Le(cr9_0,i960). +Le(cr9_0,i990). +Le(cr9_0,i1030). +Le(cr9_0,i1060). +Le(cr9_0,i1100). +Le(cr9_0,i1130). +Le(cr9_0,i1170). +Le(cr9_0,i1200). +Le(cr9_0,i1240). +Le(cr9_0,i1260). +Le(cr9_0,i1270). +Le(cr9_0,i1290). +Le(cr9_0,i1310). +Le(cr9_0,i1320). +Le(cr9_0,i1330). +Le(cr9_0,i1350). +Le(cr9_0,i1360). +Le(cr9_0,i1380). +Le(cr9_0,i1390). +Le(cr9_0,i1420). +Le(cr9_0,i1430). +Le(cr9_0,i1450). +Le(cr9_0,i1460). +Le(cr9_0,i1490). +Le(cr9_0,i1520). +Le(cr9_0,i1530). +Le(cr9_0,i1540). +Le(cr9_0,i1560). +Le(cr9_0,i1590). +Le(cr9_0,i1630). +Le(cr9_0,i1660). +Le(cr9_0,i1700). +Le(cr9_0,i1730). +Le(cr9_0,i1760). +Le(cr9_0,i1770). +Le(cr9_0,i1810). +Le(cr9_0,i1840). +Le(cr9_0,i1880). +Le(cr9_0,i1910). +Le(cr9_0,i1950). +Le(cr9_0,i1980). +Le(cr9_0,i2020). +Le(cr9_0,i2050). +Le(cr9_0,i2090). +Le(cr9_0,i2120). +Le(cr9_0,i2160). +Le(cr9_0,i2190). +Le(cr9_0,i2200). +Le(cr9_0,i2230). +Le(cr9_0,i2270). +Le(cr9_0,i2300). +Le(cr9_0,i2340). +Le(cr9_0,i2370). +Le(cr9_0,i2410). +Le(cr9_0,i2420). +Le(cr9_0,i2440). +Le(cr9_0,i2480). +Le(cr9_0,i2510). +Le(cr9_0,i2550). +Le(cr9_0,i2580). +Le(cr9_0,i2620). +Le(cr9_0,i2640). +Le(cr9_0,i2660). +Le(cr9_0,i2730). +Le(cr9_0,i2760). +Le(cr9_0,i2800). +Le(cr9_0,i2830). +Le(cr9_0,i2860). +Le(cr9_0,i2870). +Le(cr9_0,i2940). +Le(cr9_0,i2970). +Le(cr9_0,i3010). +Le(cr9_0,i3040). +Le(cr9_0,i3080). +Le(cr9_0,i3120). +Le(cr9_0,i3150). +Le(cr9_0,i3220). +Le(cr9_0,i3260). +Le(cr9_0,i3290). +Le(cr9_0,i3300). +Le(cr9_0,i3330). +Le(cr9_0,i3400). +Le(cr9_0,i3430). +Le(cr9_0,i3500). +Le(cr9_0,i3520). +Le(cr9_0,i3580). +Le(cr9_0,i3610). +Le(cr9_0,i3650). +Le(cr9_0,i3680). +Le(cr9_0,i3720). +Le(cr9_0,i3740). +Le(cr9_0,i3790). +Le(cr9_0,i3820). +Le(cr9_0,i3860). +Le(cr9_0,i3960). +Le(cr9_0,i4040). +Le(cr9_0,i4140). +Le(cr9_0,i4180). +Le(cr9_0,i4400). +Le(cr9_0,i4620). +Le(cr9_0,i4840). +Le(cr9_0,i5060). +Le(cr9_0,i5280). +Le(cr9_0,i5500). +Le(cr9_0,i5720). +Le(cr9_0,i5940). +Le(cr9_0,i6160). +Le(cr9_0,i6380). +Le(cr9_0,i6600). +Le(cr9_0,i6820). +Le(cr9_0,i7040). +Le(cr9_0,i7260). +Le(cr9_0,i7480). +Le(cr9_0,i7700). +Le(cr9_0,i7920). +Le(cr9_0,i8140). +Le(cr9_0,i8360). +Le(cr9_0,i8580). +Eq(i78,i78). +Le(i78,cr10_0). +Le(cr10_0,i90). +Le(i-30,cr10_0). +Le(i0,cr10_0). +Le(i13,cr10_0). +Le(i26,cr10_0). +Le(i39,cr10_0). +Le(i52,cr10_0). +Le(i60,cr10_0). +Le(i65,cr10_0). +Le(i70,cr10_0). +Le(cr10_0,i91). +Le(cr10_0,i104). +Le(cr10_0,i117). +Le(cr10_0,i130). +Le(cr10_0,i143). +Le(cr10_0,i156). +Le(cr10_0,i169). +Le(cr10_0,i182). +Le(cr10_0,i195). +Le(cr10_0,i208). +Le(cr10_0,i221). +Le(cr10_0,i234). +Le(cr10_0,i247). +Le(cr10_0,i260). +Le(cr10_0,i460). +Le(cr10_0,i530). +Le(cr10_0,i600). +Le(cr10_0,i660). +Le(cr10_0,i670). +Le(cr10_0,i710). +Le(cr10_0,i740). +Le(cr10_0,i810). +Le(cr10_0,i850). +Le(cr10_0,i880). +Le(cr10_0,i890). +Le(cr10_0,i920). +Le(cr10_0,i960). +Le(cr10_0,i990). +Le(cr10_0,i1030). +Le(cr10_0,i1060). +Le(cr10_0,i1100). +Le(cr10_0,i1130). +Le(cr10_0,i1170). +Le(cr10_0,i1200). +Le(cr10_0,i1240). +Le(cr10_0,i1260). +Le(cr10_0,i1270). +Le(cr10_0,i1290). +Le(cr10_0,i1310). +Le(cr10_0,i1320). +Le(cr10_0,i1330). +Le(cr10_0,i1350). +Le(cr10_0,i1360). +Le(cr10_0,i1380). +Le(cr10_0,i1390). +Le(cr10_0,i1420). +Le(cr10_0,i1430). +Le(cr10_0,i1450). +Le(cr10_0,i1460). +Le(cr10_0,i1490). +Le(cr10_0,i1520). +Le(cr10_0,i1530). +Le(cr10_0,i1540). +Le(cr10_0,i1560). +Le(cr10_0,i1590). +Le(cr10_0,i1630). +Le(cr10_0,i1660). +Le(cr10_0,i1700). +Le(cr10_0,i1730). 
+Le(cr10_0,i1760). +Le(cr10_0,i1770). +Le(cr10_0,i1810). +Le(cr10_0,i1840). +Le(cr10_0,i1880). +Le(cr10_0,i1910). +Le(cr10_0,i1950). +Le(cr10_0,i1980). +Le(cr10_0,i2020). +Le(cr10_0,i2050). +Le(cr10_0,i2090). +Le(cr10_0,i2120). +Le(cr10_0,i2160). +Le(cr10_0,i2190). +Le(cr10_0,i2200). +Le(cr10_0,i2230). +Le(cr10_0,i2270). +Le(cr10_0,i2300). +Le(cr10_0,i2340). +Le(cr10_0,i2370). +Le(cr10_0,i2410). +Le(cr10_0,i2420). +Le(cr10_0,i2440). +Le(cr10_0,i2480). +Le(cr10_0,i2510). +Le(cr10_0,i2550). +Le(cr10_0,i2580). +Le(cr10_0,i2620). +Le(cr10_0,i2640). +Le(cr10_0,i2660). +Le(cr10_0,i2730). +Le(cr10_0,i2760). +Le(cr10_0,i2800). +Le(cr10_0,i2830). +Le(cr10_0,i2860). +Le(cr10_0,i2870). +Le(cr10_0,i2940). +Le(cr10_0,i2970). +Le(cr10_0,i3010). +Le(cr10_0,i3040). +Le(cr10_0,i3080). +Le(cr10_0,i3120). +Le(cr10_0,i3150). +Le(cr10_0,i3220). +Le(cr10_0,i3260). +Le(cr10_0,i3290). +Le(cr10_0,i3300). +Le(cr10_0,i3330). +Le(cr10_0,i3400). +Le(cr10_0,i3430). +Le(cr10_0,i3500). +Le(cr10_0,i3520). +Le(cr10_0,i3580). +Le(cr10_0,i3610). +Le(cr10_0,i3650). +Le(cr10_0,i3680). +Le(cr10_0,i3720). +Le(cr10_0,i3740). +Le(cr10_0,i3790). +Le(cr10_0,i3820). +Le(cr10_0,i3860). +Le(cr10_0,i3960). +Le(cr10_0,i4040). +Le(cr10_0,i4140). +Le(cr10_0,i4180). +Le(cr10_0,i4400). +Le(cr10_0,i4620). +Le(cr10_0,i4840). +Le(cr10_0,i5060). +Le(cr10_0,i5280). +Le(cr10_0,i5500). +Le(cr10_0,i5720). +Le(cr10_0,i5940). +Le(cr10_0,i6160). +Le(cr10_0,i6380). +Le(cr10_0,i6600). +Le(cr10_0,i6820). +Le(cr10_0,i7040). +Le(cr10_0,i7260). +Le(cr10_0,i7480). +Le(cr10_0,i7700). +Le(cr10_0,i7920). +Le(cr10_0,i8140). +Le(cr10_0,i8360). +Le(cr10_0,i8580). +Eq(i90,i90). +Le(i90,cr11_0). +Le(cr11_0,i91). +Le(i-30,cr11_0). +Le(i0,cr11_0). +Le(i13,cr11_0). +Le(i26,cr11_0). +Le(i39,cr11_0). +Le(i52,cr11_0). +Le(i60,cr11_0). +Le(i65,cr11_0). +Le(i70,cr11_0). +Le(i78,cr11_0). +Le(cr11_0,i104). +Le(cr11_0,i117). +Le(cr11_0,i130). +Le(cr11_0,i143). +Le(cr11_0,i156). +Le(cr11_0,i169). +Le(cr11_0,i182). +Le(cr11_0,i195). +Le(cr11_0,i208). +Le(cr11_0,i221). +Le(cr11_0,i234). +Le(cr11_0,i247). +Le(cr11_0,i260). +Le(cr11_0,i460). +Le(cr11_0,i530). +Le(cr11_0,i600). +Le(cr11_0,i660). +Le(cr11_0,i670). +Le(cr11_0,i710). +Le(cr11_0,i740). +Le(cr11_0,i810). +Le(cr11_0,i850). +Le(cr11_0,i880). +Le(cr11_0,i890). +Le(cr11_0,i920). +Le(cr11_0,i960). +Le(cr11_0,i990). +Le(cr11_0,i1030). +Le(cr11_0,i1060). +Le(cr11_0,i1100). +Le(cr11_0,i1130). +Le(cr11_0,i1170). +Le(cr11_0,i1200). +Le(cr11_0,i1240). +Le(cr11_0,i1260). +Le(cr11_0,i1270). +Le(cr11_0,i1290). +Le(cr11_0,i1310). +Le(cr11_0,i1320). +Le(cr11_0,i1330). +Le(cr11_0,i1350). +Le(cr11_0,i1360). +Le(cr11_0,i1380). +Le(cr11_0,i1390). +Le(cr11_0,i1420). +Le(cr11_0,i1430). +Le(cr11_0,i1450). +Le(cr11_0,i1460). +Le(cr11_0,i1490). +Le(cr11_0,i1520). +Le(cr11_0,i1530). +Le(cr11_0,i1540). +Le(cr11_0,i1560). +Le(cr11_0,i1590). +Le(cr11_0,i1630). +Le(cr11_0,i1660). +Le(cr11_0,i1700). +Le(cr11_0,i1730). +Le(cr11_0,i1760). +Le(cr11_0,i1770). +Le(cr11_0,i1810). +Le(cr11_0,i1840). +Le(cr11_0,i1880). +Le(cr11_0,i1910). +Le(cr11_0,i1950). +Le(cr11_0,i1980). +Le(cr11_0,i2020). +Le(cr11_0,i2050). +Le(cr11_0,i2090). +Le(cr11_0,i2120). +Le(cr11_0,i2160). +Le(cr11_0,i2190). +Le(cr11_0,i2200). +Le(cr11_0,i2230). +Le(cr11_0,i2270). +Le(cr11_0,i2300). +Le(cr11_0,i2340). +Le(cr11_0,i2370). +Le(cr11_0,i2410). +Le(cr11_0,i2420). +Le(cr11_0,i2440). +Le(cr11_0,i2480). +Le(cr11_0,i2510). +Le(cr11_0,i2550). +Le(cr11_0,i2580). +Le(cr11_0,i2620). +Le(cr11_0,i2640). +Le(cr11_0,i2660). +Le(cr11_0,i2730). +Le(cr11_0,i2760). +Le(cr11_0,i2800). +Le(cr11_0,i2830). 
+Le(cr11_0,i2860). +Le(cr11_0,i2870). +Le(cr11_0,i2940). +Le(cr11_0,i2970). +Le(cr11_0,i3010). +Le(cr11_0,i3040). +Le(cr11_0,i3080). +Le(cr11_0,i3120). +Le(cr11_0,i3150). +Le(cr11_0,i3220). +Le(cr11_0,i3260). +Le(cr11_0,i3290). +Le(cr11_0,i3300). +Le(cr11_0,i3330). +Le(cr11_0,i3400). +Le(cr11_0,i3430). +Le(cr11_0,i3500). +Le(cr11_0,i3520). +Le(cr11_0,i3580). +Le(cr11_0,i3610). +Le(cr11_0,i3650). +Le(cr11_0,i3680). +Le(cr11_0,i3720). +Le(cr11_0,i3740). +Le(cr11_0,i3790). +Le(cr11_0,i3820). +Le(cr11_0,i3860). +Le(cr11_0,i3960). +Le(cr11_0,i4040). +Le(cr11_0,i4140). +Le(cr11_0,i4180). +Le(cr11_0,i4400). +Le(cr11_0,i4620). +Le(cr11_0,i4840). +Le(cr11_0,i5060). +Le(cr11_0,i5280). +Le(cr11_0,i5500). +Le(cr11_0,i5720). +Le(cr11_0,i5940). +Le(cr11_0,i6160). +Le(cr11_0,i6380). +Le(cr11_0,i6600). +Le(cr11_0,i6820). +Le(cr11_0,i7040). +Le(cr11_0,i7260). +Le(cr11_0,i7480). +Le(cr11_0,i7700). +Le(cr11_0,i7920). +Le(cr11_0,i8140). +Le(cr11_0,i8360). +Le(cr11_0,i8580). +Eq(i91,i91). +Le(i91,cr12_0). +Le(cr12_0,i104). +Le(i-30,cr12_0). +Le(i0,cr12_0). +Le(i13,cr12_0). +Le(i26,cr12_0). +Le(i39,cr12_0). +Le(i52,cr12_0). +Le(i60,cr12_0). +Le(i65,cr12_0). +Le(i70,cr12_0). +Le(i78,cr12_0). +Le(i90,cr12_0). +Le(cr12_0,i117). +Le(cr12_0,i130). +Le(cr12_0,i143). +Le(cr12_0,i156). +Le(cr12_0,i169). +Le(cr12_0,i182). +Le(cr12_0,i195). +Le(cr12_0,i208). +Le(cr12_0,i221). +Le(cr12_0,i234). +Le(cr12_0,i247). +Le(cr12_0,i260). +Le(cr12_0,i460). +Le(cr12_0,i530). +Le(cr12_0,i600). +Le(cr12_0,i660). +Le(cr12_0,i670). +Le(cr12_0,i710). +Le(cr12_0,i740). +Le(cr12_0,i810). +Le(cr12_0,i850). +Le(cr12_0,i880). +Le(cr12_0,i890). +Le(cr12_0,i920). +Le(cr12_0,i960). +Le(cr12_0,i990). +Le(cr12_0,i1030). +Le(cr12_0,i1060). +Le(cr12_0,i1100). +Le(cr12_0,i1130). +Le(cr12_0,i1170). +Le(cr12_0,i1200). +Le(cr12_0,i1240). +Le(cr12_0,i1260). +Le(cr12_0,i1270). +Le(cr12_0,i1290). +Le(cr12_0,i1310). +Le(cr12_0,i1320). +Le(cr12_0,i1330). +Le(cr12_0,i1350). +Le(cr12_0,i1360). +Le(cr12_0,i1380). +Le(cr12_0,i1390). +Le(cr12_0,i1420). +Le(cr12_0,i1430). +Le(cr12_0,i1450). +Le(cr12_0,i1460). +Le(cr12_0,i1490). +Le(cr12_0,i1520). +Le(cr12_0,i1530). +Le(cr12_0,i1540). +Le(cr12_0,i1560). +Le(cr12_0,i1590). +Le(cr12_0,i1630). +Le(cr12_0,i1660). +Le(cr12_0,i1700). +Le(cr12_0,i1730). +Le(cr12_0,i1760). +Le(cr12_0,i1770). +Le(cr12_0,i1810). +Le(cr12_0,i1840). +Le(cr12_0,i1880). +Le(cr12_0,i1910). +Le(cr12_0,i1950). +Le(cr12_0,i1980). +Le(cr12_0,i2020). +Le(cr12_0,i2050). +Le(cr12_0,i2090). +Le(cr12_0,i2120). +Le(cr12_0,i2160). +Le(cr12_0,i2190). +Le(cr12_0,i2200). +Le(cr12_0,i2230). +Le(cr12_0,i2270). +Le(cr12_0,i2300). +Le(cr12_0,i2340). +Le(cr12_0,i2370). +Le(cr12_0,i2410). +Le(cr12_0,i2420). +Le(cr12_0,i2440). +Le(cr12_0,i2480). +Le(cr12_0,i2510). +Le(cr12_0,i2550). +Le(cr12_0,i2580). +Le(cr12_0,i2620). +Le(cr12_0,i2640). +Le(cr12_0,i2660). +Le(cr12_0,i2730). +Le(cr12_0,i2760). +Le(cr12_0,i2800). +Le(cr12_0,i2830). +Le(cr12_0,i2860). +Le(cr12_0,i2870). +Le(cr12_0,i2940). +Le(cr12_0,i2970). +Le(cr12_0,i3010). +Le(cr12_0,i3040). +Le(cr12_0,i3080). +Le(cr12_0,i3120). +Le(cr12_0,i3150). +Le(cr12_0,i3220). +Le(cr12_0,i3260). +Le(cr12_0,i3290). +Le(cr12_0,i3300). +Le(cr12_0,i3330). +Le(cr12_0,i3400). +Le(cr12_0,i3430). +Le(cr12_0,i3500). +Le(cr12_0,i3520). +Le(cr12_0,i3580). +Le(cr12_0,i3610). +Le(cr12_0,i3650). +Le(cr12_0,i3680). +Le(cr12_0,i3720). +Le(cr12_0,i3740). +Le(cr12_0,i3790). +Le(cr12_0,i3820). +Le(cr12_0,i3860). +Le(cr12_0,i3960). +Le(cr12_0,i4040). +Le(cr12_0,i4140). +Le(cr12_0,i4180). +Le(cr12_0,i4400). +Le(cr12_0,i4620). +Le(cr12_0,i4840). 
+Le(cr12_0,i5060). +Le(cr12_0,i5280). +Le(cr12_0,i5500). +Le(cr12_0,i5720). +Le(cr12_0,i5940). +Le(cr12_0,i6160). +Le(cr12_0,i6380). +Le(cr12_0,i6600). +Le(cr12_0,i6820). +Le(cr12_0,i7040). +Le(cr12_0,i7260). +Le(cr12_0,i7480). +Le(cr12_0,i7700). +Le(cr12_0,i7920). +Le(cr12_0,i8140). +Le(cr12_0,i8360). +Le(cr12_0,i8580). +Eq(i104,i104). +Le(i104,cr13_0). +Le(cr13_0,i117). +Le(i-30,cr13_0). +Le(i0,cr13_0). +Le(i13,cr13_0). +Le(i26,cr13_0). +Le(i39,cr13_0). +Le(i52,cr13_0). +Le(i60,cr13_0). +Le(i65,cr13_0). +Le(i70,cr13_0). +Le(i78,cr13_0). +Le(i90,cr13_0). +Le(i91,cr13_0). +Le(cr13_0,i130). +Le(cr13_0,i143). +Le(cr13_0,i156). +Le(cr13_0,i169). +Le(cr13_0,i182). +Le(cr13_0,i195). +Le(cr13_0,i208). +Le(cr13_0,i221). +Le(cr13_0,i234). +Le(cr13_0,i247). +Le(cr13_0,i260). +Le(cr13_0,i460). +Le(cr13_0,i530). +Le(cr13_0,i600). +Le(cr13_0,i660). +Le(cr13_0,i670). +Le(cr13_0,i710). +Le(cr13_0,i740). +Le(cr13_0,i810). +Le(cr13_0,i850). +Le(cr13_0,i880). +Le(cr13_0,i890). +Le(cr13_0,i920). +Le(cr13_0,i960). +Le(cr13_0,i990). +Le(cr13_0,i1030). +Le(cr13_0,i1060). +Le(cr13_0,i1100). +Le(cr13_0,i1130). +Le(cr13_0,i1170). +Le(cr13_0,i1200). +Le(cr13_0,i1240). +Le(cr13_0,i1260). +Le(cr13_0,i1270). +Le(cr13_0,i1290). +Le(cr13_0,i1310). +Le(cr13_0,i1320). +Le(cr13_0,i1330). +Le(cr13_0,i1350). +Le(cr13_0,i1360). +Le(cr13_0,i1380). +Le(cr13_0,i1390). +Le(cr13_0,i1420). +Le(cr13_0,i1430). +Le(cr13_0,i1450). +Le(cr13_0,i1460). +Le(cr13_0,i1490). +Le(cr13_0,i1520). +Le(cr13_0,i1530). +Le(cr13_0,i1540). +Le(cr13_0,i1560). +Le(cr13_0,i1590). +Le(cr13_0,i1630). +Le(cr13_0,i1660). +Le(cr13_0,i1700). +Le(cr13_0,i1730). +Le(cr13_0,i1760). +Le(cr13_0,i1770). +Le(cr13_0,i1810). +Le(cr13_0,i1840). +Le(cr13_0,i1880). +Le(cr13_0,i1910). +Le(cr13_0,i1950). +Le(cr13_0,i1980). +Le(cr13_0,i2020). +Le(cr13_0,i2050). +Le(cr13_0,i2090). +Le(cr13_0,i2120). +Le(cr13_0,i2160). +Le(cr13_0,i2190). +Le(cr13_0,i2200). +Le(cr13_0,i2230). +Le(cr13_0,i2270). +Le(cr13_0,i2300). +Le(cr13_0,i2340). +Le(cr13_0,i2370). +Le(cr13_0,i2410). +Le(cr13_0,i2420). +Le(cr13_0,i2440). +Le(cr13_0,i2480). +Le(cr13_0,i2510). +Le(cr13_0,i2550). +Le(cr13_0,i2580). +Le(cr13_0,i2620). +Le(cr13_0,i2640). +Le(cr13_0,i2660). +Le(cr13_0,i2730). +Le(cr13_0,i2760). +Le(cr13_0,i2800). +Le(cr13_0,i2830). +Le(cr13_0,i2860). +Le(cr13_0,i2870). +Le(cr13_0,i2940). +Le(cr13_0,i2970). +Le(cr13_0,i3010). +Le(cr13_0,i3040). +Le(cr13_0,i3080). +Le(cr13_0,i3120). +Le(cr13_0,i3150). +Le(cr13_0,i3220). +Le(cr13_0,i3260). +Le(cr13_0,i3290). +Le(cr13_0,i3300). +Le(cr13_0,i3330). +Le(cr13_0,i3400). +Le(cr13_0,i3430). +Le(cr13_0,i3500). +Le(cr13_0,i3520). +Le(cr13_0,i3580). +Le(cr13_0,i3610). +Le(cr13_0,i3650). +Le(cr13_0,i3680). +Le(cr13_0,i3720). +Le(cr13_0,i3740). +Le(cr13_0,i3790). +Le(cr13_0,i3820). +Le(cr13_0,i3860). +Le(cr13_0,i3960). +Le(cr13_0,i4040). +Le(cr13_0,i4140). +Le(cr13_0,i4180). +Le(cr13_0,i4400). +Le(cr13_0,i4620). +Le(cr13_0,i4840). +Le(cr13_0,i5060). +Le(cr13_0,i5280). +Le(cr13_0,i5500). +Le(cr13_0,i5720). +Le(cr13_0,i5940). +Le(cr13_0,i6160). +Le(cr13_0,i6380). +Le(cr13_0,i6600). +Le(cr13_0,i6820). +Le(cr13_0,i7040). +Le(cr13_0,i7260). +Le(cr13_0,i7480). +Le(cr13_0,i7700). +Le(cr13_0,i7920). +Le(cr13_0,i8140). +Le(cr13_0,i8360). +Le(cr13_0,i8580). +Eq(i117,i117). +Le(i117,cr14_0). +Le(cr14_0,i130). +Le(i-30,cr14_0). +Le(i0,cr14_0). +Le(i13,cr14_0). +Le(i26,cr14_0). +Le(i39,cr14_0). +Le(i52,cr14_0). +Le(i60,cr14_0). +Le(i65,cr14_0). +Le(i70,cr14_0). +Le(i78,cr14_0). +Le(i90,cr14_0). +Le(i91,cr14_0). +Le(i104,cr14_0). +Le(cr14_0,i143). +Le(cr14_0,i156). 
+Le(cr14_0,i169). +Le(cr14_0,i182). +Le(cr14_0,i195). +Le(cr14_0,i208). +Le(cr14_0,i221). +Le(cr14_0,i234). +Le(cr14_0,i247). +Le(cr14_0,i260). +Le(cr14_0,i460). +Le(cr14_0,i530). +Le(cr14_0,i600). +Le(cr14_0,i660). +Le(cr14_0,i670). +Le(cr14_0,i710). +Le(cr14_0,i740). +Le(cr14_0,i810). +Le(cr14_0,i850). +Le(cr14_0,i880). +Le(cr14_0,i890). +Le(cr14_0,i920). +Le(cr14_0,i960). +Le(cr14_0,i990). +Le(cr14_0,i1030). +Le(cr14_0,i1060). +Le(cr14_0,i1100). +Le(cr14_0,i1130). +Le(cr14_0,i1170). +Le(cr14_0,i1200). +Le(cr14_0,i1240). +Le(cr14_0,i1260). +Le(cr14_0,i1270). +Le(cr14_0,i1290). +Le(cr14_0,i1310). +Le(cr14_0,i1320). +Le(cr14_0,i1330). +Le(cr14_0,i1350). +Le(cr14_0,i1360). +Le(cr14_0,i1380). +Le(cr14_0,i1390). +Le(cr14_0,i1420). +Le(cr14_0,i1430). +Le(cr14_0,i1450). +Le(cr14_0,i1460). +Le(cr14_0,i1490). +Le(cr14_0,i1520). +Le(cr14_0,i1530). +Le(cr14_0,i1540). +Le(cr14_0,i1560). +Le(cr14_0,i1590). +Le(cr14_0,i1630). +Le(cr14_0,i1660). +Le(cr14_0,i1700). +Le(cr14_0,i1730). +Le(cr14_0,i1760). +Le(cr14_0,i1770). +Le(cr14_0,i1810). +Le(cr14_0,i1840). +Le(cr14_0,i1880). +Le(cr14_0,i1910). +Le(cr14_0,i1950). +Le(cr14_0,i1980). +Le(cr14_0,i2020). +Le(cr14_0,i2050). +Le(cr14_0,i2090). +Le(cr14_0,i2120). +Le(cr14_0,i2160). +Le(cr14_0,i2190). +Le(cr14_0,i2200). +Le(cr14_0,i2230). +Le(cr14_0,i2270). +Le(cr14_0,i2300). +Le(cr14_0,i2340). +Le(cr14_0,i2370). +Le(cr14_0,i2410). +Le(cr14_0,i2420). +Le(cr14_0,i2440). +Le(cr14_0,i2480). +Le(cr14_0,i2510). +Le(cr14_0,i2550). +Le(cr14_0,i2580). +Le(cr14_0,i2620). +Le(cr14_0,i2640). +Le(cr14_0,i2660). +Le(cr14_0,i2730). +Le(cr14_0,i2760). +Le(cr14_0,i2800). +Le(cr14_0,i2830). +Le(cr14_0,i2860). +Le(cr14_0,i2870). +Le(cr14_0,i2940). +Le(cr14_0,i2970). +Le(cr14_0,i3010). +Le(cr14_0,i3040). +Le(cr14_0,i3080). +Le(cr14_0,i3120). +Le(cr14_0,i3150). +Le(cr14_0,i3220). +Le(cr14_0,i3260). +Le(cr14_0,i3290). +Le(cr14_0,i3300). +Le(cr14_0,i3330). +Le(cr14_0,i3400). +Le(cr14_0,i3430). +Le(cr14_0,i3500). +Le(cr14_0,i3520). +Le(cr14_0,i3580). +Le(cr14_0,i3610). +Le(cr14_0,i3650). +Le(cr14_0,i3680). +Le(cr14_0,i3720). +Le(cr14_0,i3740). +Le(cr14_0,i3790). +Le(cr14_0,i3820). +Le(cr14_0,i3860). +Le(cr14_0,i3960). +Le(cr14_0,i4040). +Le(cr14_0,i4140). +Le(cr14_0,i4180). +Le(cr14_0,i4400). +Le(cr14_0,i4620). +Le(cr14_0,i4840). +Le(cr14_0,i5060). +Le(cr14_0,i5280). +Le(cr14_0,i5500). +Le(cr14_0,i5720). +Le(cr14_0,i5940). +Le(cr14_0,i6160). +Le(cr14_0,i6380). +Le(cr14_0,i6600). +Le(cr14_0,i6820). +Le(cr14_0,i7040). +Le(cr14_0,i7260). +Le(cr14_0,i7480). +Le(cr14_0,i7700). +Le(cr14_0,i7920). +Le(cr14_0,i8140). +Le(cr14_0,i8360). +Le(cr14_0,i8580). +Eq(i130,i130). +Le(i130,cr15_0). +Le(cr15_0,i143). +Le(i-30,cr15_0). +Le(i0,cr15_0). +Le(i13,cr15_0). +Le(i26,cr15_0). +Le(i39,cr15_0). +Le(i52,cr15_0). +Le(i60,cr15_0). +Le(i65,cr15_0). +Le(i70,cr15_0). +Le(i78,cr15_0). +Le(i90,cr15_0). +Le(i91,cr15_0). +Le(i104,cr15_0). +Le(i117,cr15_0). +Le(cr15_0,i156). +Le(cr15_0,i169). +Le(cr15_0,i182). +Le(cr15_0,i195). +Le(cr15_0,i208). +Le(cr15_0,i221). +Le(cr15_0,i234). +Le(cr15_0,i247). +Le(cr15_0,i260). +Le(cr15_0,i460). +Le(cr15_0,i530). +Le(cr15_0,i600). +Le(cr15_0,i660). +Le(cr15_0,i670). +Le(cr15_0,i710). +Le(cr15_0,i740). +Le(cr15_0,i810). +Le(cr15_0,i850). +Le(cr15_0,i880). +Le(cr15_0,i890). +Le(cr15_0,i920). +Le(cr15_0,i960). +Le(cr15_0,i990). +Le(cr15_0,i1030). +Le(cr15_0,i1060). +Le(cr15_0,i1100). +Le(cr15_0,i1130). +Le(cr15_0,i1170). +Le(cr15_0,i1200). +Le(cr15_0,i1240). +Le(cr15_0,i1260). +Le(cr15_0,i1270). +Le(cr15_0,i1290). +Le(cr15_0,i1310). +Le(cr15_0,i1320). +Le(cr15_0,i1330). 
+Le(cr15_0,i1350). +Le(cr15_0,i1360). +Le(cr15_0,i1380). +Le(cr15_0,i1390). +Le(cr15_0,i1420). +Le(cr15_0,i1430). +Le(cr15_0,i1450). +Le(cr15_0,i1460). +Le(cr15_0,i1490). +Le(cr15_0,i1520). +Le(cr15_0,i1530). +Le(cr15_0,i1540). +Le(cr15_0,i1560). +Le(cr15_0,i1590). +Le(cr15_0,i1630). +Le(cr15_0,i1660). +Le(cr15_0,i1700). +Le(cr15_0,i1730). +Le(cr15_0,i1760). +Le(cr15_0,i1770). +Le(cr15_0,i1810). +Le(cr15_0,i1840). +Le(cr15_0,i1880). +Le(cr15_0,i1910). +Le(cr15_0,i1950). +Le(cr15_0,i1980). +Le(cr15_0,i2020). +Le(cr15_0,i2050). +Le(cr15_0,i2090). +Le(cr15_0,i2120). +Le(cr15_0,i2160). +Le(cr15_0,i2190). +Le(cr15_0,i2200). +Le(cr15_0,i2230). +Le(cr15_0,i2270). +Le(cr15_0,i2300). +Le(cr15_0,i2340). +Le(cr15_0,i2370). +Le(cr15_0,i2410). +Le(cr15_0,i2420). +Le(cr15_0,i2440). +Le(cr15_0,i2480). +Le(cr15_0,i2510). +Le(cr15_0,i2550). +Le(cr15_0,i2580). +Le(cr15_0,i2620). +Le(cr15_0,i2640). +Le(cr15_0,i2660). +Le(cr15_0,i2730). +Le(cr15_0,i2760). +Le(cr15_0,i2800). +Le(cr15_0,i2830). +Le(cr15_0,i2860). +Le(cr15_0,i2870). +Le(cr15_0,i2940). +Le(cr15_0,i2970). +Le(cr15_0,i3010). +Le(cr15_0,i3040). +Le(cr15_0,i3080). +Le(cr15_0,i3120). +Le(cr15_0,i3150). +Le(cr15_0,i3220). +Le(cr15_0,i3260). +Le(cr15_0,i3290). +Le(cr15_0,i3300). +Le(cr15_0,i3330). +Le(cr15_0,i3400). +Le(cr15_0,i3430). +Le(cr15_0,i3500). +Le(cr15_0,i3520). +Le(cr15_0,i3580). +Le(cr15_0,i3610). +Le(cr15_0,i3650). +Le(cr15_0,i3680). +Le(cr15_0,i3720). +Le(cr15_0,i3740). +Le(cr15_0,i3790). +Le(cr15_0,i3820). +Le(cr15_0,i3860). +Le(cr15_0,i3960). +Le(cr15_0,i4040). +Le(cr15_0,i4140). +Le(cr15_0,i4180). +Le(cr15_0,i4400). +Le(cr15_0,i4620). +Le(cr15_0,i4840). +Le(cr15_0,i5060). +Le(cr15_0,i5280). +Le(cr15_0,i5500). +Le(cr15_0,i5720). +Le(cr15_0,i5940). +Le(cr15_0,i6160). +Le(cr15_0,i6380). +Le(cr15_0,i6600). +Le(cr15_0,i6820). +Le(cr15_0,i7040). +Le(cr15_0,i7260). +Le(cr15_0,i7480). +Le(cr15_0,i7700). +Le(cr15_0,i7920). +Le(cr15_0,i8140). +Le(cr15_0,i8360). +Le(cr15_0,i8580). +Eq(i143,i143). +Le(i143,cr16_0). +Le(cr16_0,i156). +Le(i-30,cr16_0). +Le(i0,cr16_0). +Le(i13,cr16_0). +Le(i26,cr16_0). +Le(i39,cr16_0). +Le(i52,cr16_0). +Le(i60,cr16_0). +Le(i65,cr16_0). +Le(i70,cr16_0). +Le(i78,cr16_0). +Le(i90,cr16_0). +Le(i91,cr16_0). +Le(i104,cr16_0). +Le(i117,cr16_0). +Le(i130,cr16_0). +Le(cr16_0,i169). +Le(cr16_0,i182). +Le(cr16_0,i195). +Le(cr16_0,i208). +Le(cr16_0,i221). +Le(cr16_0,i234). +Le(cr16_0,i247). +Le(cr16_0,i260). +Le(cr16_0,i460). +Le(cr16_0,i530). +Le(cr16_0,i600). +Le(cr16_0,i660). +Le(cr16_0,i670). +Le(cr16_0,i710). +Le(cr16_0,i740). +Le(cr16_0,i810). +Le(cr16_0,i850). +Le(cr16_0,i880). +Le(cr16_0,i890). +Le(cr16_0,i920). +Le(cr16_0,i960). +Le(cr16_0,i990). +Le(cr16_0,i1030). +Le(cr16_0,i1060). +Le(cr16_0,i1100). +Le(cr16_0,i1130). +Le(cr16_0,i1170). +Le(cr16_0,i1200). +Le(cr16_0,i1240). +Le(cr16_0,i1260). +Le(cr16_0,i1270). +Le(cr16_0,i1290). +Le(cr16_0,i1310). +Le(cr16_0,i1320). +Le(cr16_0,i1330). +Le(cr16_0,i1350). +Le(cr16_0,i1360). +Le(cr16_0,i1380). +Le(cr16_0,i1390). +Le(cr16_0,i1420). +Le(cr16_0,i1430). +Le(cr16_0,i1450). +Le(cr16_0,i1460). +Le(cr16_0,i1490). +Le(cr16_0,i1520). +Le(cr16_0,i1530). +Le(cr16_0,i1540). +Le(cr16_0,i1560). +Le(cr16_0,i1590). +Le(cr16_0,i1630). +Le(cr16_0,i1660). +Le(cr16_0,i1700). +Le(cr16_0,i1730). +Le(cr16_0,i1760). +Le(cr16_0,i1770). +Le(cr16_0,i1810). +Le(cr16_0,i1840). +Le(cr16_0,i1880). +Le(cr16_0,i1910). +Le(cr16_0,i1950). +Le(cr16_0,i1980). +Le(cr16_0,i2020). +Le(cr16_0,i2050). +Le(cr16_0,i2090). +Le(cr16_0,i2120). +Le(cr16_0,i2160). +Le(cr16_0,i2190). +Le(cr16_0,i2200). 
+Le(cr16_0,i2230). +Le(cr16_0,i2270). +Le(cr16_0,i2300). +Le(cr16_0,i2340). +Le(cr16_0,i2370). +Le(cr16_0,i2410). +Le(cr16_0,i2420). +Le(cr16_0,i2440). +Le(cr16_0,i2480). +Le(cr16_0,i2510). +Le(cr16_0,i2550). +Le(cr16_0,i2580). +Le(cr16_0,i2620). +Le(cr16_0,i2640). +Le(cr16_0,i2660). +Le(cr16_0,i2730). +Le(cr16_0,i2760). +Le(cr16_0,i2800). +Le(cr16_0,i2830). +Le(cr16_0,i2860). +Le(cr16_0,i2870). +Le(cr16_0,i2940). +Le(cr16_0,i2970). +Le(cr16_0,i3010). +Le(cr16_0,i3040). +Le(cr16_0,i3080). +Le(cr16_0,i3120). +Le(cr16_0,i3150). +Le(cr16_0,i3220). +Le(cr16_0,i3260). +Le(cr16_0,i3290). +Le(cr16_0,i3300). +Le(cr16_0,i3330). +Le(cr16_0,i3400). +Le(cr16_0,i3430). +Le(cr16_0,i3500). +Le(cr16_0,i3520). +Le(cr16_0,i3580). +Le(cr16_0,i3610). +Le(cr16_0,i3650). +Le(cr16_0,i3680). +Le(cr16_0,i3720). +Le(cr16_0,i3740). +Le(cr16_0,i3790). +Le(cr16_0,i3820). +Le(cr16_0,i3860). +Le(cr16_0,i3960). +Le(cr16_0,i4040). +Le(cr16_0,i4140). +Le(cr16_0,i4180). +Le(cr16_0,i4400). +Le(cr16_0,i4620). +Le(cr16_0,i4840). +Le(cr16_0,i5060). +Le(cr16_0,i5280). +Le(cr16_0,i5500). +Le(cr16_0,i5720). +Le(cr16_0,i5940). +Le(cr16_0,i6160). +Le(cr16_0,i6380). +Le(cr16_0,i6600). +Le(cr16_0,i6820). +Le(cr16_0,i7040). +Le(cr16_0,i7260). +Le(cr16_0,i7480). +Le(cr16_0,i7700). +Le(cr16_0,i7920). +Le(cr16_0,i8140). +Le(cr16_0,i8360). +Le(cr16_0,i8580). +Eq(i156,i156). +Le(i156,cr17_0). +Le(cr17_0,i169). +Le(i-30,cr17_0). +Le(i0,cr17_0). +Le(i13,cr17_0). +Le(i26,cr17_0). +Le(i39,cr17_0). +Le(i52,cr17_0). +Le(i60,cr17_0). +Le(i65,cr17_0). +Le(i70,cr17_0). +Le(i78,cr17_0). +Le(i90,cr17_0). +Le(i91,cr17_0). +Le(i104,cr17_0). +Le(i117,cr17_0). +Le(i130,cr17_0). +Le(i143,cr17_0). +Le(cr17_0,i182). +Le(cr17_0,i195). +Le(cr17_0,i208). +Le(cr17_0,i221). +Le(cr17_0,i234). +Le(cr17_0,i247). +Le(cr17_0,i260). +Le(cr17_0,i460). +Le(cr17_0,i530). +Le(cr17_0,i600). +Le(cr17_0,i660). +Le(cr17_0,i670). +Le(cr17_0,i710). +Le(cr17_0,i740). +Le(cr17_0,i810). +Le(cr17_0,i850). +Le(cr17_0,i880). +Le(cr17_0,i890). +Le(cr17_0,i920). +Le(cr17_0,i960). +Le(cr17_0,i990). +Le(cr17_0,i1030). +Le(cr17_0,i1060). +Le(cr17_0,i1100). +Le(cr17_0,i1130). +Le(cr17_0,i1170). +Le(cr17_0,i1200). +Le(cr17_0,i1240). +Le(cr17_0,i1260). +Le(cr17_0,i1270). +Le(cr17_0,i1290). +Le(cr17_0,i1310). +Le(cr17_0,i1320). +Le(cr17_0,i1330). +Le(cr17_0,i1350). +Le(cr17_0,i1360). +Le(cr17_0,i1380). +Le(cr17_0,i1390). +Le(cr17_0,i1420). +Le(cr17_0,i1430). +Le(cr17_0,i1450). +Le(cr17_0,i1460). +Le(cr17_0,i1490). +Le(cr17_0,i1520). +Le(cr17_0,i1530). +Le(cr17_0,i1540). +Le(cr17_0,i1560). +Le(cr17_0,i1590). +Le(cr17_0,i1630). +Le(cr17_0,i1660). +Le(cr17_0,i1700). +Le(cr17_0,i1730). +Le(cr17_0,i1760). +Le(cr17_0,i1770). +Le(cr17_0,i1810). +Le(cr17_0,i1840). +Le(cr17_0,i1880). +Le(cr17_0,i1910). +Le(cr17_0,i1950). +Le(cr17_0,i1980). +Le(cr17_0,i2020). +Le(cr17_0,i2050). +Le(cr17_0,i2090). +Le(cr17_0,i2120). +Le(cr17_0,i2160). +Le(cr17_0,i2190). +Le(cr17_0,i2200). +Le(cr17_0,i2230). +Le(cr17_0,i2270). +Le(cr17_0,i2300). +Le(cr17_0,i2340). +Le(cr17_0,i2370). +Le(cr17_0,i2410). +Le(cr17_0,i2420). +Le(cr17_0,i2440). +Le(cr17_0,i2480). +Le(cr17_0,i2510). +Le(cr17_0,i2550). +Le(cr17_0,i2580). +Le(cr17_0,i2620). +Le(cr17_0,i2640). +Le(cr17_0,i2660). +Le(cr17_0,i2730). +Le(cr17_0,i2760). +Le(cr17_0,i2800). +Le(cr17_0,i2830). +Le(cr17_0,i2860). +Le(cr17_0,i2870). +Le(cr17_0,i2940). +Le(cr17_0,i2970). +Le(cr17_0,i3010). +Le(cr17_0,i3040). +Le(cr17_0,i3080). +Le(cr17_0,i3120). +Le(cr17_0,i3150). +Le(cr17_0,i3220). +Le(cr17_0,i3260). +Le(cr17_0,i3290). +Le(cr17_0,i3300). +Le(cr17_0,i3330). 
+Le(cr17_0,i3400). +Le(cr17_0,i3430). +Le(cr17_0,i3500). +Le(cr17_0,i3520). +Le(cr17_0,i3580). +Le(cr17_0,i3610). +Le(cr17_0,i3650). +Le(cr17_0,i3680). +Le(cr17_0,i3720). +Le(cr17_0,i3740). +Le(cr17_0,i3790). +Le(cr17_0,i3820). +Le(cr17_0,i3860). +Le(cr17_0,i3960). +Le(cr17_0,i4040). +Le(cr17_0,i4140). +Le(cr17_0,i4180). +Le(cr17_0,i4400). +Le(cr17_0,i4620). +Le(cr17_0,i4840). +Le(cr17_0,i5060). +Le(cr17_0,i5280). +Le(cr17_0,i5500). +Le(cr17_0,i5720). +Le(cr17_0,i5940). +Le(cr17_0,i6160). +Le(cr17_0,i6380). +Le(cr17_0,i6600). +Le(cr17_0,i6820). +Le(cr17_0,i7040). +Le(cr17_0,i7260). +Le(cr17_0,i7480). +Le(cr17_0,i7700). +Le(cr17_0,i7920). +Le(cr17_0,i8140). +Le(cr17_0,i8360). +Le(cr17_0,i8580). +Eq(i169,i169). +Le(i169,cr18_0). +Le(cr18_0,i182). +Le(i-30,cr18_0). +Le(i0,cr18_0). +Le(i13,cr18_0). +Le(i26,cr18_0). +Le(i39,cr18_0). +Le(i52,cr18_0). +Le(i60,cr18_0). +Le(i65,cr18_0). +Le(i70,cr18_0). +Le(i78,cr18_0). +Le(i90,cr18_0). +Le(i91,cr18_0). +Le(i104,cr18_0). +Le(i117,cr18_0). +Le(i130,cr18_0). +Le(i143,cr18_0). +Le(i156,cr18_0). +Le(cr18_0,i195). +Le(cr18_0,i208). +Le(cr18_0,i221). +Le(cr18_0,i234). +Le(cr18_0,i247). +Le(cr18_0,i260). +Le(cr18_0,i460). +Le(cr18_0,i530). +Le(cr18_0,i600). +Le(cr18_0,i660). +Le(cr18_0,i670). +Le(cr18_0,i710). +Le(cr18_0,i740). +Le(cr18_0,i810). +Le(cr18_0,i850). +Le(cr18_0,i880). +Le(cr18_0,i890). +Le(cr18_0,i920). +Le(cr18_0,i960). +Le(cr18_0,i990). +Le(cr18_0,i1030). +Le(cr18_0,i1060). +Le(cr18_0,i1100). +Le(cr18_0,i1130). +Le(cr18_0,i1170). +Le(cr18_0,i1200). +Le(cr18_0,i1240). +Le(cr18_0,i1260). +Le(cr18_0,i1270). +Le(cr18_0,i1290). +Le(cr18_0,i1310). +Le(cr18_0,i1320). +Le(cr18_0,i1330). +Le(cr18_0,i1350). +Le(cr18_0,i1360). +Le(cr18_0,i1380). +Le(cr18_0,i1390). +Le(cr18_0,i1420). +Le(cr18_0,i1430). +Le(cr18_0,i1450). +Le(cr18_0,i1460). +Le(cr18_0,i1490). +Le(cr18_0,i1520). +Le(cr18_0,i1530). +Le(cr18_0,i1540). +Le(cr18_0,i1560). +Le(cr18_0,i1590). +Le(cr18_0,i1630). +Le(cr18_0,i1660). +Le(cr18_0,i1700). +Le(cr18_0,i1730). +Le(cr18_0,i1760). +Le(cr18_0,i1770). +Le(cr18_0,i1810). +Le(cr18_0,i1840). +Le(cr18_0,i1880). +Le(cr18_0,i1910). +Le(cr18_0,i1950). +Le(cr18_0,i1980). +Le(cr18_0,i2020). +Le(cr18_0,i2050). +Le(cr18_0,i2090). +Le(cr18_0,i2120). +Le(cr18_0,i2160). +Le(cr18_0,i2190). +Le(cr18_0,i2200). +Le(cr18_0,i2230). +Le(cr18_0,i2270). +Le(cr18_0,i2300). +Le(cr18_0,i2340). +Le(cr18_0,i2370). +Le(cr18_0,i2410). +Le(cr18_0,i2420). +Le(cr18_0,i2440). +Le(cr18_0,i2480). +Le(cr18_0,i2510). +Le(cr18_0,i2550). +Le(cr18_0,i2580). +Le(cr18_0,i2620). +Le(cr18_0,i2640). +Le(cr18_0,i2660). +Le(cr18_0,i2730). +Le(cr18_0,i2760). +Le(cr18_0,i2800). +Le(cr18_0,i2830). +Le(cr18_0,i2860). +Le(cr18_0,i2870). +Le(cr18_0,i2940). +Le(cr18_0,i2970). +Le(cr18_0,i3010). +Le(cr18_0,i3040). +Le(cr18_0,i3080). +Le(cr18_0,i3120). +Le(cr18_0,i3150). +Le(cr18_0,i3220). +Le(cr18_0,i3260). +Le(cr18_0,i3290). +Le(cr18_0,i3300). +Le(cr18_0,i3330). +Le(cr18_0,i3400). +Le(cr18_0,i3430). +Le(cr18_0,i3500). +Le(cr18_0,i3520). +Le(cr18_0,i3580). +Le(cr18_0,i3610). +Le(cr18_0,i3650). +Le(cr18_0,i3680). +Le(cr18_0,i3720). +Le(cr18_0,i3740). +Le(cr18_0,i3790). +Le(cr18_0,i3820). +Le(cr18_0,i3860). +Le(cr18_0,i3960). +Le(cr18_0,i4040). +Le(cr18_0,i4140). +Le(cr18_0,i4180). +Le(cr18_0,i4400). +Le(cr18_0,i4620). +Le(cr18_0,i4840). +Le(cr18_0,i5060). +Le(cr18_0,i5280). +Le(cr18_0,i5500). +Le(cr18_0,i5720). +Le(cr18_0,i5940). +Le(cr18_0,i6160). +Le(cr18_0,i6380). +Le(cr18_0,i6600). +Le(cr18_0,i6820). +Le(cr18_0,i7040). +Le(cr18_0,i7260). +Le(cr18_0,i7480). +Le(cr18_0,i7700). 
+Le(cr18_0,i7920). +Le(cr18_0,i8140). +Le(cr18_0,i8360). +Le(cr18_0,i8580). +Eq(i182,i182). +Le(i182,cr19_0). +Le(cr19_0,i195). +Le(i-30,cr19_0). +Le(i0,cr19_0). +Le(i13,cr19_0). +Le(i26,cr19_0). +Le(i39,cr19_0). +Le(i52,cr19_0). +Le(i60,cr19_0). +Le(i65,cr19_0). +Le(i70,cr19_0). +Le(i78,cr19_0). +Le(i90,cr19_0). +Le(i91,cr19_0). +Le(i104,cr19_0). +Le(i117,cr19_0). +Le(i130,cr19_0). +Le(i143,cr19_0). +Le(i156,cr19_0). +Le(i169,cr19_0). +Le(cr19_0,i208). +Le(cr19_0,i221). +Le(cr19_0,i234). +Le(cr19_0,i247). +Le(cr19_0,i260). +Le(cr19_0,i460). +Le(cr19_0,i530). +Le(cr19_0,i600). +Le(cr19_0,i660). +Le(cr19_0,i670). +Le(cr19_0,i710). +Le(cr19_0,i740). +Le(cr19_0,i810). +Le(cr19_0,i850). +Le(cr19_0,i880). +Le(cr19_0,i890). +Le(cr19_0,i920). +Le(cr19_0,i960). +Le(cr19_0,i990). +Le(cr19_0,i1030). +Le(cr19_0,i1060). +Le(cr19_0,i1100). +Le(cr19_0,i1130). +Le(cr19_0,i1170). +Le(cr19_0,i1200). +Le(cr19_0,i1240). +Le(cr19_0,i1260). +Le(cr19_0,i1270). +Le(cr19_0,i1290). +Le(cr19_0,i1310). +Le(cr19_0,i1320). +Le(cr19_0,i1330). +Le(cr19_0,i1350). +Le(cr19_0,i1360). +Le(cr19_0,i1380). +Le(cr19_0,i1390). +Le(cr19_0,i1420). +Le(cr19_0,i1430). +Le(cr19_0,i1450). +Le(cr19_0,i1460). +Le(cr19_0,i1490). +Le(cr19_0,i1520). +Le(cr19_0,i1530). +Le(cr19_0,i1540). +Le(cr19_0,i1560). +Le(cr19_0,i1590). +Le(cr19_0,i1630). +Le(cr19_0,i1660). +Le(cr19_0,i1700). +Le(cr19_0,i1730). +Le(cr19_0,i1760). +Le(cr19_0,i1770). +Le(cr19_0,i1810). +Le(cr19_0,i1840). +Le(cr19_0,i1880). +Le(cr19_0,i1910). +Le(cr19_0,i1950). +Le(cr19_0,i1980). +Le(cr19_0,i2020). +Le(cr19_0,i2050). +Le(cr19_0,i2090). +Le(cr19_0,i2120). +Le(cr19_0,i2160). +Le(cr19_0,i2190). +Le(cr19_0,i2200). +Le(cr19_0,i2230). +Le(cr19_0,i2270). +Le(cr19_0,i2300). +Le(cr19_0,i2340). +Le(cr19_0,i2370). +Le(cr19_0,i2410). +Le(cr19_0,i2420). +Le(cr19_0,i2440). +Le(cr19_0,i2480). +Le(cr19_0,i2510). +Le(cr19_0,i2550). +Le(cr19_0,i2580). +Le(cr19_0,i2620). +Le(cr19_0,i2640). +Le(cr19_0,i2660). +Le(cr19_0,i2730). +Le(cr19_0,i2760). +Le(cr19_0,i2800). +Le(cr19_0,i2830). +Le(cr19_0,i2860). +Le(cr19_0,i2870). +Le(cr19_0,i2940). +Le(cr19_0,i2970). +Le(cr19_0,i3010). +Le(cr19_0,i3040). +Le(cr19_0,i3080). +Le(cr19_0,i3120). +Le(cr19_0,i3150). +Le(cr19_0,i3220). +Le(cr19_0,i3260). +Le(cr19_0,i3290). +Le(cr19_0,i3300). +Le(cr19_0,i3330). +Le(cr19_0,i3400). +Le(cr19_0,i3430). +Le(cr19_0,i3500). +Le(cr19_0,i3520). +Le(cr19_0,i3580). +Le(cr19_0,i3610). +Le(cr19_0,i3650). +Le(cr19_0,i3680). +Le(cr19_0,i3720). +Le(cr19_0,i3740). +Le(cr19_0,i3790). +Le(cr19_0,i3820). +Le(cr19_0,i3860). +Le(cr19_0,i3960). +Le(cr19_0,i4040). +Le(cr19_0,i4140). +Le(cr19_0,i4180). +Le(cr19_0,i4400). +Le(cr19_0,i4620). +Le(cr19_0,i4840). +Le(cr19_0,i5060). +Le(cr19_0,i5280). +Le(cr19_0,i5500). +Le(cr19_0,i5720). +Le(cr19_0,i5940). +Le(cr19_0,i6160). +Le(cr19_0,i6380). +Le(cr19_0,i6600). +Le(cr19_0,i6820). +Le(cr19_0,i7040). +Le(cr19_0,i7260). +Le(cr19_0,i7480). +Le(cr19_0,i7700). +Le(cr19_0,i7920). +Le(cr19_0,i8140). +Le(cr19_0,i8360). +Le(cr19_0,i8580). +Eq(i195,i195). +Le(i195,cr20_0). +Le(cr20_0,i208). +Le(i-30,cr20_0). +Le(i0,cr20_0). +Le(i13,cr20_0). +Le(i26,cr20_0). +Le(i39,cr20_0). +Le(i52,cr20_0). +Le(i60,cr20_0). +Le(i65,cr20_0). +Le(i70,cr20_0). +Le(i78,cr20_0). +Le(i90,cr20_0). +Le(i91,cr20_0). +Le(i104,cr20_0). +Le(i117,cr20_0). +Le(i130,cr20_0). +Le(i143,cr20_0). +Le(i156,cr20_0). +Le(i169,cr20_0). +Le(i182,cr20_0). +Le(cr20_0,i221). +Le(cr20_0,i234). +Le(cr20_0,i247). +Le(cr20_0,i260). +Le(cr20_0,i460). +Le(cr20_0,i530). +Le(cr20_0,i600). +Le(cr20_0,i660). +Le(cr20_0,i670). +Le(cr20_0,i710). 
[Flattened machine-generated fact block, summarized: this span of the added data file continues the linear-order saturation over the labelled nulls cr20_0 through cr45_0. The file orders the integer constants
i-30, i0, i13, i26, i39, i52, i60, i65, i70, i78, i90, i91, i104, i117, i130, i143, i156, i169, i182, i195, i208, i221, i234, i247, i260, i460, i530, i600, i660, i670, i710, i740, i810, i850, i880, i890, i920, i960, i990, i1030, i1060, i1100, i1130, i1170, i1200, i1240, i1260, i1270, i1290, i1310, i1320, i1330, i1350, i1360, i1380, i1390, i1420, i1430, i1450, i1460, i1490, i1520, i1530, i1540, i1560, i1590, i1630, i1660, i1700, i1730, i1760, i1770, i1810, i1840, i1880, i1910, i1950, i1980, i2020, i2050, i2090, i2120, i2160, i2190, i2200, i2230, i2270, i2300, i2340, i2370, i2410, i2420, i2440, i2480, i2510, i2550, i2580, i2620, i2640, i2660, i2730, i2760, i2800, i2830, i2860, i2870, i2940, i2970, i3010, i3040, i3080, i3120, i3150, i3220, i3260, i3290, i3300, i3330, i3400, i3430, i3500, i3520, i3580, i3610, i3650, i3680, i3720, i3740, i3790, i3820, i3860, i3960, i4040, i4140, i4180, i4400, i4620, i4840, i5060, i5280, i5500, i5720, i5940, i6160, i6380, i6600, i6820, i7040, i7260, i7480, i7700, i7920, i8140, i8360, i8580
and places each null crN_0 in this span between the Nth and (N+1)th constant of that sequence. For each crN_0 with neighbouring constants i_j < i_k, it asserts, one fact per diff line: Eq(i_j,i_j). then Le(i_j,crN_0). and Le(crN_0,i_k)., followed by Le(i_m,crN_0). for every constant i_m below i_j and Le(crN_0,i_m). for every constant i_m above i_k. Concretely, this span carries the remaining upper bounds of cr20_0 (from Le(cr20_0,i740). up to Le(cr20_0,i8580).), the complete blocks for cr21_0 (between i208 and i221) through cr44_0 (between i1170 and i1200), and the block for cr45_0 (between i1200 and i1240) as far as Le(cr45_0,i3330)., which continues below.]
+Le(cr45_0,i3400). +Le(cr45_0,i3430). +Le(cr45_0,i3500). +Le(cr45_0,i3520). +Le(cr45_0,i3580). +Le(cr45_0,i3610). +Le(cr45_0,i3650). +Le(cr45_0,i3680). +Le(cr45_0,i3720). +Le(cr45_0,i3740). +Le(cr45_0,i3790). +Le(cr45_0,i3820). +Le(cr45_0,i3860). +Le(cr45_0,i3960). +Le(cr45_0,i4040). +Le(cr45_0,i4140). +Le(cr45_0,i4180). +Le(cr45_0,i4400). +Le(cr45_0,i4620). +Le(cr45_0,i4840). +Le(cr45_0,i5060). +Le(cr45_0,i5280). +Le(cr45_0,i5500). +Le(cr45_0,i5720). +Le(cr45_0,i5940). +Le(cr45_0,i6160). +Le(cr45_0,i6380). +Le(cr45_0,i6600). +Le(cr45_0,i6820). +Le(cr45_0,i7040). +Le(cr45_0,i7260). +Le(cr45_0,i7480). +Le(cr45_0,i7700). +Le(cr45_0,i7920). +Le(cr45_0,i8140). +Le(cr45_0,i8360). +Le(cr45_0,i8580). +Eq(i1240,i1240). +Le(i1240,cr46_0). +Le(cr46_0,i1260). +Le(i-30,cr46_0). +Le(i0,cr46_0). +Le(i13,cr46_0). +Le(i26,cr46_0). +Le(i39,cr46_0). +Le(i52,cr46_0). +Le(i60,cr46_0). +Le(i65,cr46_0). +Le(i70,cr46_0). +Le(i78,cr46_0). +Le(i90,cr46_0). +Le(i91,cr46_0). +Le(i104,cr46_0). +Le(i117,cr46_0). +Le(i130,cr46_0). +Le(i143,cr46_0). +Le(i156,cr46_0). +Le(i169,cr46_0). +Le(i182,cr46_0). +Le(i195,cr46_0). +Le(i208,cr46_0). +Le(i221,cr46_0). +Le(i234,cr46_0). +Le(i247,cr46_0). +Le(i260,cr46_0). +Le(i460,cr46_0). +Le(i530,cr46_0). +Le(i600,cr46_0). +Le(i660,cr46_0). +Le(i670,cr46_0). +Le(i710,cr46_0). +Le(i740,cr46_0). +Le(i810,cr46_0). +Le(i850,cr46_0). +Le(i880,cr46_0). +Le(i890,cr46_0). +Le(i920,cr46_0). +Le(i960,cr46_0). +Le(i990,cr46_0). +Le(i1030,cr46_0). +Le(i1060,cr46_0). +Le(i1100,cr46_0). +Le(i1130,cr46_0). +Le(i1170,cr46_0). +Le(i1200,cr46_0). +Le(cr46_0,i1270). +Le(cr46_0,i1290). +Le(cr46_0,i1310). +Le(cr46_0,i1320). +Le(cr46_0,i1330). +Le(cr46_0,i1350). +Le(cr46_0,i1360). +Le(cr46_0,i1380). +Le(cr46_0,i1390). +Le(cr46_0,i1420). +Le(cr46_0,i1430). +Le(cr46_0,i1450). +Le(cr46_0,i1460). +Le(cr46_0,i1490). +Le(cr46_0,i1520). +Le(cr46_0,i1530). +Le(cr46_0,i1540). +Le(cr46_0,i1560). +Le(cr46_0,i1590). +Le(cr46_0,i1630). +Le(cr46_0,i1660). +Le(cr46_0,i1700). +Le(cr46_0,i1730). +Le(cr46_0,i1760). +Le(cr46_0,i1770). +Le(cr46_0,i1810). +Le(cr46_0,i1840). +Le(cr46_0,i1880). +Le(cr46_0,i1910). +Le(cr46_0,i1950). +Le(cr46_0,i1980). +Le(cr46_0,i2020). +Le(cr46_0,i2050). +Le(cr46_0,i2090). +Le(cr46_0,i2120). +Le(cr46_0,i2160). +Le(cr46_0,i2190). +Le(cr46_0,i2200). +Le(cr46_0,i2230). +Le(cr46_0,i2270). +Le(cr46_0,i2300). +Le(cr46_0,i2340). +Le(cr46_0,i2370). +Le(cr46_0,i2410). +Le(cr46_0,i2420). +Le(cr46_0,i2440). +Le(cr46_0,i2480). +Le(cr46_0,i2510). +Le(cr46_0,i2550). +Le(cr46_0,i2580). +Le(cr46_0,i2620). +Le(cr46_0,i2640). +Le(cr46_0,i2660). +Le(cr46_0,i2730). +Le(cr46_0,i2760). +Le(cr46_0,i2800). +Le(cr46_0,i2830). +Le(cr46_0,i2860). +Le(cr46_0,i2870). +Le(cr46_0,i2940). +Le(cr46_0,i2970). +Le(cr46_0,i3010). +Le(cr46_0,i3040). +Le(cr46_0,i3080). +Le(cr46_0,i3120). +Le(cr46_0,i3150). +Le(cr46_0,i3220). +Le(cr46_0,i3260). +Le(cr46_0,i3290). +Le(cr46_0,i3300). +Le(cr46_0,i3330). +Le(cr46_0,i3400). +Le(cr46_0,i3430). +Le(cr46_0,i3500). +Le(cr46_0,i3520). +Le(cr46_0,i3580). +Le(cr46_0,i3610). +Le(cr46_0,i3650). +Le(cr46_0,i3680). +Le(cr46_0,i3720). +Le(cr46_0,i3740). +Le(cr46_0,i3790). +Le(cr46_0,i3820). +Le(cr46_0,i3860). +Le(cr46_0,i3960). +Le(cr46_0,i4040). +Le(cr46_0,i4140). +Le(cr46_0,i4180). +Le(cr46_0,i4400). +Le(cr46_0,i4620). +Le(cr46_0,i4840). +Le(cr46_0,i5060). +Le(cr46_0,i5280). +Le(cr46_0,i5500). +Le(cr46_0,i5720). +Le(cr46_0,i5940). +Le(cr46_0,i6160). +Le(cr46_0,i6380). +Le(cr46_0,i6600). +Le(cr46_0,i6820). +Le(cr46_0,i7040). +Le(cr46_0,i7260). +Le(cr46_0,i7480). +Le(cr46_0,i7700). 
+Le(cr46_0,i7920). +Le(cr46_0,i8140). +Le(cr46_0,i8360). +Le(cr46_0,i8580). +Eq(i1260,i1260). +Le(i1260,cr47_0). +Le(cr47_0,i1270). +Le(i-30,cr47_0). +Le(i0,cr47_0). +Le(i13,cr47_0). +Le(i26,cr47_0). +Le(i39,cr47_0). +Le(i52,cr47_0). +Le(i60,cr47_0). +Le(i65,cr47_0). +Le(i70,cr47_0). +Le(i78,cr47_0). +Le(i90,cr47_0). +Le(i91,cr47_0). +Le(i104,cr47_0). +Le(i117,cr47_0). +Le(i130,cr47_0). +Le(i143,cr47_0). +Le(i156,cr47_0). +Le(i169,cr47_0). +Le(i182,cr47_0). +Le(i195,cr47_0). +Le(i208,cr47_0). +Le(i221,cr47_0). +Le(i234,cr47_0). +Le(i247,cr47_0). +Le(i260,cr47_0). +Le(i460,cr47_0). +Le(i530,cr47_0). +Le(i600,cr47_0). +Le(i660,cr47_0). +Le(i670,cr47_0). +Le(i710,cr47_0). +Le(i740,cr47_0). +Le(i810,cr47_0). +Le(i850,cr47_0). +Le(i880,cr47_0). +Le(i890,cr47_0). +Le(i920,cr47_0). +Le(i960,cr47_0). +Le(i990,cr47_0). +Le(i1030,cr47_0). +Le(i1060,cr47_0). +Le(i1100,cr47_0). +Le(i1130,cr47_0). +Le(i1170,cr47_0). +Le(i1200,cr47_0). +Le(i1240,cr47_0). +Le(cr47_0,i1290). +Le(cr47_0,i1310). +Le(cr47_0,i1320). +Le(cr47_0,i1330). +Le(cr47_0,i1350). +Le(cr47_0,i1360). +Le(cr47_0,i1380). +Le(cr47_0,i1390). +Le(cr47_0,i1420). +Le(cr47_0,i1430). +Le(cr47_0,i1450). +Le(cr47_0,i1460). +Le(cr47_0,i1490). +Le(cr47_0,i1520). +Le(cr47_0,i1530). +Le(cr47_0,i1540). +Le(cr47_0,i1560). +Le(cr47_0,i1590). +Le(cr47_0,i1630). +Le(cr47_0,i1660). +Le(cr47_0,i1700). +Le(cr47_0,i1730). +Le(cr47_0,i1760). +Le(cr47_0,i1770). +Le(cr47_0,i1810). +Le(cr47_0,i1840). +Le(cr47_0,i1880). +Le(cr47_0,i1910). +Le(cr47_0,i1950). +Le(cr47_0,i1980). +Le(cr47_0,i2020). +Le(cr47_0,i2050). +Le(cr47_0,i2090). +Le(cr47_0,i2120). +Le(cr47_0,i2160). +Le(cr47_0,i2190). +Le(cr47_0,i2200). +Le(cr47_0,i2230). +Le(cr47_0,i2270). +Le(cr47_0,i2300). +Le(cr47_0,i2340). +Le(cr47_0,i2370). +Le(cr47_0,i2410). +Le(cr47_0,i2420). +Le(cr47_0,i2440). +Le(cr47_0,i2480). +Le(cr47_0,i2510). +Le(cr47_0,i2550). +Le(cr47_0,i2580). +Le(cr47_0,i2620). +Le(cr47_0,i2640). +Le(cr47_0,i2660). +Le(cr47_0,i2730). +Le(cr47_0,i2760). +Le(cr47_0,i2800). +Le(cr47_0,i2830). +Le(cr47_0,i2860). +Le(cr47_0,i2870). +Le(cr47_0,i2940). +Le(cr47_0,i2970). +Le(cr47_0,i3010). +Le(cr47_0,i3040). +Le(cr47_0,i3080). +Le(cr47_0,i3120). +Le(cr47_0,i3150). +Le(cr47_0,i3220). +Le(cr47_0,i3260). +Le(cr47_0,i3290). +Le(cr47_0,i3300). +Le(cr47_0,i3330). +Le(cr47_0,i3400). +Le(cr47_0,i3430). +Le(cr47_0,i3500). +Le(cr47_0,i3520). +Le(cr47_0,i3580). +Le(cr47_0,i3610). +Le(cr47_0,i3650). +Le(cr47_0,i3680). +Le(cr47_0,i3720). +Le(cr47_0,i3740). +Le(cr47_0,i3790). +Le(cr47_0,i3820). +Le(cr47_0,i3860). +Le(cr47_0,i3960). +Le(cr47_0,i4040). +Le(cr47_0,i4140). +Le(cr47_0,i4180). +Le(cr47_0,i4400). +Le(cr47_0,i4620). +Le(cr47_0,i4840). +Le(cr47_0,i5060). +Le(cr47_0,i5280). +Le(cr47_0,i5500). +Le(cr47_0,i5720). +Le(cr47_0,i5940). +Le(cr47_0,i6160). +Le(cr47_0,i6380). +Le(cr47_0,i6600). +Le(cr47_0,i6820). +Le(cr47_0,i7040). +Le(cr47_0,i7260). +Le(cr47_0,i7480). +Le(cr47_0,i7700). +Le(cr47_0,i7920). +Le(cr47_0,i8140). +Le(cr47_0,i8360). +Le(cr47_0,i8580). +Eq(i1270,i1270). +Le(i1270,cr48_0). +Le(cr48_0,i1290). +Le(i-30,cr48_0). +Le(i0,cr48_0). +Le(i13,cr48_0). +Le(i26,cr48_0). +Le(i39,cr48_0). +Le(i52,cr48_0). +Le(i60,cr48_0). +Le(i65,cr48_0). +Le(i70,cr48_0). +Le(i78,cr48_0). +Le(i90,cr48_0). +Le(i91,cr48_0). +Le(i104,cr48_0). +Le(i117,cr48_0). +Le(i130,cr48_0). +Le(i143,cr48_0). +Le(i156,cr48_0). +Le(i169,cr48_0). +Le(i182,cr48_0). +Le(i195,cr48_0). +Le(i208,cr48_0). +Le(i221,cr48_0). +Le(i234,cr48_0). +Le(i247,cr48_0). +Le(i260,cr48_0). +Le(i460,cr48_0). +Le(i530,cr48_0). +Le(i600,cr48_0). +Le(i660,cr48_0). 
+Le(i670,cr48_0). +Le(i710,cr48_0). +Le(i740,cr48_0). +Le(i810,cr48_0). +Le(i850,cr48_0). +Le(i880,cr48_0). +Le(i890,cr48_0). +Le(i920,cr48_0). +Le(i960,cr48_0). +Le(i990,cr48_0). +Le(i1030,cr48_0). +Le(i1060,cr48_0). +Le(i1100,cr48_0). +Le(i1130,cr48_0). +Le(i1170,cr48_0). +Le(i1200,cr48_0). +Le(i1240,cr48_0). +Le(i1260,cr48_0). +Le(cr48_0,i1310). +Le(cr48_0,i1320). +Le(cr48_0,i1330). +Le(cr48_0,i1350). +Le(cr48_0,i1360). +Le(cr48_0,i1380). +Le(cr48_0,i1390). +Le(cr48_0,i1420). +Le(cr48_0,i1430). +Le(cr48_0,i1450). +Le(cr48_0,i1460). +Le(cr48_0,i1490). +Le(cr48_0,i1520). +Le(cr48_0,i1530). +Le(cr48_0,i1540). +Le(cr48_0,i1560). +Le(cr48_0,i1590). +Le(cr48_0,i1630). +Le(cr48_0,i1660). +Le(cr48_0,i1700). +Le(cr48_0,i1730). +Le(cr48_0,i1760). +Le(cr48_0,i1770). +Le(cr48_0,i1810). +Le(cr48_0,i1840). +Le(cr48_0,i1880). +Le(cr48_0,i1910). +Le(cr48_0,i1950). +Le(cr48_0,i1980). +Le(cr48_0,i2020). +Le(cr48_0,i2050). +Le(cr48_0,i2090). +Le(cr48_0,i2120). +Le(cr48_0,i2160). +Le(cr48_0,i2190). +Le(cr48_0,i2200). +Le(cr48_0,i2230). +Le(cr48_0,i2270). +Le(cr48_0,i2300). +Le(cr48_0,i2340). +Le(cr48_0,i2370). +Le(cr48_0,i2410). +Le(cr48_0,i2420). +Le(cr48_0,i2440). +Le(cr48_0,i2480). +Le(cr48_0,i2510). +Le(cr48_0,i2550). +Le(cr48_0,i2580). +Le(cr48_0,i2620). +Le(cr48_0,i2640). +Le(cr48_0,i2660). +Le(cr48_0,i2730). +Le(cr48_0,i2760). +Le(cr48_0,i2800). +Le(cr48_0,i2830). +Le(cr48_0,i2860). +Le(cr48_0,i2870). +Le(cr48_0,i2940). +Le(cr48_0,i2970). +Le(cr48_0,i3010). +Le(cr48_0,i3040). +Le(cr48_0,i3080). +Le(cr48_0,i3120). +Le(cr48_0,i3150). +Le(cr48_0,i3220). +Le(cr48_0,i3260). +Le(cr48_0,i3290). +Le(cr48_0,i3300). +Le(cr48_0,i3330). +Le(cr48_0,i3400). +Le(cr48_0,i3430). +Le(cr48_0,i3500). +Le(cr48_0,i3520). +Le(cr48_0,i3580). +Le(cr48_0,i3610). +Le(cr48_0,i3650). +Le(cr48_0,i3680). +Le(cr48_0,i3720). +Le(cr48_0,i3740). +Le(cr48_0,i3790). +Le(cr48_0,i3820). +Le(cr48_0,i3860). +Le(cr48_0,i3960). +Le(cr48_0,i4040). +Le(cr48_0,i4140). +Le(cr48_0,i4180). +Le(cr48_0,i4400). +Le(cr48_0,i4620). +Le(cr48_0,i4840). +Le(cr48_0,i5060). +Le(cr48_0,i5280). +Le(cr48_0,i5500). +Le(cr48_0,i5720). +Le(cr48_0,i5940). +Le(cr48_0,i6160). +Le(cr48_0,i6380). +Le(cr48_0,i6600). +Le(cr48_0,i6820). +Le(cr48_0,i7040). +Le(cr48_0,i7260). +Le(cr48_0,i7480). +Le(cr48_0,i7700). +Le(cr48_0,i7920). +Le(cr48_0,i8140). +Le(cr48_0,i8360). +Le(cr48_0,i8580). +Eq(i1290,i1290). +Le(i1290,cr49_0). +Le(cr49_0,i1310). +Le(i-30,cr49_0). +Le(i0,cr49_0). +Le(i13,cr49_0). +Le(i26,cr49_0). +Le(i39,cr49_0). +Le(i52,cr49_0). +Le(i60,cr49_0). +Le(i65,cr49_0). +Le(i70,cr49_0). +Le(i78,cr49_0). +Le(i90,cr49_0). +Le(i91,cr49_0). +Le(i104,cr49_0). +Le(i117,cr49_0). +Le(i130,cr49_0). +Le(i143,cr49_0). +Le(i156,cr49_0). +Le(i169,cr49_0). +Le(i182,cr49_0). +Le(i195,cr49_0). +Le(i208,cr49_0). +Le(i221,cr49_0). +Le(i234,cr49_0). +Le(i247,cr49_0). +Le(i260,cr49_0). +Le(i460,cr49_0). +Le(i530,cr49_0). +Le(i600,cr49_0). +Le(i660,cr49_0). +Le(i670,cr49_0). +Le(i710,cr49_0). +Le(i740,cr49_0). +Le(i810,cr49_0). +Le(i850,cr49_0). +Le(i880,cr49_0). +Le(i890,cr49_0). +Le(i920,cr49_0). +Le(i960,cr49_0). +Le(i990,cr49_0). +Le(i1030,cr49_0). +Le(i1060,cr49_0). +Le(i1100,cr49_0). +Le(i1130,cr49_0). +Le(i1170,cr49_0). +Le(i1200,cr49_0). +Le(i1240,cr49_0). +Le(i1260,cr49_0). +Le(i1270,cr49_0). +Le(cr49_0,i1320). +Le(cr49_0,i1330). +Le(cr49_0,i1350). +Le(cr49_0,i1360). +Le(cr49_0,i1380). +Le(cr49_0,i1390). +Le(cr49_0,i1420). +Le(cr49_0,i1430). +Le(cr49_0,i1450). +Le(cr49_0,i1460). +Le(cr49_0,i1490). +Le(cr49_0,i1520). +Le(cr49_0,i1530). +Le(cr49_0,i1540). +Le(cr49_0,i1560). 
+Le(cr49_0,i1590). +Le(cr49_0,i1630). +Le(cr49_0,i1660). +Le(cr49_0,i1700). +Le(cr49_0,i1730). +Le(cr49_0,i1760). +Le(cr49_0,i1770). +Le(cr49_0,i1810). +Le(cr49_0,i1840). +Le(cr49_0,i1880). +Le(cr49_0,i1910). +Le(cr49_0,i1950). +Le(cr49_0,i1980). +Le(cr49_0,i2020). +Le(cr49_0,i2050). +Le(cr49_0,i2090). +Le(cr49_0,i2120). +Le(cr49_0,i2160). +Le(cr49_0,i2190). +Le(cr49_0,i2200). +Le(cr49_0,i2230). +Le(cr49_0,i2270). +Le(cr49_0,i2300). +Le(cr49_0,i2340). +Le(cr49_0,i2370). +Le(cr49_0,i2410). +Le(cr49_0,i2420). +Le(cr49_0,i2440). +Le(cr49_0,i2480). +Le(cr49_0,i2510). +Le(cr49_0,i2550). +Le(cr49_0,i2580). +Le(cr49_0,i2620). +Le(cr49_0,i2640). +Le(cr49_0,i2660). +Le(cr49_0,i2730). +Le(cr49_0,i2760). +Le(cr49_0,i2800). +Le(cr49_0,i2830). +Le(cr49_0,i2860). +Le(cr49_0,i2870). +Le(cr49_0,i2940). +Le(cr49_0,i2970). +Le(cr49_0,i3010). +Le(cr49_0,i3040). +Le(cr49_0,i3080). +Le(cr49_0,i3120). +Le(cr49_0,i3150). +Le(cr49_0,i3220). +Le(cr49_0,i3260). +Le(cr49_0,i3290). +Le(cr49_0,i3300). +Le(cr49_0,i3330). +Le(cr49_0,i3400). +Le(cr49_0,i3430). +Le(cr49_0,i3500). +Le(cr49_0,i3520). +Le(cr49_0,i3580). +Le(cr49_0,i3610). +Le(cr49_0,i3650). +Le(cr49_0,i3680). +Le(cr49_0,i3720). +Le(cr49_0,i3740). +Le(cr49_0,i3790). +Le(cr49_0,i3820). +Le(cr49_0,i3860). +Le(cr49_0,i3960). +Le(cr49_0,i4040). +Le(cr49_0,i4140). +Le(cr49_0,i4180). +Le(cr49_0,i4400). +Le(cr49_0,i4620). +Le(cr49_0,i4840). +Le(cr49_0,i5060). +Le(cr49_0,i5280). +Le(cr49_0,i5500). +Le(cr49_0,i5720). +Le(cr49_0,i5940). +Le(cr49_0,i6160). +Le(cr49_0,i6380). +Le(cr49_0,i6600). +Le(cr49_0,i6820). +Le(cr49_0,i7040). +Le(cr49_0,i7260). +Le(cr49_0,i7480). +Le(cr49_0,i7700). +Le(cr49_0,i7920). +Le(cr49_0,i8140). +Le(cr49_0,i8360). +Le(cr49_0,i8580). +Eq(i1310,i1310). +Le(i1310,cr50_0). +Le(cr50_0,i1320). +Le(i-30,cr50_0). +Le(i0,cr50_0). +Le(i13,cr50_0). +Le(i26,cr50_0). +Le(i39,cr50_0). +Le(i52,cr50_0). +Le(i60,cr50_0). +Le(i65,cr50_0). +Le(i70,cr50_0). +Le(i78,cr50_0). +Le(i90,cr50_0). +Le(i91,cr50_0). +Le(i104,cr50_0). +Le(i117,cr50_0). +Le(i130,cr50_0). +Le(i143,cr50_0). +Le(i156,cr50_0). +Le(i169,cr50_0). +Le(i182,cr50_0). +Le(i195,cr50_0). +Le(i208,cr50_0). +Le(i221,cr50_0). +Le(i234,cr50_0). +Le(i247,cr50_0). +Le(i260,cr50_0). +Le(i460,cr50_0). +Le(i530,cr50_0). +Le(i600,cr50_0). +Le(i660,cr50_0). +Le(i670,cr50_0). +Le(i710,cr50_0). +Le(i740,cr50_0). +Le(i810,cr50_0). +Le(i850,cr50_0). +Le(i880,cr50_0). +Le(i890,cr50_0). +Le(i920,cr50_0). +Le(i960,cr50_0). +Le(i990,cr50_0). +Le(i1030,cr50_0). +Le(i1060,cr50_0). +Le(i1100,cr50_0). +Le(i1130,cr50_0). +Le(i1170,cr50_0). +Le(i1200,cr50_0). +Le(i1240,cr50_0). +Le(i1260,cr50_0). +Le(i1270,cr50_0). +Le(i1290,cr50_0). +Le(cr50_0,i1330). +Le(cr50_0,i1350). +Le(cr50_0,i1360). +Le(cr50_0,i1380). +Le(cr50_0,i1390). +Le(cr50_0,i1420). +Le(cr50_0,i1430). +Le(cr50_0,i1450). +Le(cr50_0,i1460). +Le(cr50_0,i1490). +Le(cr50_0,i1520). +Le(cr50_0,i1530). +Le(cr50_0,i1540). +Le(cr50_0,i1560). +Le(cr50_0,i1590). +Le(cr50_0,i1630). +Le(cr50_0,i1660). +Le(cr50_0,i1700). +Le(cr50_0,i1730). +Le(cr50_0,i1760). +Le(cr50_0,i1770). +Le(cr50_0,i1810). +Le(cr50_0,i1840). +Le(cr50_0,i1880). +Le(cr50_0,i1910). +Le(cr50_0,i1950). +Le(cr50_0,i1980). +Le(cr50_0,i2020). +Le(cr50_0,i2050). +Le(cr50_0,i2090). +Le(cr50_0,i2120). +Le(cr50_0,i2160). +Le(cr50_0,i2190). +Le(cr50_0,i2200). +Le(cr50_0,i2230). +Le(cr50_0,i2270). +Le(cr50_0,i2300). +Le(cr50_0,i2340). +Le(cr50_0,i2370). +Le(cr50_0,i2410). +Le(cr50_0,i2420). +Le(cr50_0,i2440). +Le(cr50_0,i2480). +Le(cr50_0,i2510). +Le(cr50_0,i2550). +Le(cr50_0,i2580). +Le(cr50_0,i2620). 
+Le(cr50_0,i2640). +Le(cr50_0,i2660). +Le(cr50_0,i2730). +Le(cr50_0,i2760). +Le(cr50_0,i2800). +Le(cr50_0,i2830). +Le(cr50_0,i2860). +Le(cr50_0,i2870). +Le(cr50_0,i2940). +Le(cr50_0,i2970). +Le(cr50_0,i3010). +Le(cr50_0,i3040). +Le(cr50_0,i3080). +Le(cr50_0,i3120). +Le(cr50_0,i3150). +Le(cr50_0,i3220). +Le(cr50_0,i3260). +Le(cr50_0,i3290). +Le(cr50_0,i3300). +Le(cr50_0,i3330). +Le(cr50_0,i3400). +Le(cr50_0,i3430). +Le(cr50_0,i3500). +Le(cr50_0,i3520). +Le(cr50_0,i3580). +Le(cr50_0,i3610). +Le(cr50_0,i3650). +Le(cr50_0,i3680). +Le(cr50_0,i3720). +Le(cr50_0,i3740). +Le(cr50_0,i3790). +Le(cr50_0,i3820). +Le(cr50_0,i3860). +Le(cr50_0,i3960). +Le(cr50_0,i4040). +Le(cr50_0,i4140). +Le(cr50_0,i4180). +Le(cr50_0,i4400). +Le(cr50_0,i4620). +Le(cr50_0,i4840). +Le(cr50_0,i5060). +Le(cr50_0,i5280). +Le(cr50_0,i5500). +Le(cr50_0,i5720). +Le(cr50_0,i5940). +Le(cr50_0,i6160). +Le(cr50_0,i6380). +Le(cr50_0,i6600). +Le(cr50_0,i6820). +Le(cr50_0,i7040). +Le(cr50_0,i7260). +Le(cr50_0,i7480). +Le(cr50_0,i7700). +Le(cr50_0,i7920). +Le(cr50_0,i8140). +Le(cr50_0,i8360). +Le(cr50_0,i8580). +Eq(i1320,i1320). +Le(i1320,cr51_0). +Le(cr51_0,i1330). +Le(i-30,cr51_0). +Le(i0,cr51_0). +Le(i13,cr51_0). +Le(i26,cr51_0). +Le(i39,cr51_0). +Le(i52,cr51_0). +Le(i60,cr51_0). +Le(i65,cr51_0). +Le(i70,cr51_0). +Le(i78,cr51_0). +Le(i90,cr51_0). +Le(i91,cr51_0). +Le(i104,cr51_0). +Le(i117,cr51_0). +Le(i130,cr51_0). +Le(i143,cr51_0). +Le(i156,cr51_0). +Le(i169,cr51_0). +Le(i182,cr51_0). +Le(i195,cr51_0). +Le(i208,cr51_0). +Le(i221,cr51_0). +Le(i234,cr51_0). +Le(i247,cr51_0). +Le(i260,cr51_0). +Le(i460,cr51_0). +Le(i530,cr51_0). +Le(i600,cr51_0). +Le(i660,cr51_0). +Le(i670,cr51_0). +Le(i710,cr51_0). +Le(i740,cr51_0). +Le(i810,cr51_0). +Le(i850,cr51_0). +Le(i880,cr51_0). +Le(i890,cr51_0). +Le(i920,cr51_0). +Le(i960,cr51_0). +Le(i990,cr51_0). +Le(i1030,cr51_0). +Le(i1060,cr51_0). +Le(i1100,cr51_0). +Le(i1130,cr51_0). +Le(i1170,cr51_0). +Le(i1200,cr51_0). +Le(i1240,cr51_0). +Le(i1260,cr51_0). +Le(i1270,cr51_0). +Le(i1290,cr51_0). +Le(i1310,cr51_0). +Le(cr51_0,i1350). +Le(cr51_0,i1360). +Le(cr51_0,i1380). +Le(cr51_0,i1390). +Le(cr51_0,i1420). +Le(cr51_0,i1430). +Le(cr51_0,i1450). +Le(cr51_0,i1460). +Le(cr51_0,i1490). +Le(cr51_0,i1520). +Le(cr51_0,i1530). +Le(cr51_0,i1540). +Le(cr51_0,i1560). +Le(cr51_0,i1590). +Le(cr51_0,i1630). +Le(cr51_0,i1660). +Le(cr51_0,i1700). +Le(cr51_0,i1730). +Le(cr51_0,i1760). +Le(cr51_0,i1770). +Le(cr51_0,i1810). +Le(cr51_0,i1840). +Le(cr51_0,i1880). +Le(cr51_0,i1910). +Le(cr51_0,i1950). +Le(cr51_0,i1980). +Le(cr51_0,i2020). +Le(cr51_0,i2050). +Le(cr51_0,i2090). +Le(cr51_0,i2120). +Le(cr51_0,i2160). +Le(cr51_0,i2190). +Le(cr51_0,i2200). +Le(cr51_0,i2230). +Le(cr51_0,i2270). +Le(cr51_0,i2300). +Le(cr51_0,i2340). +Le(cr51_0,i2370). +Le(cr51_0,i2410). +Le(cr51_0,i2420). +Le(cr51_0,i2440). +Le(cr51_0,i2480). +Le(cr51_0,i2510). +Le(cr51_0,i2550). +Le(cr51_0,i2580). +Le(cr51_0,i2620). +Le(cr51_0,i2640). +Le(cr51_0,i2660). +Le(cr51_0,i2730). +Le(cr51_0,i2760). +Le(cr51_0,i2800). +Le(cr51_0,i2830). +Le(cr51_0,i2860). +Le(cr51_0,i2870). +Le(cr51_0,i2940). +Le(cr51_0,i2970). +Le(cr51_0,i3010). +Le(cr51_0,i3040). +Le(cr51_0,i3080). +Le(cr51_0,i3120). +Le(cr51_0,i3150). +Le(cr51_0,i3220). +Le(cr51_0,i3260). +Le(cr51_0,i3290). +Le(cr51_0,i3300). +Le(cr51_0,i3330). +Le(cr51_0,i3400). +Le(cr51_0,i3430). +Le(cr51_0,i3500). +Le(cr51_0,i3520). +Le(cr51_0,i3580). +Le(cr51_0,i3610). +Le(cr51_0,i3650). +Le(cr51_0,i3680). +Le(cr51_0,i3720). +Le(cr51_0,i3740). +Le(cr51_0,i3790). +Le(cr51_0,i3820). +Le(cr51_0,i3860). 
+Le(cr51_0,i3960). +Le(cr51_0,i4040). +Le(cr51_0,i4140). +Le(cr51_0,i4180). +Le(cr51_0,i4400). +Le(cr51_0,i4620). +Le(cr51_0,i4840). +Le(cr51_0,i5060). +Le(cr51_0,i5280). +Le(cr51_0,i5500). +Le(cr51_0,i5720). +Le(cr51_0,i5940). +Le(cr51_0,i6160). +Le(cr51_0,i6380). +Le(cr51_0,i6600). +Le(cr51_0,i6820). +Le(cr51_0,i7040). +Le(cr51_0,i7260). +Le(cr51_0,i7480). +Le(cr51_0,i7700). +Le(cr51_0,i7920). +Le(cr51_0,i8140). +Le(cr51_0,i8360). +Le(cr51_0,i8580). +Eq(i1330,i1330). +Le(i1330,cr52_0). +Le(cr52_0,i1350). +Le(i-30,cr52_0). +Le(i0,cr52_0). +Le(i13,cr52_0). +Le(i26,cr52_0). +Le(i39,cr52_0). +Le(i52,cr52_0). +Le(i60,cr52_0). +Le(i65,cr52_0). +Le(i70,cr52_0). +Le(i78,cr52_0). +Le(i90,cr52_0). +Le(i91,cr52_0). +Le(i104,cr52_0). +Le(i117,cr52_0). +Le(i130,cr52_0). +Le(i143,cr52_0). +Le(i156,cr52_0). +Le(i169,cr52_0). +Le(i182,cr52_0). +Le(i195,cr52_0). +Le(i208,cr52_0). +Le(i221,cr52_0). +Le(i234,cr52_0). +Le(i247,cr52_0). +Le(i260,cr52_0). +Le(i460,cr52_0). +Le(i530,cr52_0). +Le(i600,cr52_0). +Le(i660,cr52_0). +Le(i670,cr52_0). +Le(i710,cr52_0). +Le(i740,cr52_0). +Le(i810,cr52_0). +Le(i850,cr52_0). +Le(i880,cr52_0). +Le(i890,cr52_0). +Le(i920,cr52_0). +Le(i960,cr52_0). +Le(i990,cr52_0). +Le(i1030,cr52_0). +Le(i1060,cr52_0). +Le(i1100,cr52_0). +Le(i1130,cr52_0). +Le(i1170,cr52_0). +Le(i1200,cr52_0). +Le(i1240,cr52_0). +Le(i1260,cr52_0). +Le(i1270,cr52_0). +Le(i1290,cr52_0). +Le(i1310,cr52_0). +Le(i1320,cr52_0). +Le(cr52_0,i1360). +Le(cr52_0,i1380). +Le(cr52_0,i1390). +Le(cr52_0,i1420). +Le(cr52_0,i1430). +Le(cr52_0,i1450). +Le(cr52_0,i1460). +Le(cr52_0,i1490). +Le(cr52_0,i1520). +Le(cr52_0,i1530). +Le(cr52_0,i1540). +Le(cr52_0,i1560). +Le(cr52_0,i1590). +Le(cr52_0,i1630). +Le(cr52_0,i1660). +Le(cr52_0,i1700). +Le(cr52_0,i1730). +Le(cr52_0,i1760). +Le(cr52_0,i1770). +Le(cr52_0,i1810). +Le(cr52_0,i1840). +Le(cr52_0,i1880). +Le(cr52_0,i1910). +Le(cr52_0,i1950). +Le(cr52_0,i1980). +Le(cr52_0,i2020). +Le(cr52_0,i2050). +Le(cr52_0,i2090). +Le(cr52_0,i2120). +Le(cr52_0,i2160). +Le(cr52_0,i2190). +Le(cr52_0,i2200). +Le(cr52_0,i2230). +Le(cr52_0,i2270). +Le(cr52_0,i2300). +Le(cr52_0,i2340). +Le(cr52_0,i2370). +Le(cr52_0,i2410). +Le(cr52_0,i2420). +Le(cr52_0,i2440). +Le(cr52_0,i2480). +Le(cr52_0,i2510). +Le(cr52_0,i2550). +Le(cr52_0,i2580). +Le(cr52_0,i2620). +Le(cr52_0,i2640). +Le(cr52_0,i2660). +Le(cr52_0,i2730). +Le(cr52_0,i2760). +Le(cr52_0,i2800). +Le(cr52_0,i2830). +Le(cr52_0,i2860). +Le(cr52_0,i2870). +Le(cr52_0,i2940). +Le(cr52_0,i2970). +Le(cr52_0,i3010). +Le(cr52_0,i3040). +Le(cr52_0,i3080). +Le(cr52_0,i3120). +Le(cr52_0,i3150). +Le(cr52_0,i3220). +Le(cr52_0,i3260). +Le(cr52_0,i3290). +Le(cr52_0,i3300). +Le(cr52_0,i3330). +Le(cr52_0,i3400). +Le(cr52_0,i3430). +Le(cr52_0,i3500). +Le(cr52_0,i3520). +Le(cr52_0,i3580). +Le(cr52_0,i3610). +Le(cr52_0,i3650). +Le(cr52_0,i3680). +Le(cr52_0,i3720). +Le(cr52_0,i3740). +Le(cr52_0,i3790). +Le(cr52_0,i3820). +Le(cr52_0,i3860). +Le(cr52_0,i3960). +Le(cr52_0,i4040). +Le(cr52_0,i4140). +Le(cr52_0,i4180). +Le(cr52_0,i4400). +Le(cr52_0,i4620). +Le(cr52_0,i4840). +Le(cr52_0,i5060). +Le(cr52_0,i5280). +Le(cr52_0,i5500). +Le(cr52_0,i5720). +Le(cr52_0,i5940). +Le(cr52_0,i6160). +Le(cr52_0,i6380). +Le(cr52_0,i6600). +Le(cr52_0,i6820). +Le(cr52_0,i7040). +Le(cr52_0,i7260). +Le(cr52_0,i7480). +Le(cr52_0,i7700). +Le(cr52_0,i7920). +Le(cr52_0,i8140). +Le(cr52_0,i8360). +Le(cr52_0,i8580). +Eq(i1350,i1350). +Le(i1350,cr53_0). +Le(cr53_0,i1360). +Le(i-30,cr53_0). +Le(i0,cr53_0). +Le(i13,cr53_0). +Le(i26,cr53_0). +Le(i39,cr53_0). +Le(i52,cr53_0). +Le(i60,cr53_0). 
+Le(i65,cr53_0). +Le(i70,cr53_0). +Le(i78,cr53_0). +Le(i90,cr53_0). +Le(i91,cr53_0). +Le(i104,cr53_0). +Le(i117,cr53_0). +Le(i130,cr53_0). +Le(i143,cr53_0). +Le(i156,cr53_0). +Le(i169,cr53_0). +Le(i182,cr53_0). +Le(i195,cr53_0). +Le(i208,cr53_0). +Le(i221,cr53_0). +Le(i234,cr53_0). +Le(i247,cr53_0). +Le(i260,cr53_0). +Le(i460,cr53_0). +Le(i530,cr53_0). +Le(i600,cr53_0). +Le(i660,cr53_0). +Le(i670,cr53_0). +Le(i710,cr53_0). +Le(i740,cr53_0). +Le(i810,cr53_0). +Le(i850,cr53_0). +Le(i880,cr53_0). +Le(i890,cr53_0). +Le(i920,cr53_0). +Le(i960,cr53_0). +Le(i990,cr53_0). +Le(i1030,cr53_0). +Le(i1060,cr53_0). +Le(i1100,cr53_0). +Le(i1130,cr53_0). +Le(i1170,cr53_0). +Le(i1200,cr53_0). +Le(i1240,cr53_0). +Le(i1260,cr53_0). +Le(i1270,cr53_0). +Le(i1290,cr53_0). +Le(i1310,cr53_0). +Le(i1320,cr53_0). +Le(i1330,cr53_0). +Le(cr53_0,i1380). +Le(cr53_0,i1390). +Le(cr53_0,i1420). +Le(cr53_0,i1430). +Le(cr53_0,i1450). +Le(cr53_0,i1460). +Le(cr53_0,i1490). +Le(cr53_0,i1520). +Le(cr53_0,i1530). +Le(cr53_0,i1540). +Le(cr53_0,i1560). +Le(cr53_0,i1590). +Le(cr53_0,i1630). +Le(cr53_0,i1660). +Le(cr53_0,i1700). +Le(cr53_0,i1730). +Le(cr53_0,i1760). +Le(cr53_0,i1770). +Le(cr53_0,i1810). +Le(cr53_0,i1840). +Le(cr53_0,i1880). +Le(cr53_0,i1910). +Le(cr53_0,i1950). +Le(cr53_0,i1980). +Le(cr53_0,i2020). +Le(cr53_0,i2050). +Le(cr53_0,i2090). +Le(cr53_0,i2120). +Le(cr53_0,i2160). +Le(cr53_0,i2190). +Le(cr53_0,i2200). +Le(cr53_0,i2230). +Le(cr53_0,i2270). +Le(cr53_0,i2300). +Le(cr53_0,i2340). +Le(cr53_0,i2370). +Le(cr53_0,i2410). +Le(cr53_0,i2420). +Le(cr53_0,i2440). +Le(cr53_0,i2480). +Le(cr53_0,i2510). +Le(cr53_0,i2550). +Le(cr53_0,i2580). +Le(cr53_0,i2620). +Le(cr53_0,i2640). +Le(cr53_0,i2660). +Le(cr53_0,i2730). +Le(cr53_0,i2760). +Le(cr53_0,i2800). +Le(cr53_0,i2830). +Le(cr53_0,i2860). +Le(cr53_0,i2870). +Le(cr53_0,i2940). +Le(cr53_0,i2970). +Le(cr53_0,i3010). +Le(cr53_0,i3040). +Le(cr53_0,i3080). +Le(cr53_0,i3120). +Le(cr53_0,i3150). +Le(cr53_0,i3220). +Le(cr53_0,i3260). +Le(cr53_0,i3290). +Le(cr53_0,i3300). +Le(cr53_0,i3330). +Le(cr53_0,i3400). +Le(cr53_0,i3430). +Le(cr53_0,i3500). +Le(cr53_0,i3520). +Le(cr53_0,i3580). +Le(cr53_0,i3610). +Le(cr53_0,i3650). +Le(cr53_0,i3680). +Le(cr53_0,i3720). +Le(cr53_0,i3740). +Le(cr53_0,i3790). +Le(cr53_0,i3820). +Le(cr53_0,i3860). +Le(cr53_0,i3960). +Le(cr53_0,i4040). +Le(cr53_0,i4140). +Le(cr53_0,i4180). +Le(cr53_0,i4400). +Le(cr53_0,i4620). +Le(cr53_0,i4840). +Le(cr53_0,i5060). +Le(cr53_0,i5280). +Le(cr53_0,i5500). +Le(cr53_0,i5720). +Le(cr53_0,i5940). +Le(cr53_0,i6160). +Le(cr53_0,i6380). +Le(cr53_0,i6600). +Le(cr53_0,i6820). +Le(cr53_0,i7040). +Le(cr53_0,i7260). +Le(cr53_0,i7480). +Le(cr53_0,i7700). +Le(cr53_0,i7920). +Le(cr53_0,i8140). +Le(cr53_0,i8360). +Le(cr53_0,i8580). +Eq(i1360,i1360). +Le(i1360,cr54_0). +Le(cr54_0,i1380). +Le(i-30,cr54_0). +Le(i0,cr54_0). +Le(i13,cr54_0). +Le(i26,cr54_0). +Le(i39,cr54_0). +Le(i52,cr54_0). +Le(i60,cr54_0). +Le(i65,cr54_0). +Le(i70,cr54_0). +Le(i78,cr54_0). +Le(i90,cr54_0). +Le(i91,cr54_0). +Le(i104,cr54_0). +Le(i117,cr54_0). +Le(i130,cr54_0). +Le(i143,cr54_0). +Le(i156,cr54_0). +Le(i169,cr54_0). +Le(i182,cr54_0). +Le(i195,cr54_0). +Le(i208,cr54_0). +Le(i221,cr54_0). +Le(i234,cr54_0). +Le(i247,cr54_0). +Le(i260,cr54_0). +Le(i460,cr54_0). +Le(i530,cr54_0). +Le(i600,cr54_0). +Le(i660,cr54_0). +Le(i670,cr54_0). +Le(i710,cr54_0). +Le(i740,cr54_0). +Le(i810,cr54_0). +Le(i850,cr54_0). +Le(i880,cr54_0). +Le(i890,cr54_0). +Le(i920,cr54_0). +Le(i960,cr54_0). +Le(i990,cr54_0). +Le(i1030,cr54_0). +Le(i1060,cr54_0). +Le(i1100,cr54_0). 
+Le(i1130,cr54_0). +Le(i1170,cr54_0). +Le(i1200,cr54_0). +Le(i1240,cr54_0). +Le(i1260,cr54_0). +Le(i1270,cr54_0). +Le(i1290,cr54_0). +Le(i1310,cr54_0). +Le(i1320,cr54_0). +Le(i1330,cr54_0). +Le(i1350,cr54_0). +Le(cr54_0,i1390). +Le(cr54_0,i1420). +Le(cr54_0,i1430). +Le(cr54_0,i1450). +Le(cr54_0,i1460). +Le(cr54_0,i1490). +Le(cr54_0,i1520). +Le(cr54_0,i1530). +Le(cr54_0,i1540). +Le(cr54_0,i1560). +Le(cr54_0,i1590). +Le(cr54_0,i1630). +Le(cr54_0,i1660). +Le(cr54_0,i1700). +Le(cr54_0,i1730). +Le(cr54_0,i1760). +Le(cr54_0,i1770). +Le(cr54_0,i1810). +Le(cr54_0,i1840). +Le(cr54_0,i1880). +Le(cr54_0,i1910). +Le(cr54_0,i1950). +Le(cr54_0,i1980). +Le(cr54_0,i2020). +Le(cr54_0,i2050). +Le(cr54_0,i2090). +Le(cr54_0,i2120). +Le(cr54_0,i2160). +Le(cr54_0,i2190). +Le(cr54_0,i2200). +Le(cr54_0,i2230). +Le(cr54_0,i2270). +Le(cr54_0,i2300). +Le(cr54_0,i2340). +Le(cr54_0,i2370). +Le(cr54_0,i2410). +Le(cr54_0,i2420). +Le(cr54_0,i2440). +Le(cr54_0,i2480). +Le(cr54_0,i2510). +Le(cr54_0,i2550). +Le(cr54_0,i2580). +Le(cr54_0,i2620). +Le(cr54_0,i2640). +Le(cr54_0,i2660). +Le(cr54_0,i2730). +Le(cr54_0,i2760). +Le(cr54_0,i2800). +Le(cr54_0,i2830). +Le(cr54_0,i2860). +Le(cr54_0,i2870). +Le(cr54_0,i2940). +Le(cr54_0,i2970). +Le(cr54_0,i3010). +Le(cr54_0,i3040). +Le(cr54_0,i3080). +Le(cr54_0,i3120). +Le(cr54_0,i3150). +Le(cr54_0,i3220). +Le(cr54_0,i3260). +Le(cr54_0,i3290). +Le(cr54_0,i3300). +Le(cr54_0,i3330). +Le(cr54_0,i3400). +Le(cr54_0,i3430). +Le(cr54_0,i3500). +Le(cr54_0,i3520). +Le(cr54_0,i3580). +Le(cr54_0,i3610). +Le(cr54_0,i3650). +Le(cr54_0,i3680). +Le(cr54_0,i3720). +Le(cr54_0,i3740). +Le(cr54_0,i3790). +Le(cr54_0,i3820). +Le(cr54_0,i3860). +Le(cr54_0,i3960). +Le(cr54_0,i4040). +Le(cr54_0,i4140). +Le(cr54_0,i4180). +Le(cr54_0,i4400). +Le(cr54_0,i4620). +Le(cr54_0,i4840). +Le(cr54_0,i5060). +Le(cr54_0,i5280). +Le(cr54_0,i5500). +Le(cr54_0,i5720). +Le(cr54_0,i5940). +Le(cr54_0,i6160). +Le(cr54_0,i6380). +Le(cr54_0,i6600). +Le(cr54_0,i6820). +Le(cr54_0,i7040). +Le(cr54_0,i7260). +Le(cr54_0,i7480). +Le(cr54_0,i7700). +Le(cr54_0,i7920). +Le(cr54_0,i8140). +Le(cr54_0,i8360). +Le(cr54_0,i8580). +Eq(i1380,i1380). +Le(i1380,cr55_0). +Le(cr55_0,i1390). +Le(i-30,cr55_0). +Le(i0,cr55_0). +Le(i13,cr55_0). +Le(i26,cr55_0). +Le(i39,cr55_0). +Le(i52,cr55_0). +Le(i60,cr55_0). +Le(i65,cr55_0). +Le(i70,cr55_0). +Le(i78,cr55_0). +Le(i90,cr55_0). +Le(i91,cr55_0). +Le(i104,cr55_0). +Le(i117,cr55_0). +Le(i130,cr55_0). +Le(i143,cr55_0). +Le(i156,cr55_0). +Le(i169,cr55_0). +Le(i182,cr55_0). +Le(i195,cr55_0). +Le(i208,cr55_0). +Le(i221,cr55_0). +Le(i234,cr55_0). +Le(i247,cr55_0). +Le(i260,cr55_0). +Le(i460,cr55_0). +Le(i530,cr55_0). +Le(i600,cr55_0). +Le(i660,cr55_0). +Le(i670,cr55_0). +Le(i710,cr55_0). +Le(i740,cr55_0). +Le(i810,cr55_0). +Le(i850,cr55_0). +Le(i880,cr55_0). +Le(i890,cr55_0). +Le(i920,cr55_0). +Le(i960,cr55_0). +Le(i990,cr55_0). +Le(i1030,cr55_0). +Le(i1060,cr55_0). +Le(i1100,cr55_0). +Le(i1130,cr55_0). +Le(i1170,cr55_0). +Le(i1200,cr55_0). +Le(i1240,cr55_0). +Le(i1260,cr55_0). +Le(i1270,cr55_0). +Le(i1290,cr55_0). +Le(i1310,cr55_0). +Le(i1320,cr55_0). +Le(i1330,cr55_0). +Le(i1350,cr55_0). +Le(i1360,cr55_0). +Le(cr55_0,i1420). +Le(cr55_0,i1430). +Le(cr55_0,i1450). +Le(cr55_0,i1460). +Le(cr55_0,i1490). +Le(cr55_0,i1520). +Le(cr55_0,i1530). +Le(cr55_0,i1540). +Le(cr55_0,i1560). +Le(cr55_0,i1590). +Le(cr55_0,i1630). +Le(cr55_0,i1660). +Le(cr55_0,i1700). +Le(cr55_0,i1730). +Le(cr55_0,i1760). +Le(cr55_0,i1770). +Le(cr55_0,i1810). +Le(cr55_0,i1840). +Le(cr55_0,i1880). +Le(cr55_0,i1910). +Le(cr55_0,i1950). 
+Le(cr55_0,i1980). +Le(cr55_0,i2020). +Le(cr55_0,i2050). +Le(cr55_0,i2090). +Le(cr55_0,i2120). +Le(cr55_0,i2160). +Le(cr55_0,i2190). +Le(cr55_0,i2200). +Le(cr55_0,i2230). +Le(cr55_0,i2270). +Le(cr55_0,i2300). +Le(cr55_0,i2340). +Le(cr55_0,i2370). +Le(cr55_0,i2410). +Le(cr55_0,i2420). +Le(cr55_0,i2440). +Le(cr55_0,i2480). +Le(cr55_0,i2510). +Le(cr55_0,i2550). +Le(cr55_0,i2580). +Le(cr55_0,i2620). +Le(cr55_0,i2640). +Le(cr55_0,i2660). +Le(cr55_0,i2730). +Le(cr55_0,i2760). +Le(cr55_0,i2800). +Le(cr55_0,i2830). +Le(cr55_0,i2860). +Le(cr55_0,i2870). +Le(cr55_0,i2940). +Le(cr55_0,i2970). +Le(cr55_0,i3010). +Le(cr55_0,i3040). +Le(cr55_0,i3080). +Le(cr55_0,i3120). +Le(cr55_0,i3150). +Le(cr55_0,i3220). +Le(cr55_0,i3260). +Le(cr55_0,i3290). +Le(cr55_0,i3300). +Le(cr55_0,i3330). +Le(cr55_0,i3400). +Le(cr55_0,i3430). +Le(cr55_0,i3500). +Le(cr55_0,i3520). +Le(cr55_0,i3580). +Le(cr55_0,i3610). +Le(cr55_0,i3650). +Le(cr55_0,i3680). +Le(cr55_0,i3720). +Le(cr55_0,i3740). +Le(cr55_0,i3790). +Le(cr55_0,i3820). +Le(cr55_0,i3860). +Le(cr55_0,i3960). +Le(cr55_0,i4040). +Le(cr55_0,i4140). +Le(cr55_0,i4180). +Le(cr55_0,i4400). +Le(cr55_0,i4620). +Le(cr55_0,i4840). +Le(cr55_0,i5060). +Le(cr55_0,i5280). +Le(cr55_0,i5500). +Le(cr55_0,i5720). +Le(cr55_0,i5940). +Le(cr55_0,i6160). +Le(cr55_0,i6380). +Le(cr55_0,i6600). +Le(cr55_0,i6820). +Le(cr55_0,i7040). +Le(cr55_0,i7260). +Le(cr55_0,i7480). +Le(cr55_0,i7700). +Le(cr55_0,i7920). +Le(cr55_0,i8140). +Le(cr55_0,i8360). +Le(cr55_0,i8580). +Eq(i1390,i1390). +Le(i1390,cr56_0). +Le(cr56_0,i1420). +Le(i-30,cr56_0). +Le(i0,cr56_0). +Le(i13,cr56_0). +Le(i26,cr56_0). +Le(i39,cr56_0). +Le(i52,cr56_0). +Le(i60,cr56_0). +Le(i65,cr56_0). +Le(i70,cr56_0). +Le(i78,cr56_0). +Le(i90,cr56_0). +Le(i91,cr56_0). +Le(i104,cr56_0). +Le(i117,cr56_0). +Le(i130,cr56_0). +Le(i143,cr56_0). +Le(i156,cr56_0). +Le(i169,cr56_0). +Le(i182,cr56_0). +Le(i195,cr56_0). +Le(i208,cr56_0). +Le(i221,cr56_0). +Le(i234,cr56_0). +Le(i247,cr56_0). +Le(i260,cr56_0). +Le(i460,cr56_0). +Le(i530,cr56_0). +Le(i600,cr56_0). +Le(i660,cr56_0). +Le(i670,cr56_0). +Le(i710,cr56_0). +Le(i740,cr56_0). +Le(i810,cr56_0). +Le(i850,cr56_0). +Le(i880,cr56_0). +Le(i890,cr56_0). +Le(i920,cr56_0). +Le(i960,cr56_0). +Le(i990,cr56_0). +Le(i1030,cr56_0). +Le(i1060,cr56_0). +Le(i1100,cr56_0). +Le(i1130,cr56_0). +Le(i1170,cr56_0). +Le(i1200,cr56_0). +Le(i1240,cr56_0). +Le(i1260,cr56_0). +Le(i1270,cr56_0). +Le(i1290,cr56_0). +Le(i1310,cr56_0). +Le(i1320,cr56_0). +Le(i1330,cr56_0). +Le(i1350,cr56_0). +Le(i1360,cr56_0). +Le(i1380,cr56_0). +Le(cr56_0,i1430). +Le(cr56_0,i1450). +Le(cr56_0,i1460). +Le(cr56_0,i1490). +Le(cr56_0,i1520). +Le(cr56_0,i1530). +Le(cr56_0,i1540). +Le(cr56_0,i1560). +Le(cr56_0,i1590). +Le(cr56_0,i1630). +Le(cr56_0,i1660). +Le(cr56_0,i1700). +Le(cr56_0,i1730). +Le(cr56_0,i1760). +Le(cr56_0,i1770). +Le(cr56_0,i1810). +Le(cr56_0,i1840). +Le(cr56_0,i1880). +Le(cr56_0,i1910). +Le(cr56_0,i1950). +Le(cr56_0,i1980). +Le(cr56_0,i2020). +Le(cr56_0,i2050). +Le(cr56_0,i2090). +Le(cr56_0,i2120). +Le(cr56_0,i2160). +Le(cr56_0,i2190). +Le(cr56_0,i2200). +Le(cr56_0,i2230). +Le(cr56_0,i2270). +Le(cr56_0,i2300). +Le(cr56_0,i2340). +Le(cr56_0,i2370). +Le(cr56_0,i2410). +Le(cr56_0,i2420). +Le(cr56_0,i2440). +Le(cr56_0,i2480). +Le(cr56_0,i2510). +Le(cr56_0,i2550). +Le(cr56_0,i2580). +Le(cr56_0,i2620). +Le(cr56_0,i2640). +Le(cr56_0,i2660). +Le(cr56_0,i2730). +Le(cr56_0,i2760). +Le(cr56_0,i2800). +Le(cr56_0,i2830). +Le(cr56_0,i2860). +Le(cr56_0,i2870). +Le(cr56_0,i2940). +Le(cr56_0,i2970). +Le(cr56_0,i3010). +Le(cr56_0,i3040). 
+Le(cr56_0,i3080). +Le(cr56_0,i3120). +Le(cr56_0,i3150). +Le(cr56_0,i3220). +Le(cr56_0,i3260). +Le(cr56_0,i3290). +Le(cr56_0,i3300). +Le(cr56_0,i3330). +Le(cr56_0,i3400). +Le(cr56_0,i3430). +Le(cr56_0,i3500). +Le(cr56_0,i3520). +Le(cr56_0,i3580). +Le(cr56_0,i3610). +Le(cr56_0,i3650). +Le(cr56_0,i3680). +Le(cr56_0,i3720). +Le(cr56_0,i3740). +Le(cr56_0,i3790). +Le(cr56_0,i3820). +Le(cr56_0,i3860). +Le(cr56_0,i3960). +Le(cr56_0,i4040). +Le(cr56_0,i4140). +Le(cr56_0,i4180). +Le(cr56_0,i4400). +Le(cr56_0,i4620). +Le(cr56_0,i4840). +Le(cr56_0,i5060). +Le(cr56_0,i5280). +Le(cr56_0,i5500). +Le(cr56_0,i5720). +Le(cr56_0,i5940). +Le(cr56_0,i6160). +Le(cr56_0,i6380). +Le(cr56_0,i6600). +Le(cr56_0,i6820). +Le(cr56_0,i7040). +Le(cr56_0,i7260). +Le(cr56_0,i7480). +Le(cr56_0,i7700). +Le(cr56_0,i7920). +Le(cr56_0,i8140). +Le(cr56_0,i8360). +Le(cr56_0,i8580). +Eq(i1420,i1420). +Le(i1420,cr57_0). +Le(cr57_0,i1430). +Le(i-30,cr57_0). +Le(i0,cr57_0). +Le(i13,cr57_0). +Le(i26,cr57_0). +Le(i39,cr57_0). +Le(i52,cr57_0). +Le(i60,cr57_0). +Le(i65,cr57_0). +Le(i70,cr57_0). +Le(i78,cr57_0). +Le(i90,cr57_0). +Le(i91,cr57_0). +Le(i104,cr57_0). +Le(i117,cr57_0). +Le(i130,cr57_0). +Le(i143,cr57_0). +Le(i156,cr57_0). +Le(i169,cr57_0). +Le(i182,cr57_0). +Le(i195,cr57_0). +Le(i208,cr57_0). +Le(i221,cr57_0). +Le(i234,cr57_0). +Le(i247,cr57_0). +Le(i260,cr57_0). +Le(i460,cr57_0). +Le(i530,cr57_0). +Le(i600,cr57_0). +Le(i660,cr57_0). +Le(i670,cr57_0). +Le(i710,cr57_0). +Le(i740,cr57_0). +Le(i810,cr57_0). +Le(i850,cr57_0). +Le(i880,cr57_0). +Le(i890,cr57_0). +Le(i920,cr57_0). +Le(i960,cr57_0). +Le(i990,cr57_0). +Le(i1030,cr57_0). +Le(i1060,cr57_0). +Le(i1100,cr57_0). +Le(i1130,cr57_0). +Le(i1170,cr57_0). +Le(i1200,cr57_0). +Le(i1240,cr57_0). +Le(i1260,cr57_0). +Le(i1270,cr57_0). +Le(i1290,cr57_0). +Le(i1310,cr57_0). +Le(i1320,cr57_0). +Le(i1330,cr57_0). +Le(i1350,cr57_0). +Le(i1360,cr57_0). +Le(i1380,cr57_0). +Le(i1390,cr57_0). +Le(cr57_0,i1450). +Le(cr57_0,i1460). +Le(cr57_0,i1490). +Le(cr57_0,i1520). +Le(cr57_0,i1530). +Le(cr57_0,i1540). +Le(cr57_0,i1560). +Le(cr57_0,i1590). +Le(cr57_0,i1630). +Le(cr57_0,i1660). +Le(cr57_0,i1700). +Le(cr57_0,i1730). +Le(cr57_0,i1760). +Le(cr57_0,i1770). +Le(cr57_0,i1810). +Le(cr57_0,i1840). +Le(cr57_0,i1880). +Le(cr57_0,i1910). +Le(cr57_0,i1950). +Le(cr57_0,i1980). +Le(cr57_0,i2020). +Le(cr57_0,i2050). +Le(cr57_0,i2090). +Le(cr57_0,i2120). +Le(cr57_0,i2160). +Le(cr57_0,i2190). +Le(cr57_0,i2200). +Le(cr57_0,i2230). +Le(cr57_0,i2270). +Le(cr57_0,i2300). +Le(cr57_0,i2340). +Le(cr57_0,i2370). +Le(cr57_0,i2410). +Le(cr57_0,i2420). +Le(cr57_0,i2440). +Le(cr57_0,i2480). +Le(cr57_0,i2510). +Le(cr57_0,i2550). +Le(cr57_0,i2580). +Le(cr57_0,i2620). +Le(cr57_0,i2640). +Le(cr57_0,i2660). +Le(cr57_0,i2730). +Le(cr57_0,i2760). +Le(cr57_0,i2800). +Le(cr57_0,i2830). +Le(cr57_0,i2860). +Le(cr57_0,i2870). +Le(cr57_0,i2940). +Le(cr57_0,i2970). +Le(cr57_0,i3010). +Le(cr57_0,i3040). +Le(cr57_0,i3080). +Le(cr57_0,i3120). +Le(cr57_0,i3150). +Le(cr57_0,i3220). +Le(cr57_0,i3260). +Le(cr57_0,i3290). +Le(cr57_0,i3300). +Le(cr57_0,i3330). +Le(cr57_0,i3400). +Le(cr57_0,i3430). +Le(cr57_0,i3500). +Le(cr57_0,i3520). +Le(cr57_0,i3580). +Le(cr57_0,i3610). +Le(cr57_0,i3650). +Le(cr57_0,i3680). +Le(cr57_0,i3720). +Le(cr57_0,i3740). +Le(cr57_0,i3790). +Le(cr57_0,i3820). +Le(cr57_0,i3860). +Le(cr57_0,i3960). +Le(cr57_0,i4040). +Le(cr57_0,i4140). +Le(cr57_0,i4180). +Le(cr57_0,i4400). +Le(cr57_0,i4620). +Le(cr57_0,i4840). +Le(cr57_0,i5060). +Le(cr57_0,i5280). +Le(cr57_0,i5500). +Le(cr57_0,i5720). +Le(cr57_0,i5940). 
+Le(cr57_0,i6160). +Le(cr57_0,i6380). +Le(cr57_0,i6600). +Le(cr57_0,i6820). +Le(cr57_0,i7040). +Le(cr57_0,i7260). +Le(cr57_0,i7480). +Le(cr57_0,i7700). +Le(cr57_0,i7920). +Le(cr57_0,i8140). +Le(cr57_0,i8360). +Le(cr57_0,i8580). +Eq(i1430,i1430). +Le(i1430,cr58_0). +Le(cr58_0,i1450). +Le(i-30,cr58_0). +Le(i0,cr58_0). +Le(i13,cr58_0). +Le(i26,cr58_0). +Le(i39,cr58_0). +Le(i52,cr58_0). +Le(i60,cr58_0). +Le(i65,cr58_0). +Le(i70,cr58_0). +Le(i78,cr58_0). +Le(i90,cr58_0). +Le(i91,cr58_0). +Le(i104,cr58_0). +Le(i117,cr58_0). +Le(i130,cr58_0). +Le(i143,cr58_0). +Le(i156,cr58_0). +Le(i169,cr58_0). +Le(i182,cr58_0). +Le(i195,cr58_0). +Le(i208,cr58_0). +Le(i221,cr58_0). +Le(i234,cr58_0). +Le(i247,cr58_0). +Le(i260,cr58_0). +Le(i460,cr58_0). +Le(i530,cr58_0). +Le(i600,cr58_0). +Le(i660,cr58_0). +Le(i670,cr58_0). +Le(i710,cr58_0). +Le(i740,cr58_0). +Le(i810,cr58_0). +Le(i850,cr58_0). +Le(i880,cr58_0). +Le(i890,cr58_0). +Le(i920,cr58_0). +Le(i960,cr58_0). +Le(i990,cr58_0). +Le(i1030,cr58_0). +Le(i1060,cr58_0). +Le(i1100,cr58_0). +Le(i1130,cr58_0). +Le(i1170,cr58_0). +Le(i1200,cr58_0). +Le(i1240,cr58_0). +Le(i1260,cr58_0). +Le(i1270,cr58_0). +Le(i1290,cr58_0). +Le(i1310,cr58_0). +Le(i1320,cr58_0). +Le(i1330,cr58_0). +Le(i1350,cr58_0). +Le(i1360,cr58_0). +Le(i1380,cr58_0). +Le(i1390,cr58_0). +Le(i1420,cr58_0). +Le(cr58_0,i1460). +Le(cr58_0,i1490). +Le(cr58_0,i1520). +Le(cr58_0,i1530). +Le(cr58_0,i1540). +Le(cr58_0,i1560). +Le(cr58_0,i1590). +Le(cr58_0,i1630). +Le(cr58_0,i1660). +Le(cr58_0,i1700). +Le(cr58_0,i1730). +Le(cr58_0,i1760). +Le(cr58_0,i1770). +Le(cr58_0,i1810). +Le(cr58_0,i1840). +Le(cr58_0,i1880). +Le(cr58_0,i1910). +Le(cr58_0,i1950). +Le(cr58_0,i1980). +Le(cr58_0,i2020). +Le(cr58_0,i2050). +Le(cr58_0,i2090). +Le(cr58_0,i2120). +Le(cr58_0,i2160). +Le(cr58_0,i2190). +Le(cr58_0,i2200). +Le(cr58_0,i2230). +Le(cr58_0,i2270). +Le(cr58_0,i2300). +Le(cr58_0,i2340). +Le(cr58_0,i2370). +Le(cr58_0,i2410). +Le(cr58_0,i2420). +Le(cr58_0,i2440). +Le(cr58_0,i2480). +Le(cr58_0,i2510). +Le(cr58_0,i2550). +Le(cr58_0,i2580). +Le(cr58_0,i2620). +Le(cr58_0,i2640). +Le(cr58_0,i2660). +Le(cr58_0,i2730). +Le(cr58_0,i2760). +Le(cr58_0,i2800). +Le(cr58_0,i2830). +Le(cr58_0,i2860). +Le(cr58_0,i2870). +Le(cr58_0,i2940). +Le(cr58_0,i2970). +Le(cr58_0,i3010). +Le(cr58_0,i3040). +Le(cr58_0,i3080). +Le(cr58_0,i3120). +Le(cr58_0,i3150). +Le(cr58_0,i3220). +Le(cr58_0,i3260). +Le(cr58_0,i3290). +Le(cr58_0,i3300). +Le(cr58_0,i3330). +Le(cr58_0,i3400). +Le(cr58_0,i3430). +Le(cr58_0,i3500). +Le(cr58_0,i3520). +Le(cr58_0,i3580). +Le(cr58_0,i3610). +Le(cr58_0,i3650). +Le(cr58_0,i3680). +Le(cr58_0,i3720). +Le(cr58_0,i3740). +Le(cr58_0,i3790). +Le(cr58_0,i3820). +Le(cr58_0,i3860). +Le(cr58_0,i3960). +Le(cr58_0,i4040). +Le(cr58_0,i4140). +Le(cr58_0,i4180). +Le(cr58_0,i4400). +Le(cr58_0,i4620). +Le(cr58_0,i4840). +Le(cr58_0,i5060). +Le(cr58_0,i5280). +Le(cr58_0,i5500). +Le(cr58_0,i5720). +Le(cr58_0,i5940). +Le(cr58_0,i6160). +Le(cr58_0,i6380). +Le(cr58_0,i6600). +Le(cr58_0,i6820). +Le(cr58_0,i7040). +Le(cr58_0,i7260). +Le(cr58_0,i7480). +Le(cr58_0,i7700). +Le(cr58_0,i7920). +Le(cr58_0,i8140). +Le(cr58_0,i8360). +Le(cr58_0,i8580). +Eq(i1450,i1450). +Le(i1450,cr59_0). +Le(cr59_0,i1460). +Le(i-30,cr59_0). +Le(i0,cr59_0). +Le(i13,cr59_0). +Le(i26,cr59_0). +Le(i39,cr59_0). +Le(i52,cr59_0). +Le(i60,cr59_0). +Le(i65,cr59_0). +Le(i70,cr59_0). +Le(i78,cr59_0). +Le(i90,cr59_0). +Le(i91,cr59_0). +Le(i104,cr59_0). +Le(i117,cr59_0). +Le(i130,cr59_0). +Le(i143,cr59_0). +Le(i156,cr59_0). +Le(i169,cr59_0). +Le(i182,cr59_0). +Le(i195,cr59_0). 
+Le(i208,cr59_0). +Le(i221,cr59_0). +Le(i234,cr59_0). +Le(i247,cr59_0). +Le(i260,cr59_0). +Le(i460,cr59_0). +Le(i530,cr59_0). +Le(i600,cr59_0). +Le(i660,cr59_0). +Le(i670,cr59_0). +Le(i710,cr59_0). +Le(i740,cr59_0). +Le(i810,cr59_0). +Le(i850,cr59_0). +Le(i880,cr59_0). +Le(i890,cr59_0). +Le(i920,cr59_0). +Le(i960,cr59_0). +Le(i990,cr59_0). +Le(i1030,cr59_0). +Le(i1060,cr59_0). +Le(i1100,cr59_0). +Le(i1130,cr59_0). +Le(i1170,cr59_0). +Le(i1200,cr59_0). +Le(i1240,cr59_0). +Le(i1260,cr59_0). +Le(i1270,cr59_0). +Le(i1290,cr59_0). +Le(i1310,cr59_0). +Le(i1320,cr59_0). +Le(i1330,cr59_0). +Le(i1350,cr59_0). +Le(i1360,cr59_0). +Le(i1380,cr59_0). +Le(i1390,cr59_0). +Le(i1420,cr59_0). +Le(i1430,cr59_0). +Le(cr59_0,i1490). +Le(cr59_0,i1520). +Le(cr59_0,i1530). +Le(cr59_0,i1540). +Le(cr59_0,i1560). +Le(cr59_0,i1590). +Le(cr59_0,i1630). +Le(cr59_0,i1660). +Le(cr59_0,i1700). +Le(cr59_0,i1730). +Le(cr59_0,i1760). +Le(cr59_0,i1770). +Le(cr59_0,i1810). +Le(cr59_0,i1840). +Le(cr59_0,i1880). +Le(cr59_0,i1910). +Le(cr59_0,i1950). +Le(cr59_0,i1980). +Le(cr59_0,i2020). +Le(cr59_0,i2050). +Le(cr59_0,i2090). +Le(cr59_0,i2120). +Le(cr59_0,i2160). +Le(cr59_0,i2190). +Le(cr59_0,i2200). +Le(cr59_0,i2230). +Le(cr59_0,i2270). +Le(cr59_0,i2300). +Le(cr59_0,i2340). +Le(cr59_0,i2370). +Le(cr59_0,i2410). +Le(cr59_0,i2420). +Le(cr59_0,i2440). +Le(cr59_0,i2480). +Le(cr59_0,i2510). +Le(cr59_0,i2550). +Le(cr59_0,i2580). +Le(cr59_0,i2620). +Le(cr59_0,i2640). +Le(cr59_0,i2660). +Le(cr59_0,i2730). +Le(cr59_0,i2760). +Le(cr59_0,i2800). +Le(cr59_0,i2830). +Le(cr59_0,i2860). +Le(cr59_0,i2870). +Le(cr59_0,i2940). +Le(cr59_0,i2970). +Le(cr59_0,i3010). +Le(cr59_0,i3040). +Le(cr59_0,i3080). +Le(cr59_0,i3120). +Le(cr59_0,i3150). +Le(cr59_0,i3220). +Le(cr59_0,i3260). +Le(cr59_0,i3290). +Le(cr59_0,i3300). +Le(cr59_0,i3330). +Le(cr59_0,i3400). +Le(cr59_0,i3430). +Le(cr59_0,i3500). +Le(cr59_0,i3520). +Le(cr59_0,i3580). +Le(cr59_0,i3610). +Le(cr59_0,i3650). +Le(cr59_0,i3680). +Le(cr59_0,i3720). +Le(cr59_0,i3740). +Le(cr59_0,i3790). +Le(cr59_0,i3820). +Le(cr59_0,i3860). +Le(cr59_0,i3960). +Le(cr59_0,i4040). +Le(cr59_0,i4140). +Le(cr59_0,i4180). +Le(cr59_0,i4400). +Le(cr59_0,i4620). +Le(cr59_0,i4840). +Le(cr59_0,i5060). +Le(cr59_0,i5280). +Le(cr59_0,i5500). +Le(cr59_0,i5720). +Le(cr59_0,i5940). +Le(cr59_0,i6160). +Le(cr59_0,i6380). +Le(cr59_0,i6600). +Le(cr59_0,i6820). +Le(cr59_0,i7040). +Le(cr59_0,i7260). +Le(cr59_0,i7480). +Le(cr59_0,i7700). +Le(cr59_0,i7920). +Le(cr59_0,i8140). +Le(cr59_0,i8360). +Le(cr59_0,i8580). +Eq(i1460,i1460). +Le(i1460,cr60_0). +Le(cr60_0,i1490). +Le(i-30,cr60_0). +Le(i0,cr60_0). +Le(i13,cr60_0). +Le(i26,cr60_0). +Le(i39,cr60_0). +Le(i52,cr60_0). +Le(i60,cr60_0). +Le(i65,cr60_0). +Le(i70,cr60_0). +Le(i78,cr60_0). +Le(i90,cr60_0). +Le(i91,cr60_0). +Le(i104,cr60_0). +Le(i117,cr60_0). +Le(i130,cr60_0). +Le(i143,cr60_0). +Le(i156,cr60_0). +Le(i169,cr60_0). +Le(i182,cr60_0). +Le(i195,cr60_0). +Le(i208,cr60_0). +Le(i221,cr60_0). +Le(i234,cr60_0). +Le(i247,cr60_0). +Le(i260,cr60_0). +Le(i460,cr60_0). +Le(i530,cr60_0). +Le(i600,cr60_0). +Le(i660,cr60_0). +Le(i670,cr60_0). +Le(i710,cr60_0). +Le(i740,cr60_0). +Le(i810,cr60_0). +Le(i850,cr60_0). +Le(i880,cr60_0). +Le(i890,cr60_0). +Le(i920,cr60_0). +Le(i960,cr60_0). +Le(i990,cr60_0). +Le(i1030,cr60_0). +Le(i1060,cr60_0). +Le(i1100,cr60_0). +Le(i1130,cr60_0). +Le(i1170,cr60_0). +Le(i1200,cr60_0). +Le(i1240,cr60_0). +Le(i1260,cr60_0). +Le(i1270,cr60_0). +Le(i1290,cr60_0). +Le(i1310,cr60_0). +Le(i1320,cr60_0). +Le(i1330,cr60_0). +Le(i1350,cr60_0). +Le(i1360,cr60_0). 
+Le(i1380,cr60_0). +Le(i1390,cr60_0). +Le(i1420,cr60_0). +Le(i1430,cr60_0). +Le(i1450,cr60_0). +Le(cr60_0,i1520). +Le(cr60_0,i1530). +Le(cr60_0,i1540). +Le(cr60_0,i1560). +Le(cr60_0,i1590). +Le(cr60_0,i1630). +Le(cr60_0,i1660). +Le(cr60_0,i1700). +Le(cr60_0,i1730). +Le(cr60_0,i1760). +Le(cr60_0,i1770). +Le(cr60_0,i1810). +Le(cr60_0,i1840). +Le(cr60_0,i1880). +Le(cr60_0,i1910). +Le(cr60_0,i1950). +Le(cr60_0,i1980). +Le(cr60_0,i2020). +Le(cr60_0,i2050). +Le(cr60_0,i2090). +Le(cr60_0,i2120). +Le(cr60_0,i2160). +Le(cr60_0,i2190). +Le(cr60_0,i2200). +Le(cr60_0,i2230). +Le(cr60_0,i2270). +Le(cr60_0,i2300). +Le(cr60_0,i2340). +Le(cr60_0,i2370). +Le(cr60_0,i2410). +Le(cr60_0,i2420). +Le(cr60_0,i2440). +Le(cr60_0,i2480). +Le(cr60_0,i2510). +Le(cr60_0,i2550). +Le(cr60_0,i2580). +Le(cr60_0,i2620). +Le(cr60_0,i2640). +Le(cr60_0,i2660). +Le(cr60_0,i2730). +Le(cr60_0,i2760). +Le(cr60_0,i2800). +Le(cr60_0,i2830). +Le(cr60_0,i2860). +Le(cr60_0,i2870). +Le(cr60_0,i2940). +Le(cr60_0,i2970). +Le(cr60_0,i3010). +Le(cr60_0,i3040). +Le(cr60_0,i3080). +Le(cr60_0,i3120). +Le(cr60_0,i3150). +Le(cr60_0,i3220). +Le(cr60_0,i3260). +Le(cr60_0,i3290). +Le(cr60_0,i3300). +Le(cr60_0,i3330). +Le(cr60_0,i3400). +Le(cr60_0,i3430). +Le(cr60_0,i3500). +Le(cr60_0,i3520). +Le(cr60_0,i3580). +Le(cr60_0,i3610). +Le(cr60_0,i3650). +Le(cr60_0,i3680). +Le(cr60_0,i3720). +Le(cr60_0,i3740). +Le(cr60_0,i3790). +Le(cr60_0,i3820). +Le(cr60_0,i3860). +Le(cr60_0,i3960). +Le(cr60_0,i4040). +Le(cr60_0,i4140). +Le(cr60_0,i4180). +Le(cr60_0,i4400). +Le(cr60_0,i4620). +Le(cr60_0,i4840). +Le(cr60_0,i5060). +Le(cr60_0,i5280). +Le(cr60_0,i5500). +Le(cr60_0,i5720). +Le(cr60_0,i5940). +Le(cr60_0,i6160). +Le(cr60_0,i6380). +Le(cr60_0,i6600). +Le(cr60_0,i6820). +Le(cr60_0,i7040). +Le(cr60_0,i7260). +Le(cr60_0,i7480). +Le(cr60_0,i7700). +Le(cr60_0,i7920). +Le(cr60_0,i8140). +Le(cr60_0,i8360). +Le(cr60_0,i8580). +Eq(i1490,i1490). +Le(i1490,cr61_0). +Le(cr61_0,i1520). +Le(i-30,cr61_0). +Le(i0,cr61_0). +Le(i13,cr61_0). +Le(i26,cr61_0). +Le(i39,cr61_0). +Le(i52,cr61_0). +Le(i60,cr61_0). +Le(i65,cr61_0). +Le(i70,cr61_0). +Le(i78,cr61_0). +Le(i90,cr61_0). +Le(i91,cr61_0). +Le(i104,cr61_0). +Le(i117,cr61_0). +Le(i130,cr61_0). +Le(i143,cr61_0). +Le(i156,cr61_0). +Le(i169,cr61_0). +Le(i182,cr61_0). +Le(i195,cr61_0). +Le(i208,cr61_0). +Le(i221,cr61_0). +Le(i234,cr61_0). +Le(i247,cr61_0). +Le(i260,cr61_0). +Le(i460,cr61_0). +Le(i530,cr61_0). +Le(i600,cr61_0). +Le(i660,cr61_0). +Le(i670,cr61_0). +Le(i710,cr61_0). +Le(i740,cr61_0). +Le(i810,cr61_0). +Le(i850,cr61_0). +Le(i880,cr61_0). +Le(i890,cr61_0). +Le(i920,cr61_0). +Le(i960,cr61_0). +Le(i990,cr61_0). +Le(i1030,cr61_0). +Le(i1060,cr61_0). +Le(i1100,cr61_0). +Le(i1130,cr61_0). +Le(i1170,cr61_0). +Le(i1200,cr61_0). +Le(i1240,cr61_0). +Le(i1260,cr61_0). +Le(i1270,cr61_0). +Le(i1290,cr61_0). +Le(i1310,cr61_0). +Le(i1320,cr61_0). +Le(i1330,cr61_0). +Le(i1350,cr61_0). +Le(i1360,cr61_0). +Le(i1380,cr61_0). +Le(i1390,cr61_0). +Le(i1420,cr61_0). +Le(i1430,cr61_0). +Le(i1450,cr61_0). +Le(i1460,cr61_0). +Le(cr61_0,i1530). +Le(cr61_0,i1540). +Le(cr61_0,i1560). +Le(cr61_0,i1590). +Le(cr61_0,i1630). +Le(cr61_0,i1660). +Le(cr61_0,i1700). +Le(cr61_0,i1730). +Le(cr61_0,i1760). +Le(cr61_0,i1770). +Le(cr61_0,i1810). +Le(cr61_0,i1840). +Le(cr61_0,i1880). +Le(cr61_0,i1910). +Le(cr61_0,i1950). +Le(cr61_0,i1980). +Le(cr61_0,i2020). +Le(cr61_0,i2050). +Le(cr61_0,i2090). +Le(cr61_0,i2120). +Le(cr61_0,i2160). +Le(cr61_0,i2190). +Le(cr61_0,i2200). +Le(cr61_0,i2230). +Le(cr61_0,i2270). +Le(cr61_0,i2300). +Le(cr61_0,i2340). 
+Le(cr61_0,i2370). +Le(cr61_0,i2410). +Le(cr61_0,i2420). +Le(cr61_0,i2440). +Le(cr61_0,i2480). +Le(cr61_0,i2510). +Le(cr61_0,i2550). +Le(cr61_0,i2580). +Le(cr61_0,i2620). +Le(cr61_0,i2640). +Le(cr61_0,i2660). +Le(cr61_0,i2730). +Le(cr61_0,i2760). +Le(cr61_0,i2800). +Le(cr61_0,i2830). +Le(cr61_0,i2860). +Le(cr61_0,i2870). +Le(cr61_0,i2940). +Le(cr61_0,i2970). +Le(cr61_0,i3010). +Le(cr61_0,i3040). +Le(cr61_0,i3080). +Le(cr61_0,i3120). +Le(cr61_0,i3150). +Le(cr61_0,i3220). +Le(cr61_0,i3260). +Le(cr61_0,i3290). +Le(cr61_0,i3300). +Le(cr61_0,i3330). +Le(cr61_0,i3400). +Le(cr61_0,i3430). +Le(cr61_0,i3500). +Le(cr61_0,i3520). +Le(cr61_0,i3580). +Le(cr61_0,i3610). +Le(cr61_0,i3650). +Le(cr61_0,i3680). +Le(cr61_0,i3720). +Le(cr61_0,i3740). +Le(cr61_0,i3790). +Le(cr61_0,i3820). +Le(cr61_0,i3860). +Le(cr61_0,i3960). +Le(cr61_0,i4040). +Le(cr61_0,i4140). +Le(cr61_0,i4180). +Le(cr61_0,i4400). +Le(cr61_0,i4620). +Le(cr61_0,i4840). +Le(cr61_0,i5060). +Le(cr61_0,i5280). +Le(cr61_0,i5500). +Le(cr61_0,i5720). +Le(cr61_0,i5940). +Le(cr61_0,i6160). +Le(cr61_0,i6380). +Le(cr61_0,i6600). +Le(cr61_0,i6820). +Le(cr61_0,i7040). +Le(cr61_0,i7260). +Le(cr61_0,i7480). +Le(cr61_0,i7700). +Le(cr61_0,i7920). +Le(cr61_0,i8140). +Le(cr61_0,i8360). +Le(cr61_0,i8580). +Eq(i1520,i1520). +Le(i1520,cr62_0). +Le(cr62_0,i1530). +Le(i-30,cr62_0). +Le(i0,cr62_0). +Le(i13,cr62_0). +Le(i26,cr62_0). +Le(i39,cr62_0). +Le(i52,cr62_0). +Le(i60,cr62_0). +Le(i65,cr62_0). +Le(i70,cr62_0). +Le(i78,cr62_0). +Le(i90,cr62_0). +Le(i91,cr62_0). +Le(i104,cr62_0). +Le(i117,cr62_0). +Le(i130,cr62_0). +Le(i143,cr62_0). +Le(i156,cr62_0). +Le(i169,cr62_0). +Le(i182,cr62_0). +Le(i195,cr62_0). +Le(i208,cr62_0). +Le(i221,cr62_0). +Le(i234,cr62_0). +Le(i247,cr62_0). +Le(i260,cr62_0). +Le(i460,cr62_0). +Le(i530,cr62_0). +Le(i600,cr62_0). +Le(i660,cr62_0). +Le(i670,cr62_0). +Le(i710,cr62_0). +Le(i740,cr62_0). +Le(i810,cr62_0). +Le(i850,cr62_0). +Le(i880,cr62_0). +Le(i890,cr62_0). +Le(i920,cr62_0). +Le(i960,cr62_0). +Le(i990,cr62_0). +Le(i1030,cr62_0). +Le(i1060,cr62_0). +Le(i1100,cr62_0). +Le(i1130,cr62_0). +Le(i1170,cr62_0). +Le(i1200,cr62_0). +Le(i1240,cr62_0). +Le(i1260,cr62_0). +Le(i1270,cr62_0). +Le(i1290,cr62_0). +Le(i1310,cr62_0). +Le(i1320,cr62_0). +Le(i1330,cr62_0). +Le(i1350,cr62_0). +Le(i1360,cr62_0). +Le(i1380,cr62_0). +Le(i1390,cr62_0). +Le(i1420,cr62_0). +Le(i1430,cr62_0). +Le(i1450,cr62_0). +Le(i1460,cr62_0). +Le(i1490,cr62_0). +Le(cr62_0,i1540). +Le(cr62_0,i1560). +Le(cr62_0,i1590). +Le(cr62_0,i1630). +Le(cr62_0,i1660). +Le(cr62_0,i1700). +Le(cr62_0,i1730). +Le(cr62_0,i1760). +Le(cr62_0,i1770). +Le(cr62_0,i1810). +Le(cr62_0,i1840). +Le(cr62_0,i1880). +Le(cr62_0,i1910). +Le(cr62_0,i1950). +Le(cr62_0,i1980). +Le(cr62_0,i2020). +Le(cr62_0,i2050). +Le(cr62_0,i2090). +Le(cr62_0,i2120). +Le(cr62_0,i2160). +Le(cr62_0,i2190). +Le(cr62_0,i2200). +Le(cr62_0,i2230). +Le(cr62_0,i2270). +Le(cr62_0,i2300). +Le(cr62_0,i2340). +Le(cr62_0,i2370). +Le(cr62_0,i2410). +Le(cr62_0,i2420). +Le(cr62_0,i2440). +Le(cr62_0,i2480). +Le(cr62_0,i2510). +Le(cr62_0,i2550). +Le(cr62_0,i2580). +Le(cr62_0,i2620). +Le(cr62_0,i2640). +Le(cr62_0,i2660). +Le(cr62_0,i2730). +Le(cr62_0,i2760). +Le(cr62_0,i2800). +Le(cr62_0,i2830). +Le(cr62_0,i2860). +Le(cr62_0,i2870). +Le(cr62_0,i2940). +Le(cr62_0,i2970). +Le(cr62_0,i3010). +Le(cr62_0,i3040). +Le(cr62_0,i3080). +Le(cr62_0,i3120). +Le(cr62_0,i3150). +Le(cr62_0,i3220). +Le(cr62_0,i3260). +Le(cr62_0,i3290). +Le(cr62_0,i3300). +Le(cr62_0,i3330). +Le(cr62_0,i3400). +Le(cr62_0,i3430). +Le(cr62_0,i3500). +Le(cr62_0,i3520). 
+Le(cr62_0,i3580). +Le(cr62_0,i3610). +Le(cr62_0,i3650). +Le(cr62_0,i3680). +Le(cr62_0,i3720). +Le(cr62_0,i3740). +Le(cr62_0,i3790). +Le(cr62_0,i3820). +Le(cr62_0,i3860). +Le(cr62_0,i3960). +Le(cr62_0,i4040). +Le(cr62_0,i4140). +Le(cr62_0,i4180). +Le(cr62_0,i4400). +Le(cr62_0,i4620). +Le(cr62_0,i4840). +Le(cr62_0,i5060). +Le(cr62_0,i5280). +Le(cr62_0,i5500). +Le(cr62_0,i5720). +Le(cr62_0,i5940). +Le(cr62_0,i6160). +Le(cr62_0,i6380). +Le(cr62_0,i6600). +Le(cr62_0,i6820). +Le(cr62_0,i7040). +Le(cr62_0,i7260). +Le(cr62_0,i7480). +Le(cr62_0,i7700). +Le(cr62_0,i7920). +Le(cr62_0,i8140). +Le(cr62_0,i8360). +Le(cr62_0,i8580). +Eq(i1530,i1530). +Le(i1530,cr63_0). +Le(cr63_0,i1540). +Le(i-30,cr63_0). +Le(i0,cr63_0). +Le(i13,cr63_0). +Le(i26,cr63_0). +Le(i39,cr63_0). +Le(i52,cr63_0). +Le(i60,cr63_0). +Le(i65,cr63_0). +Le(i70,cr63_0). +Le(i78,cr63_0). +Le(i90,cr63_0). +Le(i91,cr63_0). +Le(i104,cr63_0). +Le(i117,cr63_0). +Le(i130,cr63_0). +Le(i143,cr63_0). +Le(i156,cr63_0). +Le(i169,cr63_0). +Le(i182,cr63_0). +Le(i195,cr63_0). +Le(i208,cr63_0). +Le(i221,cr63_0). +Le(i234,cr63_0). +Le(i247,cr63_0). +Le(i260,cr63_0). +Le(i460,cr63_0). +Le(i530,cr63_0). +Le(i600,cr63_0). +Le(i660,cr63_0). +Le(i670,cr63_0). +Le(i710,cr63_0). +Le(i740,cr63_0). +Le(i810,cr63_0). +Le(i850,cr63_0). +Le(i880,cr63_0). +Le(i890,cr63_0). +Le(i920,cr63_0). +Le(i960,cr63_0). +Le(i990,cr63_0). +Le(i1030,cr63_0). +Le(i1060,cr63_0). +Le(i1100,cr63_0). +Le(i1130,cr63_0). +Le(i1170,cr63_0). +Le(i1200,cr63_0). +Le(i1240,cr63_0). +Le(i1260,cr63_0). +Le(i1270,cr63_0). +Le(i1290,cr63_0). +Le(i1310,cr63_0). +Le(i1320,cr63_0). +Le(i1330,cr63_0). +Le(i1350,cr63_0). +Le(i1360,cr63_0). +Le(i1380,cr63_0). +Le(i1390,cr63_0). +Le(i1420,cr63_0). +Le(i1430,cr63_0). +Le(i1450,cr63_0). +Le(i1460,cr63_0). +Le(i1490,cr63_0). +Le(i1520,cr63_0). +Le(cr63_0,i1560). +Le(cr63_0,i1590). +Le(cr63_0,i1630). +Le(cr63_0,i1660). +Le(cr63_0,i1700). +Le(cr63_0,i1730). +Le(cr63_0,i1760). +Le(cr63_0,i1770). +Le(cr63_0,i1810). +Le(cr63_0,i1840). +Le(cr63_0,i1880). +Le(cr63_0,i1910). +Le(cr63_0,i1950). +Le(cr63_0,i1980). +Le(cr63_0,i2020). +Le(cr63_0,i2050). +Le(cr63_0,i2090). +Le(cr63_0,i2120). +Le(cr63_0,i2160). +Le(cr63_0,i2190). +Le(cr63_0,i2200). +Le(cr63_0,i2230). +Le(cr63_0,i2270). +Le(cr63_0,i2300). +Le(cr63_0,i2340). +Le(cr63_0,i2370). +Le(cr63_0,i2410). +Le(cr63_0,i2420). +Le(cr63_0,i2440). +Le(cr63_0,i2480). +Le(cr63_0,i2510). +Le(cr63_0,i2550). +Le(cr63_0,i2580). +Le(cr63_0,i2620). +Le(cr63_0,i2640). +Le(cr63_0,i2660). +Le(cr63_0,i2730). +Le(cr63_0,i2760). +Le(cr63_0,i2800). +Le(cr63_0,i2830). +Le(cr63_0,i2860). +Le(cr63_0,i2870). +Le(cr63_0,i2940). +Le(cr63_0,i2970). +Le(cr63_0,i3010). +Le(cr63_0,i3040). +Le(cr63_0,i3080). +Le(cr63_0,i3120). +Le(cr63_0,i3150). +Le(cr63_0,i3220). +Le(cr63_0,i3260). +Le(cr63_0,i3290). +Le(cr63_0,i3300). +Le(cr63_0,i3330). +Le(cr63_0,i3400). +Le(cr63_0,i3430). +Le(cr63_0,i3500). +Le(cr63_0,i3520). +Le(cr63_0,i3580). +Le(cr63_0,i3610). +Le(cr63_0,i3650). +Le(cr63_0,i3680). +Le(cr63_0,i3720). +Le(cr63_0,i3740). +Le(cr63_0,i3790). +Le(cr63_0,i3820). +Le(cr63_0,i3860). +Le(cr63_0,i3960). +Le(cr63_0,i4040). +Le(cr63_0,i4140). +Le(cr63_0,i4180). +Le(cr63_0,i4400). +Le(cr63_0,i4620). +Le(cr63_0,i4840). +Le(cr63_0,i5060). +Le(cr63_0,i5280). +Le(cr63_0,i5500). +Le(cr63_0,i5720). +Le(cr63_0,i5940). +Le(cr63_0,i6160). +Le(cr63_0,i6380). +Le(cr63_0,i6600). +Le(cr63_0,i6820). +Le(cr63_0,i7040). +Le(cr63_0,i7260). +Le(cr63_0,i7480). +Le(cr63_0,i7700). +Le(cr63_0,i7920). +Le(cr63_0,i8140). +Le(cr63_0,i8360). +Le(cr63_0,i8580). 
+Eq(i1540,i1540). +Le(i1540,cr64_0). +Le(cr64_0,i1560). +Le(i-30,cr64_0). +Le(i0,cr64_0). +Le(i13,cr64_0). +Le(i26,cr64_0). +Le(i39,cr64_0). +Le(i52,cr64_0). +Le(i60,cr64_0). +Le(i65,cr64_0). +Le(i70,cr64_0). +Le(i78,cr64_0). +Le(i90,cr64_0). +Le(i91,cr64_0). +Le(i104,cr64_0). +Le(i117,cr64_0). +Le(i130,cr64_0). +Le(i143,cr64_0). +Le(i156,cr64_0). +Le(i169,cr64_0). +Le(i182,cr64_0). +Le(i195,cr64_0). +Le(i208,cr64_0). +Le(i221,cr64_0). +Le(i234,cr64_0). +Le(i247,cr64_0). +Le(i260,cr64_0). +Le(i460,cr64_0). +Le(i530,cr64_0). +Le(i600,cr64_0). +Le(i660,cr64_0). +Le(i670,cr64_0). +Le(i710,cr64_0). +Le(i740,cr64_0). +Le(i810,cr64_0). +Le(i850,cr64_0). +Le(i880,cr64_0). +Le(i890,cr64_0). +Le(i920,cr64_0). +Le(i960,cr64_0). +Le(i990,cr64_0). +Le(i1030,cr64_0). +Le(i1060,cr64_0). +Le(i1100,cr64_0). +Le(i1130,cr64_0). +Le(i1170,cr64_0). +Le(i1200,cr64_0). +Le(i1240,cr64_0). +Le(i1260,cr64_0). +Le(i1270,cr64_0). +Le(i1290,cr64_0). +Le(i1310,cr64_0). +Le(i1320,cr64_0). +Le(i1330,cr64_0). +Le(i1350,cr64_0). +Le(i1360,cr64_0). +Le(i1380,cr64_0). +Le(i1390,cr64_0). +Le(i1420,cr64_0). +Le(i1430,cr64_0). +Le(i1450,cr64_0). +Le(i1460,cr64_0). +Le(i1490,cr64_0). +Le(i1520,cr64_0). +Le(i1530,cr64_0). +Le(cr64_0,i1590). +Le(cr64_0,i1630). +Le(cr64_0,i1660). +Le(cr64_0,i1700). +Le(cr64_0,i1730). +Le(cr64_0,i1760). +Le(cr64_0,i1770). +Le(cr64_0,i1810). +Le(cr64_0,i1840). +Le(cr64_0,i1880). +Le(cr64_0,i1910). +Le(cr64_0,i1950). +Le(cr64_0,i1980). +Le(cr64_0,i2020). +Le(cr64_0,i2050). +Le(cr64_0,i2090). +Le(cr64_0,i2120). +Le(cr64_0,i2160). +Le(cr64_0,i2190). +Le(cr64_0,i2200). +Le(cr64_0,i2230). +Le(cr64_0,i2270). +Le(cr64_0,i2300). +Le(cr64_0,i2340). +Le(cr64_0,i2370). +Le(cr64_0,i2410). +Le(cr64_0,i2420). +Le(cr64_0,i2440). +Le(cr64_0,i2480). +Le(cr64_0,i2510). +Le(cr64_0,i2550). +Le(cr64_0,i2580). +Le(cr64_0,i2620). +Le(cr64_0,i2640). +Le(cr64_0,i2660). +Le(cr64_0,i2730). +Le(cr64_0,i2760). +Le(cr64_0,i2800). +Le(cr64_0,i2830). +Le(cr64_0,i2860). +Le(cr64_0,i2870). +Le(cr64_0,i2940). +Le(cr64_0,i2970). +Le(cr64_0,i3010). +Le(cr64_0,i3040). +Le(cr64_0,i3080). +Le(cr64_0,i3120). +Le(cr64_0,i3150). +Le(cr64_0,i3220). +Le(cr64_0,i3260). +Le(cr64_0,i3290). +Le(cr64_0,i3300). +Le(cr64_0,i3330). +Le(cr64_0,i3400). +Le(cr64_0,i3430). +Le(cr64_0,i3500). +Le(cr64_0,i3520). +Le(cr64_0,i3580). +Le(cr64_0,i3610). +Le(cr64_0,i3650). +Le(cr64_0,i3680). +Le(cr64_0,i3720). +Le(cr64_0,i3740). +Le(cr64_0,i3790). +Le(cr64_0,i3820). +Le(cr64_0,i3860). +Le(cr64_0,i3960). +Le(cr64_0,i4040). +Le(cr64_0,i4140). +Le(cr64_0,i4180). +Le(cr64_0,i4400). +Le(cr64_0,i4620). +Le(cr64_0,i4840). +Le(cr64_0,i5060). +Le(cr64_0,i5280). +Le(cr64_0,i5500). +Le(cr64_0,i5720). +Le(cr64_0,i5940). +Le(cr64_0,i6160). +Le(cr64_0,i6380). +Le(cr64_0,i6600). +Le(cr64_0,i6820). +Le(cr64_0,i7040). +Le(cr64_0,i7260). +Le(cr64_0,i7480). +Le(cr64_0,i7700). +Le(cr64_0,i7920). +Le(cr64_0,i8140). +Le(cr64_0,i8360). +Le(cr64_0,i8580). +Eq(i1560,i1560). +Le(i1560,cr65_0). +Le(cr65_0,i1590). +Le(i-30,cr65_0). +Le(i0,cr65_0). +Le(i13,cr65_0). +Le(i26,cr65_0). +Le(i39,cr65_0). +Le(i52,cr65_0). +Le(i60,cr65_0). +Le(i65,cr65_0). +Le(i70,cr65_0). +Le(i78,cr65_0). +Le(i90,cr65_0). +Le(i91,cr65_0). +Le(i104,cr65_0). +Le(i117,cr65_0). +Le(i130,cr65_0). +Le(i143,cr65_0). +Le(i156,cr65_0). +Le(i169,cr65_0). +Le(i182,cr65_0). +Le(i195,cr65_0). +Le(i208,cr65_0). +Le(i221,cr65_0). +Le(i234,cr65_0). +Le(i247,cr65_0). +Le(i260,cr65_0). +Le(i460,cr65_0). +Le(i530,cr65_0). +Le(i600,cr65_0). +Le(i660,cr65_0). +Le(i670,cr65_0). +Le(i710,cr65_0). +Le(i740,cr65_0). +Le(i810,cr65_0). 
+Le(i850,cr65_0). +Le(i880,cr65_0). +Le(i890,cr65_0). +Le(i920,cr65_0). +Le(i960,cr65_0). +Le(i990,cr65_0). +Le(i1030,cr65_0). +Le(i1060,cr65_0). +Le(i1100,cr65_0). +Le(i1130,cr65_0). +Le(i1170,cr65_0). +Le(i1200,cr65_0). +Le(i1240,cr65_0). +Le(i1260,cr65_0). +Le(i1270,cr65_0). +Le(i1290,cr65_0). +Le(i1310,cr65_0). +Le(i1320,cr65_0). +Le(i1330,cr65_0). +Le(i1350,cr65_0). +Le(i1360,cr65_0). +Le(i1380,cr65_0). +Le(i1390,cr65_0). +Le(i1420,cr65_0). +Le(i1430,cr65_0). +Le(i1450,cr65_0). +Le(i1460,cr65_0). +Le(i1490,cr65_0). +Le(i1520,cr65_0). +Le(i1530,cr65_0). +Le(i1540,cr65_0). +Le(cr65_0,i1630). +Le(cr65_0,i1660). +Le(cr65_0,i1700). +Le(cr65_0,i1730). +Le(cr65_0,i1760). +Le(cr65_0,i1770). +Le(cr65_0,i1810). +Le(cr65_0,i1840). +Le(cr65_0,i1880). +Le(cr65_0,i1910). +Le(cr65_0,i1950). +Le(cr65_0,i1980). +Le(cr65_0,i2020). +Le(cr65_0,i2050). +Le(cr65_0,i2090). +Le(cr65_0,i2120). +Le(cr65_0,i2160). +Le(cr65_0,i2190). +Le(cr65_0,i2200). +Le(cr65_0,i2230). +Le(cr65_0,i2270). +Le(cr65_0,i2300). +Le(cr65_0,i2340). +Le(cr65_0,i2370). +Le(cr65_0,i2410). +Le(cr65_0,i2420). +Le(cr65_0,i2440). +Le(cr65_0,i2480). +Le(cr65_0,i2510). +Le(cr65_0,i2550). +Le(cr65_0,i2580). +Le(cr65_0,i2620). +Le(cr65_0,i2640). +Le(cr65_0,i2660). +Le(cr65_0,i2730). +Le(cr65_0,i2760). +Le(cr65_0,i2800). +Le(cr65_0,i2830). +Le(cr65_0,i2860). +Le(cr65_0,i2870). +Le(cr65_0,i2940). +Le(cr65_0,i2970). +Le(cr65_0,i3010). +Le(cr65_0,i3040). +Le(cr65_0,i3080). +Le(cr65_0,i3120). +Le(cr65_0,i3150). +Le(cr65_0,i3220). +Le(cr65_0,i3260). +Le(cr65_0,i3290). +Le(cr65_0,i3300). +Le(cr65_0,i3330). +Le(cr65_0,i3400). +Le(cr65_0,i3430). +Le(cr65_0,i3500). +Le(cr65_0,i3520). +Le(cr65_0,i3580). +Le(cr65_0,i3610). +Le(cr65_0,i3650). +Le(cr65_0,i3680). +Le(cr65_0,i3720). +Le(cr65_0,i3740). +Le(cr65_0,i3790). +Le(cr65_0,i3820). +Le(cr65_0,i3860). +Le(cr65_0,i3960). +Le(cr65_0,i4040). +Le(cr65_0,i4140). +Le(cr65_0,i4180). +Le(cr65_0,i4400). +Le(cr65_0,i4620). +Le(cr65_0,i4840). +Le(cr65_0,i5060). +Le(cr65_0,i5280). +Le(cr65_0,i5500). +Le(cr65_0,i5720). +Le(cr65_0,i5940). +Le(cr65_0,i6160). +Le(cr65_0,i6380). +Le(cr65_0,i6600). +Le(cr65_0,i6820). +Le(cr65_0,i7040). +Le(cr65_0,i7260). +Le(cr65_0,i7480). +Le(cr65_0,i7700). +Le(cr65_0,i7920). +Le(cr65_0,i8140). +Le(cr65_0,i8360). +Le(cr65_0,i8580). +Eq(i1590,i1590). +Le(i1590,cr66_0). +Le(cr66_0,i1630). +Le(i-30,cr66_0). +Le(i0,cr66_0). +Le(i13,cr66_0). +Le(i26,cr66_0). +Le(i39,cr66_0). +Le(i52,cr66_0). +Le(i60,cr66_0). +Le(i65,cr66_0). +Le(i70,cr66_0). +Le(i78,cr66_0). +Le(i90,cr66_0). +Le(i91,cr66_0). +Le(i104,cr66_0). +Le(i117,cr66_0). +Le(i130,cr66_0). +Le(i143,cr66_0). +Le(i156,cr66_0). +Le(i169,cr66_0). +Le(i182,cr66_0). +Le(i195,cr66_0). +Le(i208,cr66_0). +Le(i221,cr66_0). +Le(i234,cr66_0). +Le(i247,cr66_0). +Le(i260,cr66_0). +Le(i460,cr66_0). +Le(i530,cr66_0). +Le(i600,cr66_0). +Le(i660,cr66_0). +Le(i670,cr66_0). +Le(i710,cr66_0). +Le(i740,cr66_0). +Le(i810,cr66_0). +Le(i850,cr66_0). +Le(i880,cr66_0). +Le(i890,cr66_0). +Le(i920,cr66_0). +Le(i960,cr66_0). +Le(i990,cr66_0). +Le(i1030,cr66_0). +Le(i1060,cr66_0). +Le(i1100,cr66_0). +Le(i1130,cr66_0). +Le(i1170,cr66_0). +Le(i1200,cr66_0). +Le(i1240,cr66_0). +Le(i1260,cr66_0). +Le(i1270,cr66_0). +Le(i1290,cr66_0). +Le(i1310,cr66_0). +Le(i1320,cr66_0). +Le(i1330,cr66_0). +Le(i1350,cr66_0). +Le(i1360,cr66_0). +Le(i1380,cr66_0). +Le(i1390,cr66_0). +Le(i1420,cr66_0). +Le(i1430,cr66_0). +Le(i1450,cr66_0). +Le(i1460,cr66_0). +Le(i1490,cr66_0). +Le(i1520,cr66_0). +Le(i1530,cr66_0). +Le(i1540,cr66_0). +Le(i1560,cr66_0). +Le(cr66_0,i1660). +Le(cr66_0,i1700). 
+Le(cr66_0,i1730). +Le(cr66_0,i1760). +Le(cr66_0,i1770). +Le(cr66_0,i1810). +Le(cr66_0,i1840). +Le(cr66_0,i1880). +Le(cr66_0,i1910). +Le(cr66_0,i1950). +Le(cr66_0,i1980). +Le(cr66_0,i2020). +Le(cr66_0,i2050). +Le(cr66_0,i2090). +Le(cr66_0,i2120). +Le(cr66_0,i2160). +Le(cr66_0,i2190). +Le(cr66_0,i2200). +Le(cr66_0,i2230). +Le(cr66_0,i2270). +Le(cr66_0,i2300). +Le(cr66_0,i2340). +Le(cr66_0,i2370). +Le(cr66_0,i2410). +Le(cr66_0,i2420). +Le(cr66_0,i2440). +Le(cr66_0,i2480). +Le(cr66_0,i2510). +Le(cr66_0,i2550). +Le(cr66_0,i2580). +Le(cr66_0,i2620). +Le(cr66_0,i2640). +Le(cr66_0,i2660). +Le(cr66_0,i2730). +Le(cr66_0,i2760). +Le(cr66_0,i2800). +Le(cr66_0,i2830). +Le(cr66_0,i2860). +Le(cr66_0,i2870). +Le(cr66_0,i2940). +Le(cr66_0,i2970). +Le(cr66_0,i3010). +Le(cr66_0,i3040). +Le(cr66_0,i3080). +Le(cr66_0,i3120). +Le(cr66_0,i3150). +Le(cr66_0,i3220). +Le(cr66_0,i3260). +Le(cr66_0,i3290). +Le(cr66_0,i3300). +Le(cr66_0,i3330). +Le(cr66_0,i3400). +Le(cr66_0,i3430). +Le(cr66_0,i3500). +Le(cr66_0,i3520). +Le(cr66_0,i3580). +Le(cr66_0,i3610). +Le(cr66_0,i3650). +Le(cr66_0,i3680). +Le(cr66_0,i3720). +Le(cr66_0,i3740). +Le(cr66_0,i3790). +Le(cr66_0,i3820). +Le(cr66_0,i3860). +Le(cr66_0,i3960). +Le(cr66_0,i4040). +Le(cr66_0,i4140). +Le(cr66_0,i4180). +Le(cr66_0,i4400). +Le(cr66_0,i4620). +Le(cr66_0,i4840). +Le(cr66_0,i5060). +Le(cr66_0,i5280). +Le(cr66_0,i5500). +Le(cr66_0,i5720). +Le(cr66_0,i5940). +Le(cr66_0,i6160). +Le(cr66_0,i6380). +Le(cr66_0,i6600). +Le(cr66_0,i6820). +Le(cr66_0,i7040). +Le(cr66_0,i7260). +Le(cr66_0,i7480). +Le(cr66_0,i7700). +Le(cr66_0,i7920). +Le(cr66_0,i8140). +Le(cr66_0,i8360). +Le(cr66_0,i8580). +Eq(i1630,i1630). +Le(i1630,cr67_0). +Le(cr67_0,i1660). +Le(i-30,cr67_0). +Le(i0,cr67_0). +Le(i13,cr67_0). +Le(i26,cr67_0). +Le(i39,cr67_0). +Le(i52,cr67_0). +Le(i60,cr67_0). +Le(i65,cr67_0). +Le(i70,cr67_0). +Le(i78,cr67_0). +Le(i90,cr67_0). +Le(i91,cr67_0). +Le(i104,cr67_0). +Le(i117,cr67_0). +Le(i130,cr67_0). +Le(i143,cr67_0). +Le(i156,cr67_0). +Le(i169,cr67_0). +Le(i182,cr67_0). +Le(i195,cr67_0). +Le(i208,cr67_0). +Le(i221,cr67_0). +Le(i234,cr67_0). +Le(i247,cr67_0). +Le(i260,cr67_0). +Le(i460,cr67_0). +Le(i530,cr67_0). +Le(i600,cr67_0). +Le(i660,cr67_0). +Le(i670,cr67_0). +Le(i710,cr67_0). +Le(i740,cr67_0). +Le(i810,cr67_0). +Le(i850,cr67_0). +Le(i880,cr67_0). +Le(i890,cr67_0). +Le(i920,cr67_0). +Le(i960,cr67_0). +Le(i990,cr67_0). +Le(i1030,cr67_0). +Le(i1060,cr67_0). +Le(i1100,cr67_0). +Le(i1130,cr67_0). +Le(i1170,cr67_0). +Le(i1200,cr67_0). +Le(i1240,cr67_0). +Le(i1260,cr67_0). +Le(i1270,cr67_0). +Le(i1290,cr67_0). +Le(i1310,cr67_0). +Le(i1320,cr67_0). +Le(i1330,cr67_0). +Le(i1350,cr67_0). +Le(i1360,cr67_0). +Le(i1380,cr67_0). +Le(i1390,cr67_0). +Le(i1420,cr67_0). +Le(i1430,cr67_0). +Le(i1450,cr67_0). +Le(i1460,cr67_0). +Le(i1490,cr67_0). +Le(i1520,cr67_0). +Le(i1530,cr67_0). +Le(i1540,cr67_0). +Le(i1560,cr67_0). +Le(i1590,cr67_0). +Le(cr67_0,i1700). +Le(cr67_0,i1730). +Le(cr67_0,i1760). +Le(cr67_0,i1770). +Le(cr67_0,i1810). +Le(cr67_0,i1840). +Le(cr67_0,i1880). +Le(cr67_0,i1910). +Le(cr67_0,i1950). +Le(cr67_0,i1980). +Le(cr67_0,i2020). +Le(cr67_0,i2050). +Le(cr67_0,i2090). +Le(cr67_0,i2120). +Le(cr67_0,i2160). +Le(cr67_0,i2190). +Le(cr67_0,i2200). +Le(cr67_0,i2230). +Le(cr67_0,i2270). +Le(cr67_0,i2300). +Le(cr67_0,i2340). +Le(cr67_0,i2370). +Le(cr67_0,i2410). +Le(cr67_0,i2420). +Le(cr67_0,i2440). +Le(cr67_0,i2480). +Le(cr67_0,i2510). +Le(cr67_0,i2550). +Le(cr67_0,i2580). +Le(cr67_0,i2620). +Le(cr67_0,i2640). +Le(cr67_0,i2660). +Le(cr67_0,i2730). +Le(cr67_0,i2760). 
+Le(cr67_0,i2800). +Le(cr67_0,i2830). +Le(cr67_0,i2860). +Le(cr67_0,i2870). +Le(cr67_0,i2940). +Le(cr67_0,i2970). +Le(cr67_0,i3010). +Le(cr67_0,i3040). +Le(cr67_0,i3080). +Le(cr67_0,i3120). +Le(cr67_0,i3150). +Le(cr67_0,i3220). +Le(cr67_0,i3260). +Le(cr67_0,i3290). +Le(cr67_0,i3300). +Le(cr67_0,i3330). +Le(cr67_0,i3400). +Le(cr67_0,i3430). +Le(cr67_0,i3500). +Le(cr67_0,i3520). +Le(cr67_0,i3580). +Le(cr67_0,i3610). +Le(cr67_0,i3650). +Le(cr67_0,i3680). +Le(cr67_0,i3720). +Le(cr67_0,i3740). +Le(cr67_0,i3790). +Le(cr67_0,i3820). +Le(cr67_0,i3860). +Le(cr67_0,i3960). +Le(cr67_0,i4040). +Le(cr67_0,i4140). +Le(cr67_0,i4180). +Le(cr67_0,i4400). +Le(cr67_0,i4620). +Le(cr67_0,i4840). +Le(cr67_0,i5060). +Le(cr67_0,i5280). +Le(cr67_0,i5500). +Le(cr67_0,i5720). +Le(cr67_0,i5940). +Le(cr67_0,i6160). +Le(cr67_0,i6380). +Le(cr67_0,i6600). +Le(cr67_0,i6820). +Le(cr67_0,i7040). +Le(cr67_0,i7260). +Le(cr67_0,i7480). +Le(cr67_0,i7700). +Le(cr67_0,i7920). +Le(cr67_0,i8140). +Le(cr67_0,i8360). +Le(cr67_0,i8580). +Eq(i1660,i1660). +Le(i1660,cr68_0). +Le(cr68_0,i1700). +Le(i-30,cr68_0). +Le(i0,cr68_0). +Le(i13,cr68_0). +Le(i26,cr68_0). +Le(i39,cr68_0). +Le(i52,cr68_0). +Le(i60,cr68_0). +Le(i65,cr68_0). +Le(i70,cr68_0). +Le(i78,cr68_0). +Le(i90,cr68_0). +Le(i91,cr68_0). +Le(i104,cr68_0). +Le(i117,cr68_0). +Le(i130,cr68_0). +Le(i143,cr68_0). +Le(i156,cr68_0). +Le(i169,cr68_0). +Le(i182,cr68_0). +Le(i195,cr68_0). +Le(i208,cr68_0). +Le(i221,cr68_0). +Le(i234,cr68_0). +Le(i247,cr68_0). +Le(i260,cr68_0). +Le(i460,cr68_0). +Le(i530,cr68_0). +Le(i600,cr68_0). +Le(i660,cr68_0). +Le(i670,cr68_0). +Le(i710,cr68_0). +Le(i740,cr68_0). +Le(i810,cr68_0). +Le(i850,cr68_0). +Le(i880,cr68_0). +Le(i890,cr68_0). +Le(i920,cr68_0). +Le(i960,cr68_0). +Le(i990,cr68_0). +Le(i1030,cr68_0). +Le(i1060,cr68_0). +Le(i1100,cr68_0). +Le(i1130,cr68_0). +Le(i1170,cr68_0). +Le(i1200,cr68_0). +Le(i1240,cr68_0). +Le(i1260,cr68_0). +Le(i1270,cr68_0). +Le(i1290,cr68_0). +Le(i1310,cr68_0). +Le(i1320,cr68_0). +Le(i1330,cr68_0). +Le(i1350,cr68_0). +Le(i1360,cr68_0). +Le(i1380,cr68_0). +Le(i1390,cr68_0). +Le(i1420,cr68_0). +Le(i1430,cr68_0). +Le(i1450,cr68_0). +Le(i1460,cr68_0). +Le(i1490,cr68_0). +Le(i1520,cr68_0). +Le(i1530,cr68_0). +Le(i1540,cr68_0). +Le(i1560,cr68_0). +Le(i1590,cr68_0). +Le(i1630,cr68_0). +Le(cr68_0,i1730). +Le(cr68_0,i1760). +Le(cr68_0,i1770). +Le(cr68_0,i1810). +Le(cr68_0,i1840). +Le(cr68_0,i1880). +Le(cr68_0,i1910). +Le(cr68_0,i1950). +Le(cr68_0,i1980). +Le(cr68_0,i2020). +Le(cr68_0,i2050). +Le(cr68_0,i2090). +Le(cr68_0,i2120). +Le(cr68_0,i2160). +Le(cr68_0,i2190). +Le(cr68_0,i2200). +Le(cr68_0,i2230). +Le(cr68_0,i2270). +Le(cr68_0,i2300). +Le(cr68_0,i2340). +Le(cr68_0,i2370). +Le(cr68_0,i2410). +Le(cr68_0,i2420). +Le(cr68_0,i2440). +Le(cr68_0,i2480). +Le(cr68_0,i2510). +Le(cr68_0,i2550). +Le(cr68_0,i2580). +Le(cr68_0,i2620). +Le(cr68_0,i2640). +Le(cr68_0,i2660). +Le(cr68_0,i2730). +Le(cr68_0,i2760). +Le(cr68_0,i2800). +Le(cr68_0,i2830). +Le(cr68_0,i2860). +Le(cr68_0,i2870). +Le(cr68_0,i2940). +Le(cr68_0,i2970). +Le(cr68_0,i3010). +Le(cr68_0,i3040). +Le(cr68_0,i3080). +Le(cr68_0,i3120). +Le(cr68_0,i3150). +Le(cr68_0,i3220). +Le(cr68_0,i3260). +Le(cr68_0,i3290). +Le(cr68_0,i3300). +Le(cr68_0,i3330). +Le(cr68_0,i3400). +Le(cr68_0,i3430). +Le(cr68_0,i3500). +Le(cr68_0,i3520). +Le(cr68_0,i3580). +Le(cr68_0,i3610). +Le(cr68_0,i3650). +Le(cr68_0,i3680). +Le(cr68_0,i3720). +Le(cr68_0,i3740). +Le(cr68_0,i3790). +Le(cr68_0,i3820). +Le(cr68_0,i3860). +Le(cr68_0,i3960). +Le(cr68_0,i4040). +Le(cr68_0,i4140). +Le(cr68_0,i4180). 
+Le(cr68_0,i4400). +Le(cr68_0,i4620). +Le(cr68_0,i4840). +Le(cr68_0,i5060). +Le(cr68_0,i5280). +Le(cr68_0,i5500). +Le(cr68_0,i5720). +Le(cr68_0,i5940). +Le(cr68_0,i6160). +Le(cr68_0,i6380). +Le(cr68_0,i6600). +Le(cr68_0,i6820). +Le(cr68_0,i7040). +Le(cr68_0,i7260). +Le(cr68_0,i7480). +Le(cr68_0,i7700). +Le(cr68_0,i7920). +Le(cr68_0,i8140). +Le(cr68_0,i8360). +Le(cr68_0,i8580). +Eq(i1700,i1700). +Le(i1700,cr69_0). +Le(cr69_0,i1730). +Le(i-30,cr69_0). +Le(i0,cr69_0). +Le(i13,cr69_0). +Le(i26,cr69_0). +Le(i39,cr69_0). +Le(i52,cr69_0). +Le(i60,cr69_0). +Le(i65,cr69_0). +Le(i70,cr69_0). +Le(i78,cr69_0). +Le(i90,cr69_0). +Le(i91,cr69_0). +Le(i104,cr69_0). +Le(i117,cr69_0). +Le(i130,cr69_0). +Le(i143,cr69_0). +Le(i156,cr69_0). +Le(i169,cr69_0). +Le(i182,cr69_0). +Le(i195,cr69_0). +Le(i208,cr69_0). +Le(i221,cr69_0). +Le(i234,cr69_0). +Le(i247,cr69_0). +Le(i260,cr69_0). +Le(i460,cr69_0). +Le(i530,cr69_0). +Le(i600,cr69_0). +Le(i660,cr69_0). +Le(i670,cr69_0). +Le(i710,cr69_0). +Le(i740,cr69_0). +Le(i810,cr69_0). +Le(i850,cr69_0). +Le(i880,cr69_0). +Le(i890,cr69_0). +Le(i920,cr69_0). +Le(i960,cr69_0). +Le(i990,cr69_0). +Le(i1030,cr69_0). +Le(i1060,cr69_0). +Le(i1100,cr69_0). +Le(i1130,cr69_0). +Le(i1170,cr69_0). +Le(i1200,cr69_0). +Le(i1240,cr69_0). +Le(i1260,cr69_0). +Le(i1270,cr69_0). +Le(i1290,cr69_0). +Le(i1310,cr69_0). +Le(i1320,cr69_0). +Le(i1330,cr69_0). +Le(i1350,cr69_0). +Le(i1360,cr69_0). +Le(i1380,cr69_0). +Le(i1390,cr69_0). +Le(i1420,cr69_0). +Le(i1430,cr69_0). +Le(i1450,cr69_0). +Le(i1460,cr69_0). +Le(i1490,cr69_0). +Le(i1520,cr69_0). +Le(i1530,cr69_0). +Le(i1540,cr69_0). +Le(i1560,cr69_0). +Le(i1590,cr69_0). +Le(i1630,cr69_0). +Le(i1660,cr69_0). +Le(cr69_0,i1760). +Le(cr69_0,i1770). +Le(cr69_0,i1810). +Le(cr69_0,i1840). +Le(cr69_0,i1880). +Le(cr69_0,i1910). +Le(cr69_0,i1950). +Le(cr69_0,i1980). +Le(cr69_0,i2020). +Le(cr69_0,i2050). +Le(cr69_0,i2090). +Le(cr69_0,i2120). +Le(cr69_0,i2160). +Le(cr69_0,i2190). +Le(cr69_0,i2200). +Le(cr69_0,i2230). +Le(cr69_0,i2270). +Le(cr69_0,i2300). +Le(cr69_0,i2340). +Le(cr69_0,i2370). +Le(cr69_0,i2410). +Le(cr69_0,i2420). +Le(cr69_0,i2440). +Le(cr69_0,i2480). +Le(cr69_0,i2510). +Le(cr69_0,i2550). +Le(cr69_0,i2580). +Le(cr69_0,i2620). +Le(cr69_0,i2640). +Le(cr69_0,i2660). +Le(cr69_0,i2730). +Le(cr69_0,i2760). +Le(cr69_0,i2800). +Le(cr69_0,i2830). +Le(cr69_0,i2860). +Le(cr69_0,i2870). +Le(cr69_0,i2940). +Le(cr69_0,i2970). +Le(cr69_0,i3010). +Le(cr69_0,i3040). +Le(cr69_0,i3080). +Le(cr69_0,i3120). +Le(cr69_0,i3150). +Le(cr69_0,i3220). +Le(cr69_0,i3260). +Le(cr69_0,i3290). +Le(cr69_0,i3300). +Le(cr69_0,i3330). +Le(cr69_0,i3400). +Le(cr69_0,i3430). +Le(cr69_0,i3500). +Le(cr69_0,i3520). +Le(cr69_0,i3580). +Le(cr69_0,i3610). +Le(cr69_0,i3650). +Le(cr69_0,i3680). +Le(cr69_0,i3720). +Le(cr69_0,i3740). +Le(cr69_0,i3790). +Le(cr69_0,i3820). +Le(cr69_0,i3860). +Le(cr69_0,i3960). +Le(cr69_0,i4040). +Le(cr69_0,i4140). +Le(cr69_0,i4180). +Le(cr69_0,i4400). +Le(cr69_0,i4620). +Le(cr69_0,i4840). +Le(cr69_0,i5060). +Le(cr69_0,i5280). +Le(cr69_0,i5500). +Le(cr69_0,i5720). +Le(cr69_0,i5940). +Le(cr69_0,i6160). +Le(cr69_0,i6380). +Le(cr69_0,i6600). +Le(cr69_0,i6820). +Le(cr69_0,i7040). +Le(cr69_0,i7260). +Le(cr69_0,i7480). +Le(cr69_0,i7700). +Le(cr69_0,i7920). +Le(cr69_0,i8140). +Le(cr69_0,i8360). +Le(cr69_0,i8580). +Eq(i1730,i1730). +Le(i1730,cr70_0). +Le(cr70_0,i1760). +Le(i-30,cr70_0). +Le(i0,cr70_0). +Le(i13,cr70_0). +Le(i26,cr70_0). +Le(i39,cr70_0). +Le(i52,cr70_0). +Le(i60,cr70_0). +Le(i65,cr70_0). +Le(i70,cr70_0). +Le(i78,cr70_0). +Le(i90,cr70_0). +Le(i91,cr70_0). 
+Le(i104,cr70_0). +Le(i117,cr70_0). +Le(i130,cr70_0). +Le(i143,cr70_0). +Le(i156,cr70_0). +Le(i169,cr70_0). +Le(i182,cr70_0). +Le(i195,cr70_0). +Le(i208,cr70_0). +Le(i221,cr70_0). +Le(i234,cr70_0). +Le(i247,cr70_0). +Le(i260,cr70_0). +Le(i460,cr70_0). +Le(i530,cr70_0). +Le(i600,cr70_0). +Le(i660,cr70_0). +Le(i670,cr70_0). +Le(i710,cr70_0). +Le(i740,cr70_0). +Le(i810,cr70_0). +Le(i850,cr70_0). +Le(i880,cr70_0). +Le(i890,cr70_0). +Le(i920,cr70_0). +Le(i960,cr70_0). +Le(i990,cr70_0). +Le(i1030,cr70_0). +Le(i1060,cr70_0). +Le(i1100,cr70_0). +Le(i1130,cr70_0). +Le(i1170,cr70_0). +Le(i1200,cr70_0). +Le(i1240,cr70_0). +Le(i1260,cr70_0). +Le(i1270,cr70_0). +Le(i1290,cr70_0). +Le(i1310,cr70_0). +Le(i1320,cr70_0). +Le(i1330,cr70_0). +Le(i1350,cr70_0). +Le(i1360,cr70_0). +Le(i1380,cr70_0). +Le(i1390,cr70_0). +Le(i1420,cr70_0). +Le(i1430,cr70_0). +Le(i1450,cr70_0). +Le(i1460,cr70_0). +Le(i1490,cr70_0). +Le(i1520,cr70_0). +Le(i1530,cr70_0). +Le(i1540,cr70_0). +Le(i1560,cr70_0). +Le(i1590,cr70_0). +Le(i1630,cr70_0). +Le(i1660,cr70_0). +Le(i1700,cr70_0). +Le(cr70_0,i1770). +Le(cr70_0,i1810). +Le(cr70_0,i1840). +Le(cr70_0,i1880). +Le(cr70_0,i1910). +Le(cr70_0,i1950). +Le(cr70_0,i1980). +Le(cr70_0,i2020). +Le(cr70_0,i2050). +Le(cr70_0,i2090). +Le(cr70_0,i2120). +Le(cr70_0,i2160). +Le(cr70_0,i2190). +Le(cr70_0,i2200). +Le(cr70_0,i2230). +Le(cr70_0,i2270). +Le(cr70_0,i2300). +Le(cr70_0,i2340). +Le(cr70_0,i2370). +Le(cr70_0,i2410). +Le(cr70_0,i2420). +Le(cr70_0,i2440). +Le(cr70_0,i2480). +Le(cr70_0,i2510). +Le(cr70_0,i2550). +Le(cr70_0,i2580). +Le(cr70_0,i2620). +Le(cr70_0,i2640). +Le(cr70_0,i2660). +Le(cr70_0,i2730). +Le(cr70_0,i2760). +Le(cr70_0,i2800). +Le(cr70_0,i2830). +Le(cr70_0,i2860). +Le(cr70_0,i2870). +Le(cr70_0,i2940). +Le(cr70_0,i2970). +Le(cr70_0,i3010). +Le(cr70_0,i3040). +Le(cr70_0,i3080). +Le(cr70_0,i3120). +Le(cr70_0,i3150). +Le(cr70_0,i3220). +Le(cr70_0,i3260). +Le(cr70_0,i3290). +Le(cr70_0,i3300). +Le(cr70_0,i3330). +Le(cr70_0,i3400). +Le(cr70_0,i3430). +Le(cr70_0,i3500). +Le(cr70_0,i3520). +Le(cr70_0,i3580). +Le(cr70_0,i3610). +Le(cr70_0,i3650). +Le(cr70_0,i3680). +Le(cr70_0,i3720). +Le(cr70_0,i3740). +Le(cr70_0,i3790). +Le(cr70_0,i3820). +Le(cr70_0,i3860). +Le(cr70_0,i3960). +Le(cr70_0,i4040). +Le(cr70_0,i4140). +Le(cr70_0,i4180). +Le(cr70_0,i4400). +Le(cr70_0,i4620). +Le(cr70_0,i4840). +Le(cr70_0,i5060). +Le(cr70_0,i5280). +Le(cr70_0,i5500). +Le(cr70_0,i5720). +Le(cr70_0,i5940). +Le(cr70_0,i6160). +Le(cr70_0,i6380). +Le(cr70_0,i6600). +Le(cr70_0,i6820). +Le(cr70_0,i7040). +Le(cr70_0,i7260). +Le(cr70_0,i7480). +Le(cr70_0,i7700). +Le(cr70_0,i7920). +Le(cr70_0,i8140). +Le(cr70_0,i8360). +Le(cr70_0,i8580). +Eq(i1760,i1760). +Le(i1760,cr71_0). +Le(cr71_0,i1770). +Le(i-30,cr71_0). +Le(i0,cr71_0). +Le(i13,cr71_0). +Le(i26,cr71_0). +Le(i39,cr71_0). +Le(i52,cr71_0). +Le(i60,cr71_0). +Le(i65,cr71_0). +Le(i70,cr71_0). +Le(i78,cr71_0). +Le(i90,cr71_0). +Le(i91,cr71_0). +Le(i104,cr71_0). +Le(i117,cr71_0). +Le(i130,cr71_0). +Le(i143,cr71_0). +Le(i156,cr71_0). +Le(i169,cr71_0). +Le(i182,cr71_0). +Le(i195,cr71_0). +Le(i208,cr71_0). +Le(i221,cr71_0). +Le(i234,cr71_0). +Le(i247,cr71_0). +Le(i260,cr71_0). +Le(i460,cr71_0). +Le(i530,cr71_0). +Le(i600,cr71_0). +Le(i660,cr71_0). +Le(i670,cr71_0). +Le(i710,cr71_0). +Le(i740,cr71_0). +Le(i810,cr71_0). +Le(i850,cr71_0). +Le(i880,cr71_0). +Le(i890,cr71_0). +Le(i920,cr71_0). +Le(i960,cr71_0). +Le(i990,cr71_0). +Le(i1030,cr71_0). +Le(i1060,cr71_0). +Le(i1100,cr71_0). +Le(i1130,cr71_0). +Le(i1170,cr71_0). +Le(i1200,cr71_0). +Le(i1240,cr71_0). +Le(i1260,cr71_0). 
+Le(i1270,cr71_0). +Le(i1290,cr71_0). +Le(i1310,cr71_0). +Le(i1320,cr71_0). +Le(i1330,cr71_0). +Le(i1350,cr71_0). +Le(i1360,cr71_0). +Le(i1380,cr71_0). +Le(i1390,cr71_0). +Le(i1420,cr71_0). +Le(i1430,cr71_0). +Le(i1450,cr71_0). +Le(i1460,cr71_0). +Le(i1490,cr71_0). +Le(i1520,cr71_0). +Le(i1530,cr71_0). +Le(i1540,cr71_0). +Le(i1560,cr71_0). +Le(i1590,cr71_0). +Le(i1630,cr71_0). +Le(i1660,cr71_0). +Le(i1700,cr71_0). +Le(i1730,cr71_0). +Le(cr71_0,i1810). +Le(cr71_0,i1840). +Le(cr71_0,i1880). +Le(cr71_0,i1910). +Le(cr71_0,i1950). +Le(cr71_0,i1980). +Le(cr71_0,i2020). +Le(cr71_0,i2050). +Le(cr71_0,i2090). +Le(cr71_0,i2120). +Le(cr71_0,i2160). +Le(cr71_0,i2190). +Le(cr71_0,i2200). +Le(cr71_0,i2230). +Le(cr71_0,i2270). +Le(cr71_0,i2300). +Le(cr71_0,i2340). +Le(cr71_0,i2370). +Le(cr71_0,i2410). +Le(cr71_0,i2420). +Le(cr71_0,i2440). +Le(cr71_0,i2480). +Le(cr71_0,i2510). +Le(cr71_0,i2550). +Le(cr71_0,i2580). +Le(cr71_0,i2620). +Le(cr71_0,i2640). +Le(cr71_0,i2660). +Le(cr71_0,i2730). +Le(cr71_0,i2760). +Le(cr71_0,i2800). +Le(cr71_0,i2830). +Le(cr71_0,i2860). +Le(cr71_0,i2870). +Le(cr71_0,i2940). +Le(cr71_0,i2970). +Le(cr71_0,i3010). +Le(cr71_0,i3040). +Le(cr71_0,i3080). +Le(cr71_0,i3120). +Le(cr71_0,i3150). +Le(cr71_0,i3220). +Le(cr71_0,i3260). +Le(cr71_0,i3290). +Le(cr71_0,i3300). +Le(cr71_0,i3330). +Le(cr71_0,i3400). +Le(cr71_0,i3430). +Le(cr71_0,i3500). +Le(cr71_0,i3520). +Le(cr71_0,i3580). +Le(cr71_0,i3610). +Le(cr71_0,i3650). +Le(cr71_0,i3680). +Le(cr71_0,i3720). +Le(cr71_0,i3740). +Le(cr71_0,i3790). +Le(cr71_0,i3820). +Le(cr71_0,i3860). +Le(cr71_0,i3960). +Le(cr71_0,i4040). +Le(cr71_0,i4140). +Le(cr71_0,i4180). +Le(cr71_0,i4400). +Le(cr71_0,i4620). +Le(cr71_0,i4840). +Le(cr71_0,i5060). +Le(cr71_0,i5280). +Le(cr71_0,i5500). +Le(cr71_0,i5720). +Le(cr71_0,i5940). +Le(cr71_0,i6160). +Le(cr71_0,i6380). +Le(cr71_0,i6600). +Le(cr71_0,i6820). +Le(cr71_0,i7040). +Le(cr71_0,i7260). +Le(cr71_0,i7480). +Le(cr71_0,i7700). +Le(cr71_0,i7920). +Le(cr71_0,i8140). +Le(cr71_0,i8360). +Le(cr71_0,i8580). +Eq(i1770,i1770). +Le(i1770,cr72_0). +Le(cr72_0,i1810). +Le(i-30,cr72_0). +Le(i0,cr72_0). +Le(i13,cr72_0). +Le(i26,cr72_0). +Le(i39,cr72_0). +Le(i52,cr72_0). +Le(i60,cr72_0). +Le(i65,cr72_0). +Le(i70,cr72_0). +Le(i78,cr72_0). +Le(i90,cr72_0). +Le(i91,cr72_0). +Le(i104,cr72_0). +Le(i117,cr72_0). +Le(i130,cr72_0). +Le(i143,cr72_0). +Le(i156,cr72_0). +Le(i169,cr72_0). +Le(i182,cr72_0). +Le(i195,cr72_0). +Le(i208,cr72_0). +Le(i221,cr72_0). +Le(i234,cr72_0). +Le(i247,cr72_0). +Le(i260,cr72_0). +Le(i460,cr72_0). +Le(i530,cr72_0). +Le(i600,cr72_0). +Le(i660,cr72_0). +Le(i670,cr72_0). +Le(i710,cr72_0). +Le(i740,cr72_0). +Le(i810,cr72_0). +Le(i850,cr72_0). +Le(i880,cr72_0). +Le(i890,cr72_0). +Le(i920,cr72_0). +Le(i960,cr72_0). +Le(i990,cr72_0). +Le(i1030,cr72_0). +Le(i1060,cr72_0). +Le(i1100,cr72_0). +Le(i1130,cr72_0). +Le(i1170,cr72_0). +Le(i1200,cr72_0). +Le(i1240,cr72_0). +Le(i1260,cr72_0). +Le(i1270,cr72_0). +Le(i1290,cr72_0). +Le(i1310,cr72_0). +Le(i1320,cr72_0). +Le(i1330,cr72_0). +Le(i1350,cr72_0). +Le(i1360,cr72_0). +Le(i1380,cr72_0). +Le(i1390,cr72_0). +Le(i1420,cr72_0). +Le(i1430,cr72_0). +Le(i1450,cr72_0). +Le(i1460,cr72_0). +Le(i1490,cr72_0). +Le(i1520,cr72_0). +Le(i1530,cr72_0). +Le(i1540,cr72_0). +Le(i1560,cr72_0). +Le(i1590,cr72_0). +Le(i1630,cr72_0). +Le(i1660,cr72_0). +Le(i1700,cr72_0). +Le(i1730,cr72_0). +Le(i1760,cr72_0). +Le(cr72_0,i1840). +Le(cr72_0,i1880). +Le(cr72_0,i1910). +Le(cr72_0,i1950). +Le(cr72_0,i1980). +Le(cr72_0,i2020). +Le(cr72_0,i2050). +Le(cr72_0,i2090). +Le(cr72_0,i2120). 
+Le(cr72_0,i2160). +Le(cr72_0,i2190). +Le(cr72_0,i2200). +Le(cr72_0,i2230). +Le(cr72_0,i2270). +Le(cr72_0,i2300). +Le(cr72_0,i2340). +Le(cr72_0,i2370). +Le(cr72_0,i2410). +Le(cr72_0,i2420). +Le(cr72_0,i2440). +Le(cr72_0,i2480). +Le(cr72_0,i2510). +Le(cr72_0,i2550). +Le(cr72_0,i2580). +Le(cr72_0,i2620). +Le(cr72_0,i2640). +Le(cr72_0,i2660). +Le(cr72_0,i2730). +Le(cr72_0,i2760). +Le(cr72_0,i2800). +Le(cr72_0,i2830). +Le(cr72_0,i2860). +Le(cr72_0,i2870). +Le(cr72_0,i2940). +Le(cr72_0,i2970). +Le(cr72_0,i3010). +Le(cr72_0,i3040). +Le(cr72_0,i3080). +Le(cr72_0,i3120). +Le(cr72_0,i3150). +Le(cr72_0,i3220). +Le(cr72_0,i3260). +Le(cr72_0,i3290). +Le(cr72_0,i3300). +Le(cr72_0,i3330). +Le(cr72_0,i3400). +Le(cr72_0,i3430). +Le(cr72_0,i3500). +Le(cr72_0,i3520). +Le(cr72_0,i3580). +Le(cr72_0,i3610). +Le(cr72_0,i3650). +Le(cr72_0,i3680). +Le(cr72_0,i3720). +Le(cr72_0,i3740). +Le(cr72_0,i3790). +Le(cr72_0,i3820). +Le(cr72_0,i3860). +Le(cr72_0,i3960). +Le(cr72_0,i4040). +Le(cr72_0,i4140). +Le(cr72_0,i4180). +Le(cr72_0,i4400). +Le(cr72_0,i4620). +Le(cr72_0,i4840). +Le(cr72_0,i5060). +Le(cr72_0,i5280). +Le(cr72_0,i5500). +Le(cr72_0,i5720). +Le(cr72_0,i5940). +Le(cr72_0,i6160). +Le(cr72_0,i6380). +Le(cr72_0,i6600). +Le(cr72_0,i6820). +Le(cr72_0,i7040). +Le(cr72_0,i7260). +Le(cr72_0,i7480). +Le(cr72_0,i7700). +Le(cr72_0,i7920). +Le(cr72_0,i8140). +Le(cr72_0,i8360). +Le(cr72_0,i8580). +Eq(i1810,i1810). +Le(i1810,cr73_0). +Le(cr73_0,i1840). +Le(i-30,cr73_0). +Le(i0,cr73_0). +Le(i13,cr73_0). +Le(i26,cr73_0). +Le(i39,cr73_0). +Le(i52,cr73_0). +Le(i60,cr73_0). +Le(i65,cr73_0). +Le(i70,cr73_0). +Le(i78,cr73_0). +Le(i90,cr73_0). +Le(i91,cr73_0). +Le(i104,cr73_0). +Le(i117,cr73_0). +Le(i130,cr73_0). +Le(i143,cr73_0). +Le(i156,cr73_0). +Le(i169,cr73_0). +Le(i182,cr73_0). +Le(i195,cr73_0). +Le(i208,cr73_0). +Le(i221,cr73_0). +Le(i234,cr73_0). +Le(i247,cr73_0). +Le(i260,cr73_0). +Le(i460,cr73_0). +Le(i530,cr73_0). +Le(i600,cr73_0). +Le(i660,cr73_0). +Le(i670,cr73_0). +Le(i710,cr73_0). +Le(i740,cr73_0). +Le(i810,cr73_0). +Le(i850,cr73_0). +Le(i880,cr73_0). +Le(i890,cr73_0). +Le(i920,cr73_0). +Le(i960,cr73_0). +Le(i990,cr73_0). +Le(i1030,cr73_0). +Le(i1060,cr73_0). +Le(i1100,cr73_0). +Le(i1130,cr73_0). +Le(i1170,cr73_0). +Le(i1200,cr73_0). +Le(i1240,cr73_0). +Le(i1260,cr73_0). +Le(i1270,cr73_0). +Le(i1290,cr73_0). +Le(i1310,cr73_0). +Le(i1320,cr73_0). +Le(i1330,cr73_0). +Le(i1350,cr73_0). +Le(i1360,cr73_0). +Le(i1380,cr73_0). +Le(i1390,cr73_0). +Le(i1420,cr73_0). +Le(i1430,cr73_0). +Le(i1450,cr73_0). +Le(i1460,cr73_0). +Le(i1490,cr73_0). +Le(i1520,cr73_0). +Le(i1530,cr73_0). +Le(i1540,cr73_0). +Le(i1560,cr73_0). +Le(i1590,cr73_0). +Le(i1630,cr73_0). +Le(i1660,cr73_0). +Le(i1700,cr73_0). +Le(i1730,cr73_0). +Le(i1760,cr73_0). +Le(i1770,cr73_0). +Le(cr73_0,i1880). +Le(cr73_0,i1910). +Le(cr73_0,i1950). +Le(cr73_0,i1980). +Le(cr73_0,i2020). +Le(cr73_0,i2050). +Le(cr73_0,i2090). +Le(cr73_0,i2120). +Le(cr73_0,i2160). +Le(cr73_0,i2190). +Le(cr73_0,i2200). +Le(cr73_0,i2230). +Le(cr73_0,i2270). +Le(cr73_0,i2300). +Le(cr73_0,i2340). +Le(cr73_0,i2370). +Le(cr73_0,i2410). +Le(cr73_0,i2420). +Le(cr73_0,i2440). +Le(cr73_0,i2480). +Le(cr73_0,i2510). +Le(cr73_0,i2550). +Le(cr73_0,i2580). +Le(cr73_0,i2620). +Le(cr73_0,i2640). +Le(cr73_0,i2660). +Le(cr73_0,i2730). +Le(cr73_0,i2760). +Le(cr73_0,i2800). +Le(cr73_0,i2830). +Le(cr73_0,i2860). +Le(cr73_0,i2870). +Le(cr73_0,i2940). +Le(cr73_0,i2970). +Le(cr73_0,i3010). +Le(cr73_0,i3040). +Le(cr73_0,i3080). +Le(cr73_0,i3120). +Le(cr73_0,i3150). +Le(cr73_0,i3220). +Le(cr73_0,i3260). 
+Le(cr73_0,i3290). +Le(cr73_0,i3300). +Le(cr73_0,i3330). +Le(cr73_0,i3400). +Le(cr73_0,i3430). +Le(cr73_0,i3500). +Le(cr73_0,i3520). +Le(cr73_0,i3580). +Le(cr73_0,i3610). +Le(cr73_0,i3650). +Le(cr73_0,i3680). +Le(cr73_0,i3720). +Le(cr73_0,i3740). +Le(cr73_0,i3790). +Le(cr73_0,i3820). +Le(cr73_0,i3860). +Le(cr73_0,i3960). +Le(cr73_0,i4040). +Le(cr73_0,i4140). +Le(cr73_0,i4180). +Le(cr73_0,i4400). +Le(cr73_0,i4620). +Le(cr73_0,i4840). +Le(cr73_0,i5060). +Le(cr73_0,i5280). +Le(cr73_0,i5500). +Le(cr73_0,i5720). +Le(cr73_0,i5940). +Le(cr73_0,i6160). +Le(cr73_0,i6380). +Le(cr73_0,i6600). +Le(cr73_0,i6820). +Le(cr73_0,i7040). +Le(cr73_0,i7260). +Le(cr73_0,i7480). +Le(cr73_0,i7700). +Le(cr73_0,i7920). +Le(cr73_0,i8140). +Le(cr73_0,i8360). +Le(cr73_0,i8580). +Eq(i1840,i1840). +Le(i1840,cr74_0). +Le(cr74_0,i1880). +Le(i-30,cr74_0). +Le(i0,cr74_0). +Le(i13,cr74_0). +Le(i26,cr74_0). +Le(i39,cr74_0). +Le(i52,cr74_0). +Le(i60,cr74_0). +Le(i65,cr74_0). +Le(i70,cr74_0). +Le(i78,cr74_0). +Le(i90,cr74_0). +Le(i91,cr74_0). +Le(i104,cr74_0). +Le(i117,cr74_0). +Le(i130,cr74_0). +Le(i143,cr74_0). +Le(i156,cr74_0). +Le(i169,cr74_0). +Le(i182,cr74_0). +Le(i195,cr74_0). +Le(i208,cr74_0). +Le(i221,cr74_0). +Le(i234,cr74_0). +Le(i247,cr74_0). +Le(i260,cr74_0). +Le(i460,cr74_0). +Le(i530,cr74_0). +Le(i600,cr74_0). +Le(i660,cr74_0). +Le(i670,cr74_0). +Le(i710,cr74_0). +Le(i740,cr74_0). +Le(i810,cr74_0). +Le(i850,cr74_0). +Le(i880,cr74_0). +Le(i890,cr74_0). +Le(i920,cr74_0). +Le(i960,cr74_0). +Le(i990,cr74_0). +Le(i1030,cr74_0). +Le(i1060,cr74_0). +Le(i1100,cr74_0). +Le(i1130,cr74_0). +Le(i1170,cr74_0). +Le(i1200,cr74_0). +Le(i1240,cr74_0). +Le(i1260,cr74_0). +Le(i1270,cr74_0). +Le(i1290,cr74_0). +Le(i1310,cr74_0). +Le(i1320,cr74_0). +Le(i1330,cr74_0). +Le(i1350,cr74_0). +Le(i1360,cr74_0). +Le(i1380,cr74_0). +Le(i1390,cr74_0). +Le(i1420,cr74_0). +Le(i1430,cr74_0). +Le(i1450,cr74_0). +Le(i1460,cr74_0). +Le(i1490,cr74_0). +Le(i1520,cr74_0). +Le(i1530,cr74_0). +Le(i1540,cr74_0). +Le(i1560,cr74_0). +Le(i1590,cr74_0). +Le(i1630,cr74_0). +Le(i1660,cr74_0). +Le(i1700,cr74_0). +Le(i1730,cr74_0). +Le(i1760,cr74_0). +Le(i1770,cr74_0). +Le(i1810,cr74_0). +Le(cr74_0,i1910). +Le(cr74_0,i1950). +Le(cr74_0,i1980). +Le(cr74_0,i2020). +Le(cr74_0,i2050). +Le(cr74_0,i2090). +Le(cr74_0,i2120). +Le(cr74_0,i2160). +Le(cr74_0,i2190). +Le(cr74_0,i2200). +Le(cr74_0,i2230). +Le(cr74_0,i2270). +Le(cr74_0,i2300). +Le(cr74_0,i2340). +Le(cr74_0,i2370). +Le(cr74_0,i2410). +Le(cr74_0,i2420). +Le(cr74_0,i2440). +Le(cr74_0,i2480). +Le(cr74_0,i2510). +Le(cr74_0,i2550). +Le(cr74_0,i2580). +Le(cr74_0,i2620). +Le(cr74_0,i2640). +Le(cr74_0,i2660). +Le(cr74_0,i2730). +Le(cr74_0,i2760). +Le(cr74_0,i2800). +Le(cr74_0,i2830). +Le(cr74_0,i2860). +Le(cr74_0,i2870). +Le(cr74_0,i2940). +Le(cr74_0,i2970). +Le(cr74_0,i3010). +Le(cr74_0,i3040). +Le(cr74_0,i3080). +Le(cr74_0,i3120). +Le(cr74_0,i3150). +Le(cr74_0,i3220). +Le(cr74_0,i3260). +Le(cr74_0,i3290). +Le(cr74_0,i3300). +Le(cr74_0,i3330). +Le(cr74_0,i3400). +Le(cr74_0,i3430). +Le(cr74_0,i3500). +Le(cr74_0,i3520). +Le(cr74_0,i3580). +Le(cr74_0,i3610). +Le(cr74_0,i3650). +Le(cr74_0,i3680). +Le(cr74_0,i3720). +Le(cr74_0,i3740). +Le(cr74_0,i3790). +Le(cr74_0,i3820). +Le(cr74_0,i3860). +Le(cr74_0,i3960). +Le(cr74_0,i4040). +Le(cr74_0,i4140). +Le(cr74_0,i4180). +Le(cr74_0,i4400). +Le(cr74_0,i4620). +Le(cr74_0,i4840). +Le(cr74_0,i5060). +Le(cr74_0,i5280). +Le(cr74_0,i5500). +Le(cr74_0,i5720). +Le(cr74_0,i5940). +Le(cr74_0,i6160). +Le(cr74_0,i6380). +Le(cr74_0,i6600). +Le(cr74_0,i6820). +Le(cr74_0,i7040). 
+Le(cr74_0,i7260). +Le(cr74_0,i7480). +Le(cr74_0,i7700). +Le(cr74_0,i7920). +Le(cr74_0,i8140). +Le(cr74_0,i8360). +Le(cr74_0,i8580). +Eq(i1880,i1880). +Le(i1880,cr75_0). +Le(cr75_0,i1910). +Le(i-30,cr75_0). +Le(i0,cr75_0). +Le(i13,cr75_0). +Le(i26,cr75_0). +Le(i39,cr75_0). +Le(i52,cr75_0). +Le(i60,cr75_0). +Le(i65,cr75_0). +Le(i70,cr75_0). +Le(i78,cr75_0). +Le(i90,cr75_0). +Le(i91,cr75_0). +Le(i104,cr75_0). +Le(i117,cr75_0). +Le(i130,cr75_0). +Le(i143,cr75_0). +Le(i156,cr75_0). +Le(i169,cr75_0). +Le(i182,cr75_0). +Le(i195,cr75_0). +Le(i208,cr75_0). +Le(i221,cr75_0). +Le(i234,cr75_0). +Le(i247,cr75_0). +Le(i260,cr75_0). +Le(i460,cr75_0). +Le(i530,cr75_0). +Le(i600,cr75_0). +Le(i660,cr75_0). +Le(i670,cr75_0). +Le(i710,cr75_0). +Le(i740,cr75_0). +Le(i810,cr75_0). +Le(i850,cr75_0). +Le(i880,cr75_0). +Le(i890,cr75_0). +Le(i920,cr75_0). +Le(i960,cr75_0). +Le(i990,cr75_0). +Le(i1030,cr75_0). +Le(i1060,cr75_0). +Le(i1100,cr75_0). +Le(i1130,cr75_0). +Le(i1170,cr75_0). +Le(i1200,cr75_0). +Le(i1240,cr75_0). +Le(i1260,cr75_0). +Le(i1270,cr75_0). +Le(i1290,cr75_0). +Le(i1310,cr75_0). +Le(i1320,cr75_0). +Le(i1330,cr75_0). +Le(i1350,cr75_0). +Le(i1360,cr75_0). +Le(i1380,cr75_0). +Le(i1390,cr75_0). +Le(i1420,cr75_0). +Le(i1430,cr75_0). +Le(i1450,cr75_0). +Le(i1460,cr75_0). +Le(i1490,cr75_0). +Le(i1520,cr75_0). +Le(i1530,cr75_0). +Le(i1540,cr75_0). +Le(i1560,cr75_0). +Le(i1590,cr75_0). +Le(i1630,cr75_0). +Le(i1660,cr75_0). +Le(i1700,cr75_0). +Le(i1730,cr75_0). +Le(i1760,cr75_0). +Le(i1770,cr75_0). +Le(i1810,cr75_0). +Le(i1840,cr75_0). +Le(cr75_0,i1950). +Le(cr75_0,i1980). +Le(cr75_0,i2020). +Le(cr75_0,i2050). +Le(cr75_0,i2090). +Le(cr75_0,i2120). +Le(cr75_0,i2160). +Le(cr75_0,i2190). +Le(cr75_0,i2200). +Le(cr75_0,i2230). +Le(cr75_0,i2270). +Le(cr75_0,i2300). +Le(cr75_0,i2340). +Le(cr75_0,i2370). +Le(cr75_0,i2410). +Le(cr75_0,i2420). +Le(cr75_0,i2440). +Le(cr75_0,i2480). +Le(cr75_0,i2510). +Le(cr75_0,i2550). +Le(cr75_0,i2580). +Le(cr75_0,i2620). +Le(cr75_0,i2640). +Le(cr75_0,i2660). +Le(cr75_0,i2730). +Le(cr75_0,i2760). +Le(cr75_0,i2800). +Le(cr75_0,i2830). +Le(cr75_0,i2860). +Le(cr75_0,i2870). +Le(cr75_0,i2940). +Le(cr75_0,i2970). +Le(cr75_0,i3010). +Le(cr75_0,i3040). +Le(cr75_0,i3080). +Le(cr75_0,i3120). +Le(cr75_0,i3150). +Le(cr75_0,i3220). +Le(cr75_0,i3260). +Le(cr75_0,i3290). +Le(cr75_0,i3300). +Le(cr75_0,i3330). +Le(cr75_0,i3400). +Le(cr75_0,i3430). +Le(cr75_0,i3500). +Le(cr75_0,i3520). +Le(cr75_0,i3580). +Le(cr75_0,i3610). +Le(cr75_0,i3650). +Le(cr75_0,i3680). +Le(cr75_0,i3720). +Le(cr75_0,i3740). +Le(cr75_0,i3790). +Le(cr75_0,i3820). +Le(cr75_0,i3860). +Le(cr75_0,i3960). +Le(cr75_0,i4040). +Le(cr75_0,i4140). +Le(cr75_0,i4180). +Le(cr75_0,i4400). +Le(cr75_0,i4620). +Le(cr75_0,i4840). +Le(cr75_0,i5060). +Le(cr75_0,i5280). +Le(cr75_0,i5500). +Le(cr75_0,i5720). +Le(cr75_0,i5940). +Le(cr75_0,i6160). +Le(cr75_0,i6380). +Le(cr75_0,i6600). +Le(cr75_0,i6820). +Le(cr75_0,i7040). +Le(cr75_0,i7260). +Le(cr75_0,i7480). +Le(cr75_0,i7700). +Le(cr75_0,i7920). +Le(cr75_0,i8140). +Le(cr75_0,i8360). +Le(cr75_0,i8580). +Eq(i1910,i1910). +Le(i1910,cr76_0). +Le(cr76_0,i1950). +Le(i-30,cr76_0). +Le(i0,cr76_0). +Le(i13,cr76_0). +Le(i26,cr76_0). +Le(i39,cr76_0). +Le(i52,cr76_0). +Le(i60,cr76_0). +Le(i65,cr76_0). +Le(i70,cr76_0). +Le(i78,cr76_0). +Le(i90,cr76_0). +Le(i91,cr76_0). +Le(i104,cr76_0). +Le(i117,cr76_0). +Le(i130,cr76_0). +Le(i143,cr76_0). +Le(i156,cr76_0). +Le(i169,cr76_0). +Le(i182,cr76_0). +Le(i195,cr76_0). +Le(i208,cr76_0). +Le(i221,cr76_0). +Le(i234,cr76_0). +Le(i247,cr76_0). +Le(i260,cr76_0). 
+Le(i460,cr76_0). +Le(i530,cr76_0). +Le(i600,cr76_0). +Le(i660,cr76_0). +Le(i670,cr76_0). +Le(i710,cr76_0). +Le(i740,cr76_0). +Le(i810,cr76_0). +Le(i850,cr76_0). +Le(i880,cr76_0). +Le(i890,cr76_0). +Le(i920,cr76_0). +Le(i960,cr76_0). +Le(i990,cr76_0). +Le(i1030,cr76_0). +Le(i1060,cr76_0). +Le(i1100,cr76_0). +Le(i1130,cr76_0). +Le(i1170,cr76_0). +Le(i1200,cr76_0). +Le(i1240,cr76_0). +Le(i1260,cr76_0). +Le(i1270,cr76_0). +Le(i1290,cr76_0). +Le(i1310,cr76_0). +Le(i1320,cr76_0). +Le(i1330,cr76_0). +Le(i1350,cr76_0). +Le(i1360,cr76_0). +Le(i1380,cr76_0). +Le(i1390,cr76_0). +Le(i1420,cr76_0). +Le(i1430,cr76_0). +Le(i1450,cr76_0). +Le(i1460,cr76_0). +Le(i1490,cr76_0). +Le(i1520,cr76_0). +Le(i1530,cr76_0). +Le(i1540,cr76_0). +Le(i1560,cr76_0). +Le(i1590,cr76_0). +Le(i1630,cr76_0). +Le(i1660,cr76_0). +Le(i1700,cr76_0). +Le(i1730,cr76_0). +Le(i1760,cr76_0). +Le(i1770,cr76_0). +Le(i1810,cr76_0). +Le(i1840,cr76_0). +Le(i1880,cr76_0). +Le(cr76_0,i1980). +Le(cr76_0,i2020). +Le(cr76_0,i2050). +Le(cr76_0,i2090). +Le(cr76_0,i2120). +Le(cr76_0,i2160). +Le(cr76_0,i2190). +Le(cr76_0,i2200). +Le(cr76_0,i2230). +Le(cr76_0,i2270). +Le(cr76_0,i2300). +Le(cr76_0,i2340). +Le(cr76_0,i2370). +Le(cr76_0,i2410). +Le(cr76_0,i2420). +Le(cr76_0,i2440). +Le(cr76_0,i2480). +Le(cr76_0,i2510). +Le(cr76_0,i2550). +Le(cr76_0,i2580). +Le(cr76_0,i2620). +Le(cr76_0,i2640). +Le(cr76_0,i2660). +Le(cr76_0,i2730). +Le(cr76_0,i2760). +Le(cr76_0,i2800). +Le(cr76_0,i2830). +Le(cr76_0,i2860). +Le(cr76_0,i2870). +Le(cr76_0,i2940). +Le(cr76_0,i2970). +Le(cr76_0,i3010). +Le(cr76_0,i3040). +Le(cr76_0,i3080). +Le(cr76_0,i3120). +Le(cr76_0,i3150). +Le(cr76_0,i3220). +Le(cr76_0,i3260). +Le(cr76_0,i3290). +Le(cr76_0,i3300). +Le(cr76_0,i3330). +Le(cr76_0,i3400). +Le(cr76_0,i3430). +Le(cr76_0,i3500). +Le(cr76_0,i3520). +Le(cr76_0,i3580). +Le(cr76_0,i3610). +Le(cr76_0,i3650). +Le(cr76_0,i3680). +Le(cr76_0,i3720). +Le(cr76_0,i3740). +Le(cr76_0,i3790). +Le(cr76_0,i3820). +Le(cr76_0,i3860). +Le(cr76_0,i3960). +Le(cr76_0,i4040). +Le(cr76_0,i4140). +Le(cr76_0,i4180). +Le(cr76_0,i4400). +Le(cr76_0,i4620). +Le(cr76_0,i4840). +Le(cr76_0,i5060). +Le(cr76_0,i5280). +Le(cr76_0,i5500). +Le(cr76_0,i5720). +Le(cr76_0,i5940). +Le(cr76_0,i6160). +Le(cr76_0,i6380). +Le(cr76_0,i6600). +Le(cr76_0,i6820). +Le(cr76_0,i7040). +Le(cr76_0,i7260). +Le(cr76_0,i7480). +Le(cr76_0,i7700). +Le(cr76_0,i7920). +Le(cr76_0,i8140). +Le(cr76_0,i8360). +Le(cr76_0,i8580). +Eq(i1950,i1950). +Le(i1950,cr77_0). +Le(cr77_0,i1980). +Le(i-30,cr77_0). +Le(i0,cr77_0). +Le(i13,cr77_0). +Le(i26,cr77_0). +Le(i39,cr77_0). +Le(i52,cr77_0). +Le(i60,cr77_0). +Le(i65,cr77_0). +Le(i70,cr77_0). +Le(i78,cr77_0). +Le(i90,cr77_0). +Le(i91,cr77_0). +Le(i104,cr77_0). +Le(i117,cr77_0). +Le(i130,cr77_0). +Le(i143,cr77_0). +Le(i156,cr77_0). +Le(i169,cr77_0). +Le(i182,cr77_0). +Le(i195,cr77_0). +Le(i208,cr77_0). +Le(i221,cr77_0). +Le(i234,cr77_0). +Le(i247,cr77_0). +Le(i260,cr77_0). +Le(i460,cr77_0). +Le(i530,cr77_0). +Le(i600,cr77_0). +Le(i660,cr77_0). +Le(i670,cr77_0). +Le(i710,cr77_0). +Le(i740,cr77_0). +Le(i810,cr77_0). +Le(i850,cr77_0). +Le(i880,cr77_0). +Le(i890,cr77_0). +Le(i920,cr77_0). +Le(i960,cr77_0). +Le(i990,cr77_0). +Le(i1030,cr77_0). +Le(i1060,cr77_0). +Le(i1100,cr77_0). +Le(i1130,cr77_0). +Le(i1170,cr77_0). +Le(i1200,cr77_0). +Le(i1240,cr77_0). +Le(i1260,cr77_0). +Le(i1270,cr77_0). +Le(i1290,cr77_0). +Le(i1310,cr77_0). +Le(i1320,cr77_0). +Le(i1330,cr77_0). +Le(i1350,cr77_0). +Le(i1360,cr77_0). +Le(i1380,cr77_0). +Le(i1390,cr77_0). +Le(i1420,cr77_0). +Le(i1430,cr77_0). +Le(i1450,cr77_0). 
+Le(i1460,cr77_0). +Le(i1490,cr77_0). +Le(i1520,cr77_0). +Le(i1530,cr77_0). +Le(i1540,cr77_0). +Le(i1560,cr77_0). +Le(i1590,cr77_0). +Le(i1630,cr77_0). +Le(i1660,cr77_0). +Le(i1700,cr77_0). +Le(i1730,cr77_0). +Le(i1760,cr77_0). +Le(i1770,cr77_0). +Le(i1810,cr77_0). +Le(i1840,cr77_0). +Le(i1880,cr77_0). +Le(i1910,cr77_0). +Le(cr77_0,i2020). +Le(cr77_0,i2050). +Le(cr77_0,i2090). +Le(cr77_0,i2120). +Le(cr77_0,i2160). +Le(cr77_0,i2190). +Le(cr77_0,i2200). +Le(cr77_0,i2230). +Le(cr77_0,i2270). +Le(cr77_0,i2300). +Le(cr77_0,i2340). +Le(cr77_0,i2370). +Le(cr77_0,i2410). +Le(cr77_0,i2420). +Le(cr77_0,i2440). +Le(cr77_0,i2480). +Le(cr77_0,i2510). +Le(cr77_0,i2550). +Le(cr77_0,i2580). +Le(cr77_0,i2620). +Le(cr77_0,i2640). +Le(cr77_0,i2660). +Le(cr77_0,i2730). +Le(cr77_0,i2760). +Le(cr77_0,i2800). +Le(cr77_0,i2830). +Le(cr77_0,i2860). +Le(cr77_0,i2870). +Le(cr77_0,i2940). +Le(cr77_0,i2970). +Le(cr77_0,i3010). +Le(cr77_0,i3040). +Le(cr77_0,i3080). +Le(cr77_0,i3120). +Le(cr77_0,i3150). +Le(cr77_0,i3220). +Le(cr77_0,i3260). +Le(cr77_0,i3290). +Le(cr77_0,i3300). +Le(cr77_0,i3330). +Le(cr77_0,i3400). +Le(cr77_0,i3430). +Le(cr77_0,i3500). +Le(cr77_0,i3520). +Le(cr77_0,i3580). +Le(cr77_0,i3610). +Le(cr77_0,i3650). +Le(cr77_0,i3680). +Le(cr77_0,i3720). +Le(cr77_0,i3740). +Le(cr77_0,i3790). +Le(cr77_0,i3820). +Le(cr77_0,i3860). +Le(cr77_0,i3960). +Le(cr77_0,i4040). +Le(cr77_0,i4140). +Le(cr77_0,i4180). +Le(cr77_0,i4400). +Le(cr77_0,i4620). +Le(cr77_0,i4840). +Le(cr77_0,i5060). +Le(cr77_0,i5280). +Le(cr77_0,i5500). +Le(cr77_0,i5720). +Le(cr77_0,i5940). +Le(cr77_0,i6160). +Le(cr77_0,i6380). +Le(cr77_0,i6600). +Le(cr77_0,i6820). +Le(cr77_0,i7040). +Le(cr77_0,i7260). +Le(cr77_0,i7480). +Le(cr77_0,i7700). +Le(cr77_0,i7920). +Le(cr77_0,i8140). +Le(cr77_0,i8360). +Le(cr77_0,i8580). +Eq(i1980,i1980). +Le(i1980,cr78_0). +Le(cr78_0,i2020). +Le(i-30,cr78_0). +Le(i0,cr78_0). +Le(i13,cr78_0). +Le(i26,cr78_0). +Le(i39,cr78_0). +Le(i52,cr78_0). +Le(i60,cr78_0). +Le(i65,cr78_0). +Le(i70,cr78_0). +Le(i78,cr78_0). +Le(i90,cr78_0). +Le(i91,cr78_0). +Le(i104,cr78_0). +Le(i117,cr78_0). +Le(i130,cr78_0). +Le(i143,cr78_0). +Le(i156,cr78_0). +Le(i169,cr78_0). +Le(i182,cr78_0). +Le(i195,cr78_0). +Le(i208,cr78_0). +Le(i221,cr78_0). +Le(i234,cr78_0). +Le(i247,cr78_0). +Le(i260,cr78_0). +Le(i460,cr78_0). +Le(i530,cr78_0). +Le(i600,cr78_0). +Le(i660,cr78_0). +Le(i670,cr78_0). +Le(i710,cr78_0). +Le(i740,cr78_0). +Le(i810,cr78_0). +Le(i850,cr78_0). +Le(i880,cr78_0). +Le(i890,cr78_0). +Le(i920,cr78_0). +Le(i960,cr78_0). +Le(i990,cr78_0). +Le(i1030,cr78_0). +Le(i1060,cr78_0). +Le(i1100,cr78_0). +Le(i1130,cr78_0). +Le(i1170,cr78_0). +Le(i1200,cr78_0). +Le(i1240,cr78_0). +Le(i1260,cr78_0). +Le(i1270,cr78_0). +Le(i1290,cr78_0). +Le(i1310,cr78_0). +Le(i1320,cr78_0). +Le(i1330,cr78_0). +Le(i1350,cr78_0). +Le(i1360,cr78_0). +Le(i1380,cr78_0). +Le(i1390,cr78_0). +Le(i1420,cr78_0). +Le(i1430,cr78_0). +Le(i1450,cr78_0). +Le(i1460,cr78_0). +Le(i1490,cr78_0). +Le(i1520,cr78_0). +Le(i1530,cr78_0). +Le(i1540,cr78_0). +Le(i1560,cr78_0). +Le(i1590,cr78_0). +Le(i1630,cr78_0). +Le(i1660,cr78_0). +Le(i1700,cr78_0). +Le(i1730,cr78_0). +Le(i1760,cr78_0). +Le(i1770,cr78_0). +Le(i1810,cr78_0). +Le(i1840,cr78_0). +Le(i1880,cr78_0). +Le(i1910,cr78_0). +Le(i1950,cr78_0). +Le(cr78_0,i2050). +Le(cr78_0,i2090). +Le(cr78_0,i2120). +Le(cr78_0,i2160). +Le(cr78_0,i2190). +Le(cr78_0,i2200). +Le(cr78_0,i2230). +Le(cr78_0,i2270). +Le(cr78_0,i2300). +Le(cr78_0,i2340). +Le(cr78_0,i2370). +Le(cr78_0,i2410). +Le(cr78_0,i2420). +Le(cr78_0,i2440). +Le(cr78_0,i2480). 
+Le(cr78_0,i2510). +Le(cr78_0,i2550). +Le(cr78_0,i2580). +Le(cr78_0,i2620). +Le(cr78_0,i2640). +Le(cr78_0,i2660). +Le(cr78_0,i2730). +Le(cr78_0,i2760). +Le(cr78_0,i2800). +Le(cr78_0,i2830). +Le(cr78_0,i2860). +Le(cr78_0,i2870). +Le(cr78_0,i2940). +Le(cr78_0,i2970). +Le(cr78_0,i3010). +Le(cr78_0,i3040). +Le(cr78_0,i3080). +Le(cr78_0,i3120). +Le(cr78_0,i3150). +Le(cr78_0,i3220). +Le(cr78_0,i3260). +Le(cr78_0,i3290). +Le(cr78_0,i3300). +Le(cr78_0,i3330). +Le(cr78_0,i3400). +Le(cr78_0,i3430). +Le(cr78_0,i3500). +Le(cr78_0,i3520). +Le(cr78_0,i3580). +Le(cr78_0,i3610). +Le(cr78_0,i3650). +Le(cr78_0,i3680). +Le(cr78_0,i3720). +Le(cr78_0,i3740). +Le(cr78_0,i3790). +Le(cr78_0,i3820). +Le(cr78_0,i3860). +Le(cr78_0,i3960). +Le(cr78_0,i4040). +Le(cr78_0,i4140). +Le(cr78_0,i4180). +Le(cr78_0,i4400). +Le(cr78_0,i4620). +Le(cr78_0,i4840). +Le(cr78_0,i5060). +Le(cr78_0,i5280). +Le(cr78_0,i5500). +Le(cr78_0,i5720). +Le(cr78_0,i5940). +Le(cr78_0,i6160). +Le(cr78_0,i6380). +Le(cr78_0,i6600). +Le(cr78_0,i6820). +Le(cr78_0,i7040). +Le(cr78_0,i7260). +Le(cr78_0,i7480). +Le(cr78_0,i7700). +Le(cr78_0,i7920). +Le(cr78_0,i8140). +Le(cr78_0,i8360). +Le(cr78_0,i8580). +Eq(i2020,i2020). +Le(i2020,cr79_0). +Le(cr79_0,i2050). +Le(i-30,cr79_0). +Le(i0,cr79_0). +Le(i13,cr79_0). +Le(i26,cr79_0). +Le(i39,cr79_0). +Le(i52,cr79_0). +Le(i60,cr79_0). +Le(i65,cr79_0). +Le(i70,cr79_0). +Le(i78,cr79_0). +Le(i90,cr79_0). +Le(i91,cr79_0). +Le(i104,cr79_0). +Le(i117,cr79_0). +Le(i130,cr79_0). +Le(i143,cr79_0). +Le(i156,cr79_0). +Le(i169,cr79_0). +Le(i182,cr79_0). +Le(i195,cr79_0). +Le(i208,cr79_0). +Le(i221,cr79_0). +Le(i234,cr79_0). +Le(i247,cr79_0). +Le(i260,cr79_0). +Le(i460,cr79_0). +Le(i530,cr79_0). +Le(i600,cr79_0). +Le(i660,cr79_0). +Le(i670,cr79_0). +Le(i710,cr79_0). +Le(i740,cr79_0). +Le(i810,cr79_0). +Le(i850,cr79_0). +Le(i880,cr79_0). +Le(i890,cr79_0). +Le(i920,cr79_0). +Le(i960,cr79_0). +Le(i990,cr79_0). +Le(i1030,cr79_0). +Le(i1060,cr79_0). +Le(i1100,cr79_0). +Le(i1130,cr79_0). +Le(i1170,cr79_0). +Le(i1200,cr79_0). +Le(i1240,cr79_0). +Le(i1260,cr79_0). +Le(i1270,cr79_0). +Le(i1290,cr79_0). +Le(i1310,cr79_0). +Le(i1320,cr79_0). +Le(i1330,cr79_0). +Le(i1350,cr79_0). +Le(i1360,cr79_0). +Le(i1380,cr79_0). +Le(i1390,cr79_0). +Le(i1420,cr79_0). +Le(i1430,cr79_0). +Le(i1450,cr79_0). +Le(i1460,cr79_0). +Le(i1490,cr79_0). +Le(i1520,cr79_0). +Le(i1530,cr79_0). +Le(i1540,cr79_0). +Le(i1560,cr79_0). +Le(i1590,cr79_0). +Le(i1630,cr79_0). +Le(i1660,cr79_0). +Le(i1700,cr79_0). +Le(i1730,cr79_0). +Le(i1760,cr79_0). +Le(i1770,cr79_0). +Le(i1810,cr79_0). +Le(i1840,cr79_0). +Le(i1880,cr79_0). +Le(i1910,cr79_0). +Le(i1950,cr79_0). +Le(i1980,cr79_0). +Le(cr79_0,i2090). +Le(cr79_0,i2120). +Le(cr79_0,i2160). +Le(cr79_0,i2190). +Le(cr79_0,i2200). +Le(cr79_0,i2230). +Le(cr79_0,i2270). +Le(cr79_0,i2300). +Le(cr79_0,i2340). +Le(cr79_0,i2370). +Le(cr79_0,i2410). +Le(cr79_0,i2420). +Le(cr79_0,i2440). +Le(cr79_0,i2480). +Le(cr79_0,i2510). +Le(cr79_0,i2550). +Le(cr79_0,i2580). +Le(cr79_0,i2620). +Le(cr79_0,i2640). +Le(cr79_0,i2660). +Le(cr79_0,i2730). +Le(cr79_0,i2760). +Le(cr79_0,i2800). +Le(cr79_0,i2830). +Le(cr79_0,i2860). +Le(cr79_0,i2870). +Le(cr79_0,i2940). +Le(cr79_0,i2970). +Le(cr79_0,i3010). +Le(cr79_0,i3040). +Le(cr79_0,i3080). +Le(cr79_0,i3120). +Le(cr79_0,i3150). +Le(cr79_0,i3220). +Le(cr79_0,i3260). +Le(cr79_0,i3290). +Le(cr79_0,i3300). +Le(cr79_0,i3330). +Le(cr79_0,i3400). +Le(cr79_0,i3430). +Le(cr79_0,i3500). +Le(cr79_0,i3520). +Le(cr79_0,i3580). +Le(cr79_0,i3610). +Le(cr79_0,i3650). +Le(cr79_0,i3680). +Le(cr79_0,i3720). 
+Le(cr79_0,i3740). +Le(cr79_0,i3790). +Le(cr79_0,i3820). +Le(cr79_0,i3860). +Le(cr79_0,i3960). +Le(cr79_0,i4040). +Le(cr79_0,i4140). +Le(cr79_0,i4180). +Le(cr79_0,i4400). +Le(cr79_0,i4620). +Le(cr79_0,i4840). +Le(cr79_0,i5060). +Le(cr79_0,i5280). +Le(cr79_0,i5500). +Le(cr79_0,i5720). +Le(cr79_0,i5940). +Le(cr79_0,i6160). +Le(cr79_0,i6380). +Le(cr79_0,i6600). +Le(cr79_0,i6820). +Le(cr79_0,i7040). +Le(cr79_0,i7260). +Le(cr79_0,i7480). +Le(cr79_0,i7700). +Le(cr79_0,i7920). +Le(cr79_0,i8140). +Le(cr79_0,i8360). +Le(cr79_0,i8580). +Eq(i2050,i2050). +Le(i2050,cr80_0). +Le(cr80_0,i2090). +Le(i-30,cr80_0). +Le(i0,cr80_0). +Le(i13,cr80_0). +Le(i26,cr80_0). +Le(i39,cr80_0). +Le(i52,cr80_0). +Le(i60,cr80_0). +Le(i65,cr80_0). +Le(i70,cr80_0). +Le(i78,cr80_0). +Le(i90,cr80_0). +Le(i91,cr80_0). +Le(i104,cr80_0). +Le(i117,cr80_0). +Le(i130,cr80_0). +Le(i143,cr80_0). +Le(i156,cr80_0). +Le(i169,cr80_0). +Le(i182,cr80_0). +Le(i195,cr80_0). +Le(i208,cr80_0). +Le(i221,cr80_0). +Le(i234,cr80_0). +Le(i247,cr80_0). +Le(i260,cr80_0). +Le(i460,cr80_0). +Le(i530,cr80_0). +Le(i600,cr80_0). +Le(i660,cr80_0). +Le(i670,cr80_0). +Le(i710,cr80_0). +Le(i740,cr80_0). +Le(i810,cr80_0). +Le(i850,cr80_0). +Le(i880,cr80_0). +Le(i890,cr80_0). +Le(i920,cr80_0). +Le(i960,cr80_0). +Le(i990,cr80_0). +Le(i1030,cr80_0). +Le(i1060,cr80_0). +Le(i1100,cr80_0). +Le(i1130,cr80_0). +Le(i1170,cr80_0). +Le(i1200,cr80_0). +Le(i1240,cr80_0). +Le(i1260,cr80_0). +Le(i1270,cr80_0). +Le(i1290,cr80_0). +Le(i1310,cr80_0). +Le(i1320,cr80_0). +Le(i1330,cr80_0). +Le(i1350,cr80_0). +Le(i1360,cr80_0). +Le(i1380,cr80_0). +Le(i1390,cr80_0). +Le(i1420,cr80_0). +Le(i1430,cr80_0). +Le(i1450,cr80_0). +Le(i1460,cr80_0). +Le(i1490,cr80_0). +Le(i1520,cr80_0). +Le(i1530,cr80_0). +Le(i1540,cr80_0). +Le(i1560,cr80_0). +Le(i1590,cr80_0). +Le(i1630,cr80_0). +Le(i1660,cr80_0). +Le(i1700,cr80_0). +Le(i1730,cr80_0). +Le(i1760,cr80_0). +Le(i1770,cr80_0). +Le(i1810,cr80_0). +Le(i1840,cr80_0). +Le(i1880,cr80_0). +Le(i1910,cr80_0). +Le(i1950,cr80_0). +Le(i1980,cr80_0). +Le(i2020,cr80_0). +Le(cr80_0,i2120). +Le(cr80_0,i2160). +Le(cr80_0,i2190). +Le(cr80_0,i2200). +Le(cr80_0,i2230). +Le(cr80_0,i2270). +Le(cr80_0,i2300). +Le(cr80_0,i2340). +Le(cr80_0,i2370). +Le(cr80_0,i2410). +Le(cr80_0,i2420). +Le(cr80_0,i2440). +Le(cr80_0,i2480). +Le(cr80_0,i2510). +Le(cr80_0,i2550). +Le(cr80_0,i2580). +Le(cr80_0,i2620). +Le(cr80_0,i2640). +Le(cr80_0,i2660). +Le(cr80_0,i2730). +Le(cr80_0,i2760). +Le(cr80_0,i2800). +Le(cr80_0,i2830). +Le(cr80_0,i2860). +Le(cr80_0,i2870). +Le(cr80_0,i2940). +Le(cr80_0,i2970). +Le(cr80_0,i3010). +Le(cr80_0,i3040). +Le(cr80_0,i3080). +Le(cr80_0,i3120). +Le(cr80_0,i3150). +Le(cr80_0,i3220). +Le(cr80_0,i3260). +Le(cr80_0,i3290). +Le(cr80_0,i3300). +Le(cr80_0,i3330). +Le(cr80_0,i3400). +Le(cr80_0,i3430). +Le(cr80_0,i3500). +Le(cr80_0,i3520). +Le(cr80_0,i3580). +Le(cr80_0,i3610). +Le(cr80_0,i3650). +Le(cr80_0,i3680). +Le(cr80_0,i3720). +Le(cr80_0,i3740). +Le(cr80_0,i3790). +Le(cr80_0,i3820). +Le(cr80_0,i3860). +Le(cr80_0,i3960). +Le(cr80_0,i4040). +Le(cr80_0,i4140). +Le(cr80_0,i4180). +Le(cr80_0,i4400). +Le(cr80_0,i4620). +Le(cr80_0,i4840). +Le(cr80_0,i5060). +Le(cr80_0,i5280). +Le(cr80_0,i5500). +Le(cr80_0,i5720). +Le(cr80_0,i5940). +Le(cr80_0,i6160). +Le(cr80_0,i6380). +Le(cr80_0,i6600). +Le(cr80_0,i6820). +Le(cr80_0,i7040). +Le(cr80_0,i7260). +Le(cr80_0,i7480). +Le(cr80_0,i7700). +Le(cr80_0,i7920). +Le(cr80_0,i8140). +Le(cr80_0,i8360). +Le(cr80_0,i8580). +Eq(i2090,i2090). +Le(i2090,cr81_0). +Le(cr81_0,i2120). +Le(i-30,cr81_0). +Le(i0,cr81_0). +Le(i13,cr81_0). 
+Le(i26,cr81_0). +Le(i39,cr81_0). +Le(i52,cr81_0). +Le(i60,cr81_0). +Le(i65,cr81_0). +Le(i70,cr81_0). +Le(i78,cr81_0). +Le(i90,cr81_0). +Le(i91,cr81_0). +Le(i104,cr81_0). +Le(i117,cr81_0). +Le(i130,cr81_0). +Le(i143,cr81_0). +Le(i156,cr81_0). +Le(i169,cr81_0). +Le(i182,cr81_0). +Le(i195,cr81_0). +Le(i208,cr81_0). +Le(i221,cr81_0). +Le(i234,cr81_0). +Le(i247,cr81_0). +Le(i260,cr81_0). +Le(i460,cr81_0). +Le(i530,cr81_0). +Le(i600,cr81_0). +Le(i660,cr81_0). +Le(i670,cr81_0). +Le(i710,cr81_0). +Le(i740,cr81_0). +Le(i810,cr81_0). +Le(i850,cr81_0). +Le(i880,cr81_0). +Le(i890,cr81_0). +Le(i920,cr81_0). +Le(i960,cr81_0). +Le(i990,cr81_0). +Le(i1030,cr81_0). +Le(i1060,cr81_0). +Le(i1100,cr81_0). +Le(i1130,cr81_0). +Le(i1170,cr81_0). +Le(i1200,cr81_0). +Le(i1240,cr81_0). +Le(i1260,cr81_0). +Le(i1270,cr81_0). +Le(i1290,cr81_0). +Le(i1310,cr81_0). +Le(i1320,cr81_0). +Le(i1330,cr81_0). +Le(i1350,cr81_0). +Le(i1360,cr81_0). +Le(i1380,cr81_0). +Le(i1390,cr81_0). +Le(i1420,cr81_0). +Le(i1430,cr81_0). +Le(i1450,cr81_0). +Le(i1460,cr81_0). +Le(i1490,cr81_0). +Le(i1520,cr81_0). +Le(i1530,cr81_0). +Le(i1540,cr81_0). +Le(i1560,cr81_0). +Le(i1590,cr81_0). +Le(i1630,cr81_0). +Le(i1660,cr81_0). +Le(i1700,cr81_0). +Le(i1730,cr81_0). +Le(i1760,cr81_0). +Le(i1770,cr81_0). +Le(i1810,cr81_0). +Le(i1840,cr81_0). +Le(i1880,cr81_0). +Le(i1910,cr81_0). +Le(i1950,cr81_0). +Le(i1980,cr81_0). +Le(i2020,cr81_0). +Le(i2050,cr81_0). +Le(cr81_0,i2160). +Le(cr81_0,i2190). +Le(cr81_0,i2200). +Le(cr81_0,i2230). +Le(cr81_0,i2270). +Le(cr81_0,i2300). +Le(cr81_0,i2340). +Le(cr81_0,i2370). +Le(cr81_0,i2410). +Le(cr81_0,i2420). +Le(cr81_0,i2440). +Le(cr81_0,i2480). +Le(cr81_0,i2510). +Le(cr81_0,i2550). +Le(cr81_0,i2580). +Le(cr81_0,i2620). +Le(cr81_0,i2640). +Le(cr81_0,i2660). +Le(cr81_0,i2730). +Le(cr81_0,i2760). +Le(cr81_0,i2800). +Le(cr81_0,i2830). +Le(cr81_0,i2860). +Le(cr81_0,i2870). +Le(cr81_0,i2940). +Le(cr81_0,i2970). +Le(cr81_0,i3010). +Le(cr81_0,i3040). +Le(cr81_0,i3080). +Le(cr81_0,i3120). +Le(cr81_0,i3150). +Le(cr81_0,i3220). +Le(cr81_0,i3260). +Le(cr81_0,i3290). +Le(cr81_0,i3300). +Le(cr81_0,i3330). +Le(cr81_0,i3400). +Le(cr81_0,i3430). +Le(cr81_0,i3500). +Le(cr81_0,i3520). +Le(cr81_0,i3580). +Le(cr81_0,i3610). +Le(cr81_0,i3650). +Le(cr81_0,i3680). +Le(cr81_0,i3720). +Le(cr81_0,i3740). +Le(cr81_0,i3790). +Le(cr81_0,i3820). +Le(cr81_0,i3860). +Le(cr81_0,i3960). +Le(cr81_0,i4040). +Le(cr81_0,i4140). +Le(cr81_0,i4180). +Le(cr81_0,i4400). +Le(cr81_0,i4620). +Le(cr81_0,i4840). +Le(cr81_0,i5060). +Le(cr81_0,i5280). +Le(cr81_0,i5500). +Le(cr81_0,i5720). +Le(cr81_0,i5940). +Le(cr81_0,i6160). +Le(cr81_0,i6380). +Le(cr81_0,i6600). +Le(cr81_0,i6820). +Le(cr81_0,i7040). +Le(cr81_0,i7260). +Le(cr81_0,i7480). +Le(cr81_0,i7700). +Le(cr81_0,i7920). +Le(cr81_0,i8140). +Le(cr81_0,i8360). +Le(cr81_0,i8580). +Eq(i2120,i2120). +Le(i2120,cr82_0). +Le(cr82_0,i2160). +Le(i-30,cr82_0). +Le(i0,cr82_0). +Le(i13,cr82_0). +Le(i26,cr82_0). +Le(i39,cr82_0). +Le(i52,cr82_0). +Le(i60,cr82_0). +Le(i65,cr82_0). +Le(i70,cr82_0). +Le(i78,cr82_0). +Le(i90,cr82_0). +Le(i91,cr82_0). +Le(i104,cr82_0). +Le(i117,cr82_0). +Le(i130,cr82_0). +Le(i143,cr82_0). +Le(i156,cr82_0). +Le(i169,cr82_0). +Le(i182,cr82_0). +Le(i195,cr82_0). +Le(i208,cr82_0). +Le(i221,cr82_0). +Le(i234,cr82_0). +Le(i247,cr82_0). +Le(i260,cr82_0). +Le(i460,cr82_0). +Le(i530,cr82_0). +Le(i600,cr82_0). +Le(i660,cr82_0). +Le(i670,cr82_0). +Le(i710,cr82_0). +Le(i740,cr82_0). +Le(i810,cr82_0). +Le(i850,cr82_0). +Le(i880,cr82_0). +Le(i890,cr82_0). +Le(i920,cr82_0). +Le(i960,cr82_0). +Le(i990,cr82_0). 
+Le(i1030,cr82_0). +Le(i1060,cr82_0). +Le(i1100,cr82_0). +Le(i1130,cr82_0). +Le(i1170,cr82_0). +Le(i1200,cr82_0). +Le(i1240,cr82_0). +Le(i1260,cr82_0). +Le(i1270,cr82_0). +Le(i1290,cr82_0). +Le(i1310,cr82_0). +Le(i1320,cr82_0). +Le(i1330,cr82_0). +Le(i1350,cr82_0). +Le(i1360,cr82_0). +Le(i1380,cr82_0). +Le(i1390,cr82_0). +Le(i1420,cr82_0). +Le(i1430,cr82_0). +Le(i1450,cr82_0). +Le(i1460,cr82_0). +Le(i1490,cr82_0). +Le(i1520,cr82_0). +Le(i1530,cr82_0). +Le(i1540,cr82_0). +Le(i1560,cr82_0). +Le(i1590,cr82_0). +Le(i1630,cr82_0). +Le(i1660,cr82_0). +Le(i1700,cr82_0). +Le(i1730,cr82_0). +Le(i1760,cr82_0). +Le(i1770,cr82_0). +Le(i1810,cr82_0). +Le(i1840,cr82_0). +Le(i1880,cr82_0). +Le(i1910,cr82_0). +Le(i1950,cr82_0). +Le(i1980,cr82_0). +Le(i2020,cr82_0). +Le(i2050,cr82_0). +Le(i2090,cr82_0). +Le(cr82_0,i2190). +Le(cr82_0,i2200). +Le(cr82_0,i2230). +Le(cr82_0,i2270). +Le(cr82_0,i2300). +Le(cr82_0,i2340). +Le(cr82_0,i2370). +Le(cr82_0,i2410). +Le(cr82_0,i2420). +Le(cr82_0,i2440). +Le(cr82_0,i2480). +Le(cr82_0,i2510). +Le(cr82_0,i2550). +Le(cr82_0,i2580). +Le(cr82_0,i2620). +Le(cr82_0,i2640). +Le(cr82_0,i2660). +Le(cr82_0,i2730). +Le(cr82_0,i2760). +Le(cr82_0,i2800). +Le(cr82_0,i2830). +Le(cr82_0,i2860). +Le(cr82_0,i2870). +Le(cr82_0,i2940). +Le(cr82_0,i2970). +Le(cr82_0,i3010). +Le(cr82_0,i3040). +Le(cr82_0,i3080). +Le(cr82_0,i3120). +Le(cr82_0,i3150). +Le(cr82_0,i3220). +Le(cr82_0,i3260). +Le(cr82_0,i3290). +Le(cr82_0,i3300). +Le(cr82_0,i3330). +Le(cr82_0,i3400). +Le(cr82_0,i3430). +Le(cr82_0,i3500). +Le(cr82_0,i3520). +Le(cr82_0,i3580). +Le(cr82_0,i3610). +Le(cr82_0,i3650). +Le(cr82_0,i3680). +Le(cr82_0,i3720). +Le(cr82_0,i3740). +Le(cr82_0,i3790). +Le(cr82_0,i3820). +Le(cr82_0,i3860). +Le(cr82_0,i3960). +Le(cr82_0,i4040). +Le(cr82_0,i4140). +Le(cr82_0,i4180). +Le(cr82_0,i4400). +Le(cr82_0,i4620). +Le(cr82_0,i4840). +Le(cr82_0,i5060). +Le(cr82_0,i5280). +Le(cr82_0,i5500). +Le(cr82_0,i5720). +Le(cr82_0,i5940). +Le(cr82_0,i6160). +Le(cr82_0,i6380). +Le(cr82_0,i6600). +Le(cr82_0,i6820). +Le(cr82_0,i7040). +Le(cr82_0,i7260). +Le(cr82_0,i7480). +Le(cr82_0,i7700). +Le(cr82_0,i7920). +Le(cr82_0,i8140). +Le(cr82_0,i8360). +Le(cr82_0,i8580). +Eq(i2160,i2160). +Le(i2160,cr83_0). +Le(cr83_0,i2190). +Le(i-30,cr83_0). +Le(i0,cr83_0). +Le(i13,cr83_0). +Le(i26,cr83_0). +Le(i39,cr83_0). +Le(i52,cr83_0). +Le(i60,cr83_0). +Le(i65,cr83_0). +Le(i70,cr83_0). +Le(i78,cr83_0). +Le(i90,cr83_0). +Le(i91,cr83_0). +Le(i104,cr83_0). +Le(i117,cr83_0). +Le(i130,cr83_0). +Le(i143,cr83_0). +Le(i156,cr83_0). +Le(i169,cr83_0). +Le(i182,cr83_0). +Le(i195,cr83_0). +Le(i208,cr83_0). +Le(i221,cr83_0). +Le(i234,cr83_0). +Le(i247,cr83_0). +Le(i260,cr83_0). +Le(i460,cr83_0). +Le(i530,cr83_0). +Le(i600,cr83_0). +Le(i660,cr83_0). +Le(i670,cr83_0). +Le(i710,cr83_0). +Le(i740,cr83_0). +Le(i810,cr83_0). +Le(i850,cr83_0). +Le(i880,cr83_0). +Le(i890,cr83_0). +Le(i920,cr83_0). +Le(i960,cr83_0). +Le(i990,cr83_0). +Le(i1030,cr83_0). +Le(i1060,cr83_0). +Le(i1100,cr83_0). +Le(i1130,cr83_0). +Le(i1170,cr83_0). +Le(i1200,cr83_0). +Le(i1240,cr83_0). +Le(i1260,cr83_0). +Le(i1270,cr83_0). +Le(i1290,cr83_0). +Le(i1310,cr83_0). +Le(i1320,cr83_0). +Le(i1330,cr83_0). +Le(i1350,cr83_0). +Le(i1360,cr83_0). +Le(i1380,cr83_0). +Le(i1390,cr83_0). +Le(i1420,cr83_0). +Le(i1430,cr83_0). +Le(i1450,cr83_0). +Le(i1460,cr83_0). +Le(i1490,cr83_0). +Le(i1520,cr83_0). +Le(i1530,cr83_0). +Le(i1540,cr83_0). +Le(i1560,cr83_0). +Le(i1590,cr83_0). +Le(i1630,cr83_0). +Le(i1660,cr83_0). +Le(i1700,cr83_0). +Le(i1730,cr83_0). +Le(i1760,cr83_0). +Le(i1770,cr83_0). 
+Le(i1810,cr83_0). +Le(i1840,cr83_0). +Le(i1880,cr83_0). +Le(i1910,cr83_0). +Le(i1950,cr83_0). +Le(i1980,cr83_0). +Le(i2020,cr83_0). +Le(i2050,cr83_0). +Le(i2090,cr83_0). +Le(i2120,cr83_0). +Le(cr83_0,i2200). +Le(cr83_0,i2230). +Le(cr83_0,i2270). +Le(cr83_0,i2300). +Le(cr83_0,i2340). +Le(cr83_0,i2370). +Le(cr83_0,i2410). +Le(cr83_0,i2420). +Le(cr83_0,i2440). +Le(cr83_0,i2480). +Le(cr83_0,i2510). +Le(cr83_0,i2550). +Le(cr83_0,i2580). +Le(cr83_0,i2620). +Le(cr83_0,i2640). +Le(cr83_0,i2660). +Le(cr83_0,i2730). +Le(cr83_0,i2760). +Le(cr83_0,i2800). +Le(cr83_0,i2830). +Le(cr83_0,i2860). +Le(cr83_0,i2870). +Le(cr83_0,i2940). +Le(cr83_0,i2970). +Le(cr83_0,i3010). +Le(cr83_0,i3040). +Le(cr83_0,i3080). +Le(cr83_0,i3120). +Le(cr83_0,i3150). +Le(cr83_0,i3220). +Le(cr83_0,i3260). +Le(cr83_0,i3290). +Le(cr83_0,i3300). +Le(cr83_0,i3330). +Le(cr83_0,i3400). +Le(cr83_0,i3430). +Le(cr83_0,i3500). +Le(cr83_0,i3520). +Le(cr83_0,i3580). +Le(cr83_0,i3610). +Le(cr83_0,i3650). +Le(cr83_0,i3680). +Le(cr83_0,i3720). +Le(cr83_0,i3740). +Le(cr83_0,i3790). +Le(cr83_0,i3820). +Le(cr83_0,i3860). +Le(cr83_0,i3960). +Le(cr83_0,i4040). +Le(cr83_0,i4140). +Le(cr83_0,i4180). +Le(cr83_0,i4400). +Le(cr83_0,i4620). +Le(cr83_0,i4840). +Le(cr83_0,i5060). +Le(cr83_0,i5280). +Le(cr83_0,i5500). +Le(cr83_0,i5720). +Le(cr83_0,i5940). +Le(cr83_0,i6160). +Le(cr83_0,i6380). +Le(cr83_0,i6600). +Le(cr83_0,i6820). +Le(cr83_0,i7040). +Le(cr83_0,i7260). +Le(cr83_0,i7480). +Le(cr83_0,i7700). +Le(cr83_0,i7920). +Le(cr83_0,i8140). +Le(cr83_0,i8360). +Le(cr83_0,i8580). +Eq(i2190,i2190). +Le(i2190,cr84_0). +Le(cr84_0,i2200). +Le(i-30,cr84_0). +Le(i0,cr84_0). +Le(i13,cr84_0). +Le(i26,cr84_0). +Le(i39,cr84_0). +Le(i52,cr84_0). +Le(i60,cr84_0). +Le(i65,cr84_0). +Le(i70,cr84_0). +Le(i78,cr84_0). +Le(i90,cr84_0). +Le(i91,cr84_0). +Le(i104,cr84_0). +Le(i117,cr84_0). +Le(i130,cr84_0). +Le(i143,cr84_0). +Le(i156,cr84_0). +Le(i169,cr84_0). +Le(i182,cr84_0). +Le(i195,cr84_0). +Le(i208,cr84_0). +Le(i221,cr84_0). +Le(i234,cr84_0). +Le(i247,cr84_0). +Le(i260,cr84_0). +Le(i460,cr84_0). +Le(i530,cr84_0). +Le(i600,cr84_0). +Le(i660,cr84_0). +Le(i670,cr84_0). +Le(i710,cr84_0). +Le(i740,cr84_0). +Le(i810,cr84_0). +Le(i850,cr84_0). +Le(i880,cr84_0). +Le(i890,cr84_0). +Le(i920,cr84_0). +Le(i960,cr84_0). +Le(i990,cr84_0). +Le(i1030,cr84_0). +Le(i1060,cr84_0). +Le(i1100,cr84_0). +Le(i1130,cr84_0). +Le(i1170,cr84_0). +Le(i1200,cr84_0). +Le(i1240,cr84_0). +Le(i1260,cr84_0). +Le(i1270,cr84_0). +Le(i1290,cr84_0). +Le(i1310,cr84_0). +Le(i1320,cr84_0). +Le(i1330,cr84_0). +Le(i1350,cr84_0). +Le(i1360,cr84_0). +Le(i1380,cr84_0). +Le(i1390,cr84_0). +Le(i1420,cr84_0). +Le(i1430,cr84_0). +Le(i1450,cr84_0). +Le(i1460,cr84_0). +Le(i1490,cr84_0). +Le(i1520,cr84_0). +Le(i1530,cr84_0). +Le(i1540,cr84_0). +Le(i1560,cr84_0). +Le(i1590,cr84_0). +Le(i1630,cr84_0). +Le(i1660,cr84_0). +Le(i1700,cr84_0). +Le(i1730,cr84_0). +Le(i1760,cr84_0). +Le(i1770,cr84_0). +Le(i1810,cr84_0). +Le(i1840,cr84_0). +Le(i1880,cr84_0). +Le(i1910,cr84_0). +Le(i1950,cr84_0). +Le(i1980,cr84_0). +Le(i2020,cr84_0). +Le(i2050,cr84_0). +Le(i2090,cr84_0). +Le(i2120,cr84_0). +Le(i2160,cr84_0). +Le(cr84_0,i2230). +Le(cr84_0,i2270). +Le(cr84_0,i2300). +Le(cr84_0,i2340). +Le(cr84_0,i2370). +Le(cr84_0,i2410). +Le(cr84_0,i2420). +Le(cr84_0,i2440). +Le(cr84_0,i2480). +Le(cr84_0,i2510). +Le(cr84_0,i2550). +Le(cr84_0,i2580). +Le(cr84_0,i2620). +Le(cr84_0,i2640). +Le(cr84_0,i2660). +Le(cr84_0,i2730). +Le(cr84_0,i2760). +Le(cr84_0,i2800). +Le(cr84_0,i2830). +Le(cr84_0,i2860). +Le(cr84_0,i2870). +Le(cr84_0,i2940). 
+Le(cr84_0,i2970). +Le(cr84_0,i3010). +Le(cr84_0,i3040). +Le(cr84_0,i3080). +Le(cr84_0,i3120). +Le(cr84_0,i3150). +Le(cr84_0,i3220). +Le(cr84_0,i3260). +Le(cr84_0,i3290). +Le(cr84_0,i3300). +Le(cr84_0,i3330). +Le(cr84_0,i3400). +Le(cr84_0,i3430). +Le(cr84_0,i3500). +Le(cr84_0,i3520). +Le(cr84_0,i3580). +Le(cr84_0,i3610). +Le(cr84_0,i3650). +Le(cr84_0,i3680). +Le(cr84_0,i3720). +Le(cr84_0,i3740). +Le(cr84_0,i3790). +Le(cr84_0,i3820). +Le(cr84_0,i3860). +Le(cr84_0,i3960). +Le(cr84_0,i4040). +Le(cr84_0,i4140). +Le(cr84_0,i4180). +Le(cr84_0,i4400). +Le(cr84_0,i4620). +Le(cr84_0,i4840). +Le(cr84_0,i5060). +Le(cr84_0,i5280). +Le(cr84_0,i5500). +Le(cr84_0,i5720). +Le(cr84_0,i5940). +Le(cr84_0,i6160). +Le(cr84_0,i6380). +Le(cr84_0,i6600). +Le(cr84_0,i6820). +Le(cr84_0,i7040). +Le(cr84_0,i7260). +Le(cr84_0,i7480). +Le(cr84_0,i7700). +Le(cr84_0,i7920). +Le(cr84_0,i8140). +Le(cr84_0,i8360). +Le(cr84_0,i8580). +Eq(i2200,i2200). +Le(i2200,cr85_0). +Le(cr85_0,i2230). +Le(i-30,cr85_0). +Le(i0,cr85_0). +Le(i13,cr85_0). +Le(i26,cr85_0). +Le(i39,cr85_0). +Le(i52,cr85_0). +Le(i60,cr85_0). +Le(i65,cr85_0). +Le(i70,cr85_0). +Le(i78,cr85_0). +Le(i90,cr85_0). +Le(i91,cr85_0). +Le(i104,cr85_0). +Le(i117,cr85_0). +Le(i130,cr85_0). +Le(i143,cr85_0). +Le(i156,cr85_0). +Le(i169,cr85_0). +Le(i182,cr85_0). +Le(i195,cr85_0). +Le(i208,cr85_0). +Le(i221,cr85_0). +Le(i234,cr85_0). +Le(i247,cr85_0). +Le(i260,cr85_0). +Le(i460,cr85_0). +Le(i530,cr85_0). +Le(i600,cr85_0). +Le(i660,cr85_0). +Le(i670,cr85_0). +Le(i710,cr85_0). +Le(i740,cr85_0). +Le(i810,cr85_0). +Le(i850,cr85_0). +Le(i880,cr85_0). +Le(i890,cr85_0). +Le(i920,cr85_0). +Le(i960,cr85_0). +Le(i990,cr85_0). +Le(i1030,cr85_0). +Le(i1060,cr85_0). +Le(i1100,cr85_0). +Le(i1130,cr85_0). +Le(i1170,cr85_0). +Le(i1200,cr85_0). +Le(i1240,cr85_0). +Le(i1260,cr85_0). +Le(i1270,cr85_0). +Le(i1290,cr85_0). +Le(i1310,cr85_0). +Le(i1320,cr85_0). +Le(i1330,cr85_0). +Le(i1350,cr85_0). +Le(i1360,cr85_0). +Le(i1380,cr85_0). +Le(i1390,cr85_0). +Le(i1420,cr85_0). +Le(i1430,cr85_0). +Le(i1450,cr85_0). +Le(i1460,cr85_0). +Le(i1490,cr85_0). +Le(i1520,cr85_0). +Le(i1530,cr85_0). +Le(i1540,cr85_0). +Le(i1560,cr85_0). +Le(i1590,cr85_0). +Le(i1630,cr85_0). +Le(i1660,cr85_0). +Le(i1700,cr85_0). +Le(i1730,cr85_0). +Le(i1760,cr85_0). +Le(i1770,cr85_0). +Le(i1810,cr85_0). +Le(i1840,cr85_0). +Le(i1880,cr85_0). +Le(i1910,cr85_0). +Le(i1950,cr85_0). +Le(i1980,cr85_0). +Le(i2020,cr85_0). +Le(i2050,cr85_0). +Le(i2090,cr85_0). +Le(i2120,cr85_0). +Le(i2160,cr85_0). +Le(i2190,cr85_0). +Le(cr85_0,i2270). +Le(cr85_0,i2300). +Le(cr85_0,i2340). +Le(cr85_0,i2370). +Le(cr85_0,i2410). +Le(cr85_0,i2420). +Le(cr85_0,i2440). +Le(cr85_0,i2480). +Le(cr85_0,i2510). +Le(cr85_0,i2550). +Le(cr85_0,i2580). +Le(cr85_0,i2620). +Le(cr85_0,i2640). +Le(cr85_0,i2660). +Le(cr85_0,i2730). +Le(cr85_0,i2760). +Le(cr85_0,i2800). +Le(cr85_0,i2830). +Le(cr85_0,i2860). +Le(cr85_0,i2870). +Le(cr85_0,i2940). +Le(cr85_0,i2970). +Le(cr85_0,i3010). +Le(cr85_0,i3040). +Le(cr85_0,i3080). +Le(cr85_0,i3120). +Le(cr85_0,i3150). +Le(cr85_0,i3220). +Le(cr85_0,i3260). +Le(cr85_0,i3290). +Le(cr85_0,i3300). +Le(cr85_0,i3330). +Le(cr85_0,i3400). +Le(cr85_0,i3430). +Le(cr85_0,i3500). +Le(cr85_0,i3520). +Le(cr85_0,i3580). +Le(cr85_0,i3610). +Le(cr85_0,i3650). +Le(cr85_0,i3680). +Le(cr85_0,i3720). +Le(cr85_0,i3740). +Le(cr85_0,i3790). +Le(cr85_0,i3820). +Le(cr85_0,i3860). +Le(cr85_0,i3960). +Le(cr85_0,i4040). +Le(cr85_0,i4140). +Le(cr85_0,i4180). +Le(cr85_0,i4400). +Le(cr85_0,i4620). +Le(cr85_0,i4840). +Le(cr85_0,i5060). +Le(cr85_0,i5280). 
+Le(cr85_0,i5500). +Le(cr85_0,i5720). +Le(cr85_0,i5940). +Le(cr85_0,i6160). +Le(cr85_0,i6380). +Le(cr85_0,i6600). +Le(cr85_0,i6820). +Le(cr85_0,i7040). +Le(cr85_0,i7260). +Le(cr85_0,i7480). +Le(cr85_0,i7700). +Le(cr85_0,i7920). +Le(cr85_0,i8140). +Le(cr85_0,i8360). +Le(cr85_0,i8580). +Eq(i2230,i2230). +Le(i2230,cr86_0). +Le(cr86_0,i2270). +Le(i-30,cr86_0). +Le(i0,cr86_0). +Le(i13,cr86_0). +Le(i26,cr86_0). +Le(i39,cr86_0). +Le(i52,cr86_0). +Le(i60,cr86_0). +Le(i65,cr86_0). +Le(i70,cr86_0). +Le(i78,cr86_0). +Le(i90,cr86_0). +Le(i91,cr86_0). +Le(i104,cr86_0). +Le(i117,cr86_0). +Le(i130,cr86_0). +Le(i143,cr86_0). +Le(i156,cr86_0). +Le(i169,cr86_0). +Le(i182,cr86_0). +Le(i195,cr86_0). +Le(i208,cr86_0). +Le(i221,cr86_0). +Le(i234,cr86_0). +Le(i247,cr86_0). +Le(i260,cr86_0). +Le(i460,cr86_0). +Le(i530,cr86_0). +Le(i600,cr86_0). +Le(i660,cr86_0). +Le(i670,cr86_0). +Le(i710,cr86_0). +Le(i740,cr86_0). +Le(i810,cr86_0). +Le(i850,cr86_0). +Le(i880,cr86_0). +Le(i890,cr86_0). +Le(i920,cr86_0). +Le(i960,cr86_0). +Le(i990,cr86_0). +Le(i1030,cr86_0). +Le(i1060,cr86_0). +Le(i1100,cr86_0). +Le(i1130,cr86_0). +Le(i1170,cr86_0). +Le(i1200,cr86_0). +Le(i1240,cr86_0). +Le(i1260,cr86_0). +Le(i1270,cr86_0). +Le(i1290,cr86_0). +Le(i1310,cr86_0). +Le(i1320,cr86_0). +Le(i1330,cr86_0). +Le(i1350,cr86_0). +Le(i1360,cr86_0). +Le(i1380,cr86_0). +Le(i1390,cr86_0). +Le(i1420,cr86_0). +Le(i1430,cr86_0). +Le(i1450,cr86_0). +Le(i1460,cr86_0). +Le(i1490,cr86_0). +Le(i1520,cr86_0). +Le(i1530,cr86_0). +Le(i1540,cr86_0). +Le(i1560,cr86_0). +Le(i1590,cr86_0). +Le(i1630,cr86_0). +Le(i1660,cr86_0). +Le(i1700,cr86_0). +Le(i1730,cr86_0). +Le(i1760,cr86_0). +Le(i1770,cr86_0). +Le(i1810,cr86_0). +Le(i1840,cr86_0). +Le(i1880,cr86_0). +Le(i1910,cr86_0). +Le(i1950,cr86_0). +Le(i1980,cr86_0). +Le(i2020,cr86_0). +Le(i2050,cr86_0). +Le(i2090,cr86_0). +Le(i2120,cr86_0). +Le(i2160,cr86_0). +Le(i2190,cr86_0). +Le(i2200,cr86_0). +Le(cr86_0,i2300). +Le(cr86_0,i2340). +Le(cr86_0,i2370). +Le(cr86_0,i2410). +Le(cr86_0,i2420). +Le(cr86_0,i2440). +Le(cr86_0,i2480). +Le(cr86_0,i2510). +Le(cr86_0,i2550). +Le(cr86_0,i2580). +Le(cr86_0,i2620). +Le(cr86_0,i2640). +Le(cr86_0,i2660). +Le(cr86_0,i2730). +Le(cr86_0,i2760). +Le(cr86_0,i2800). +Le(cr86_0,i2830). +Le(cr86_0,i2860). +Le(cr86_0,i2870). +Le(cr86_0,i2940). +Le(cr86_0,i2970). +Le(cr86_0,i3010). +Le(cr86_0,i3040). +Le(cr86_0,i3080). +Le(cr86_0,i3120). +Le(cr86_0,i3150). +Le(cr86_0,i3220). +Le(cr86_0,i3260). +Le(cr86_0,i3290). +Le(cr86_0,i3300). +Le(cr86_0,i3330). +Le(cr86_0,i3400). +Le(cr86_0,i3430). +Le(cr86_0,i3500). +Le(cr86_0,i3520). +Le(cr86_0,i3580). +Le(cr86_0,i3610). +Le(cr86_0,i3650). +Le(cr86_0,i3680). +Le(cr86_0,i3720). +Le(cr86_0,i3740). +Le(cr86_0,i3790). +Le(cr86_0,i3820). +Le(cr86_0,i3860). +Le(cr86_0,i3960). +Le(cr86_0,i4040). +Le(cr86_0,i4140). +Le(cr86_0,i4180). +Le(cr86_0,i4400). +Le(cr86_0,i4620). +Le(cr86_0,i4840). +Le(cr86_0,i5060). +Le(cr86_0,i5280). +Le(cr86_0,i5500). +Le(cr86_0,i5720). +Le(cr86_0,i5940). +Le(cr86_0,i6160). +Le(cr86_0,i6380). +Le(cr86_0,i6600). +Le(cr86_0,i6820). +Le(cr86_0,i7040). +Le(cr86_0,i7260). +Le(cr86_0,i7480). +Le(cr86_0,i7700). +Le(cr86_0,i7920). +Le(cr86_0,i8140). +Le(cr86_0,i8360). +Le(cr86_0,i8580). +Eq(i2270,i2270). +Le(i2270,cr87_0). +Le(cr87_0,i2300). +Le(i-30,cr87_0). +Le(i0,cr87_0). +Le(i13,cr87_0). +Le(i26,cr87_0). +Le(i39,cr87_0). +Le(i52,cr87_0). +Le(i60,cr87_0). +Le(i65,cr87_0). +Le(i70,cr87_0). +Le(i78,cr87_0). +Le(i90,cr87_0). +Le(i91,cr87_0). +Le(i104,cr87_0). +Le(i117,cr87_0). +Le(i130,cr87_0). +Le(i143,cr87_0). +Le(i156,cr87_0). 
+Le(i169,cr87_0). +Le(i182,cr87_0). +Le(i195,cr87_0). +Le(i208,cr87_0). +Le(i221,cr87_0). +Le(i234,cr87_0). +Le(i247,cr87_0). +Le(i260,cr87_0). +Le(i460,cr87_0). +Le(i530,cr87_0). +Le(i600,cr87_0). +Le(i660,cr87_0). +Le(i670,cr87_0). +Le(i710,cr87_0). +Le(i740,cr87_0). +Le(i810,cr87_0). +Le(i850,cr87_0). +Le(i880,cr87_0). +Le(i890,cr87_0). +Le(i920,cr87_0). +Le(i960,cr87_0). +Le(i990,cr87_0). +Le(i1030,cr87_0). +Le(i1060,cr87_0). +Le(i1100,cr87_0). +Le(i1130,cr87_0). +Le(i1170,cr87_0). +Le(i1200,cr87_0). +Le(i1240,cr87_0). +Le(i1260,cr87_0). +Le(i1270,cr87_0). +Le(i1290,cr87_0). +Le(i1310,cr87_0). +Le(i1320,cr87_0). +Le(i1330,cr87_0). +Le(i1350,cr87_0). +Le(i1360,cr87_0). +Le(i1380,cr87_0). +Le(i1390,cr87_0). +Le(i1420,cr87_0). +Le(i1430,cr87_0). +Le(i1450,cr87_0). +Le(i1460,cr87_0). +Le(i1490,cr87_0). +Le(i1520,cr87_0). +Le(i1530,cr87_0). +Le(i1540,cr87_0). +Le(i1560,cr87_0). +Le(i1590,cr87_0). +Le(i1630,cr87_0). +Le(i1660,cr87_0). +Le(i1700,cr87_0). +Le(i1730,cr87_0). +Le(i1760,cr87_0). +Le(i1770,cr87_0). +Le(i1810,cr87_0). +Le(i1840,cr87_0). +Le(i1880,cr87_0). +Le(i1910,cr87_0). +Le(i1950,cr87_0). +Le(i1980,cr87_0). +Le(i2020,cr87_0). +Le(i2050,cr87_0). +Le(i2090,cr87_0). +Le(i2120,cr87_0). +Le(i2160,cr87_0). +Le(i2190,cr87_0). +Le(i2200,cr87_0). +Le(i2230,cr87_0). +Le(cr87_0,i2340). +Le(cr87_0,i2370). +Le(cr87_0,i2410). +Le(cr87_0,i2420). +Le(cr87_0,i2440). +Le(cr87_0,i2480). +Le(cr87_0,i2510). +Le(cr87_0,i2550). +Le(cr87_0,i2580). +Le(cr87_0,i2620). +Le(cr87_0,i2640). +Le(cr87_0,i2660). +Le(cr87_0,i2730). +Le(cr87_0,i2760). +Le(cr87_0,i2800). +Le(cr87_0,i2830). +Le(cr87_0,i2860). +Le(cr87_0,i2870). +Le(cr87_0,i2940). +Le(cr87_0,i2970). +Le(cr87_0,i3010). +Le(cr87_0,i3040). +Le(cr87_0,i3080). +Le(cr87_0,i3120). +Le(cr87_0,i3150). +Le(cr87_0,i3220). +Le(cr87_0,i3260). +Le(cr87_0,i3290). +Le(cr87_0,i3300). +Le(cr87_0,i3330). +Le(cr87_0,i3400). +Le(cr87_0,i3430). +Le(cr87_0,i3500). +Le(cr87_0,i3520). +Le(cr87_0,i3580). +Le(cr87_0,i3610). +Le(cr87_0,i3650). +Le(cr87_0,i3680). +Le(cr87_0,i3720). +Le(cr87_0,i3740). +Le(cr87_0,i3790). +Le(cr87_0,i3820). +Le(cr87_0,i3860). +Le(cr87_0,i3960). +Le(cr87_0,i4040). +Le(cr87_0,i4140). +Le(cr87_0,i4180). +Le(cr87_0,i4400). +Le(cr87_0,i4620). +Le(cr87_0,i4840). +Le(cr87_0,i5060). +Le(cr87_0,i5280). +Le(cr87_0,i5500). +Le(cr87_0,i5720). +Le(cr87_0,i5940). +Le(cr87_0,i6160). +Le(cr87_0,i6380). +Le(cr87_0,i6600). +Le(cr87_0,i6820). +Le(cr87_0,i7040). +Le(cr87_0,i7260). +Le(cr87_0,i7480). +Le(cr87_0,i7700). +Le(cr87_0,i7920). +Le(cr87_0,i8140). +Le(cr87_0,i8360). +Le(cr87_0,i8580). +Eq(i2300,i2300). +Le(i2300,cr88_0). +Le(cr88_0,i2340). +Le(i-30,cr88_0). +Le(i0,cr88_0). +Le(i13,cr88_0). +Le(i26,cr88_0). +Le(i39,cr88_0). +Le(i52,cr88_0). +Le(i60,cr88_0). +Le(i65,cr88_0). +Le(i70,cr88_0). +Le(i78,cr88_0). +Le(i90,cr88_0). +Le(i91,cr88_0). +Le(i104,cr88_0). +Le(i117,cr88_0). +Le(i130,cr88_0). +Le(i143,cr88_0). +Le(i156,cr88_0). +Le(i169,cr88_0). +Le(i182,cr88_0). +Le(i195,cr88_0). +Le(i208,cr88_0). +Le(i221,cr88_0). +Le(i234,cr88_0). +Le(i247,cr88_0). +Le(i260,cr88_0). +Le(i460,cr88_0). +Le(i530,cr88_0). +Le(i600,cr88_0). +Le(i660,cr88_0). +Le(i670,cr88_0). +Le(i710,cr88_0). +Le(i740,cr88_0). +Le(i810,cr88_0). +Le(i850,cr88_0). +Le(i880,cr88_0). +Le(i890,cr88_0). +Le(i920,cr88_0). +Le(i960,cr88_0). +Le(i990,cr88_0). +Le(i1030,cr88_0). +Le(i1060,cr88_0). +Le(i1100,cr88_0). +Le(i1130,cr88_0). +Le(i1170,cr88_0). +Le(i1200,cr88_0). +Le(i1240,cr88_0). +Le(i1260,cr88_0). +Le(i1270,cr88_0). +Le(i1290,cr88_0). +Le(i1310,cr88_0). +Le(i1320,cr88_0). +Le(i1330,cr88_0). 
+Le(i1350,cr88_0). +Le(i1360,cr88_0). +Le(i1380,cr88_0). +Le(i1390,cr88_0). +Le(i1420,cr88_0). +Le(i1430,cr88_0). +Le(i1450,cr88_0). +Le(i1460,cr88_0). +Le(i1490,cr88_0). +Le(i1520,cr88_0). +Le(i1530,cr88_0). +Le(i1540,cr88_0). +Le(i1560,cr88_0). +Le(i1590,cr88_0). +Le(i1630,cr88_0). +Le(i1660,cr88_0). +Le(i1700,cr88_0). +Le(i1730,cr88_0). +Le(i1760,cr88_0). +Le(i1770,cr88_0). +Le(i1810,cr88_0). +Le(i1840,cr88_0). +Le(i1880,cr88_0). +Le(i1910,cr88_0). +Le(i1950,cr88_0). +Le(i1980,cr88_0). +Le(i2020,cr88_0). +Le(i2050,cr88_0). +Le(i2090,cr88_0). +Le(i2120,cr88_0). +Le(i2160,cr88_0). +Le(i2190,cr88_0). +Le(i2200,cr88_0). +Le(i2230,cr88_0). +Le(i2270,cr88_0). +Le(cr88_0,i2370). +Le(cr88_0,i2410). +Le(cr88_0,i2420). +Le(cr88_0,i2440). +Le(cr88_0,i2480). +Le(cr88_0,i2510). +Le(cr88_0,i2550). +Le(cr88_0,i2580). +Le(cr88_0,i2620). +Le(cr88_0,i2640). +Le(cr88_0,i2660). +Le(cr88_0,i2730). +Le(cr88_0,i2760). +Le(cr88_0,i2800). +Le(cr88_0,i2830). +Le(cr88_0,i2860). +Le(cr88_0,i2870). +Le(cr88_0,i2940). +Le(cr88_0,i2970). +Le(cr88_0,i3010). +Le(cr88_0,i3040). +Le(cr88_0,i3080). +Le(cr88_0,i3120). +Le(cr88_0,i3150). +Le(cr88_0,i3220). +Le(cr88_0,i3260). +Le(cr88_0,i3290). +Le(cr88_0,i3300). +Le(cr88_0,i3330). +Le(cr88_0,i3400). +Le(cr88_0,i3430). +Le(cr88_0,i3500). +Le(cr88_0,i3520). +Le(cr88_0,i3580). +Le(cr88_0,i3610). +Le(cr88_0,i3650). +Le(cr88_0,i3680). +Le(cr88_0,i3720). +Le(cr88_0,i3740). +Le(cr88_0,i3790). +Le(cr88_0,i3820). +Le(cr88_0,i3860). +Le(cr88_0,i3960). +Le(cr88_0,i4040). +Le(cr88_0,i4140). +Le(cr88_0,i4180). +Le(cr88_0,i4400). +Le(cr88_0,i4620). +Le(cr88_0,i4840). +Le(cr88_0,i5060). +Le(cr88_0,i5280). +Le(cr88_0,i5500). +Le(cr88_0,i5720). +Le(cr88_0,i5940). +Le(cr88_0,i6160). +Le(cr88_0,i6380). +Le(cr88_0,i6600). +Le(cr88_0,i6820). +Le(cr88_0,i7040). +Le(cr88_0,i7260). +Le(cr88_0,i7480). +Le(cr88_0,i7700). +Le(cr88_0,i7920). +Le(cr88_0,i8140). +Le(cr88_0,i8360). +Le(cr88_0,i8580). +Eq(i2340,i2340). +Le(i2340,cr89_0). +Le(cr89_0,i2370). +Le(i-30,cr89_0). +Le(i0,cr89_0). +Le(i13,cr89_0). +Le(i26,cr89_0). +Le(i39,cr89_0). +Le(i52,cr89_0). +Le(i60,cr89_0). +Le(i65,cr89_0). +Le(i70,cr89_0). +Le(i78,cr89_0). +Le(i90,cr89_0). +Le(i91,cr89_0). +Le(i104,cr89_0). +Le(i117,cr89_0). +Le(i130,cr89_0). +Le(i143,cr89_0). +Le(i156,cr89_0). +Le(i169,cr89_0). +Le(i182,cr89_0). +Le(i195,cr89_0). +Le(i208,cr89_0). +Le(i221,cr89_0). +Le(i234,cr89_0). +Le(i247,cr89_0). +Le(i260,cr89_0). +Le(i460,cr89_0). +Le(i530,cr89_0). +Le(i600,cr89_0). +Le(i660,cr89_0). +Le(i670,cr89_0). +Le(i710,cr89_0). +Le(i740,cr89_0). +Le(i810,cr89_0). +Le(i850,cr89_0). +Le(i880,cr89_0). +Le(i890,cr89_0). +Le(i920,cr89_0). +Le(i960,cr89_0). +Le(i990,cr89_0). +Le(i1030,cr89_0). +Le(i1060,cr89_0). +Le(i1100,cr89_0). +Le(i1130,cr89_0). +Le(i1170,cr89_0). +Le(i1200,cr89_0). +Le(i1240,cr89_0). +Le(i1260,cr89_0). +Le(i1270,cr89_0). +Le(i1290,cr89_0). +Le(i1310,cr89_0). +Le(i1320,cr89_0). +Le(i1330,cr89_0). +Le(i1350,cr89_0). +Le(i1360,cr89_0). +Le(i1380,cr89_0). +Le(i1390,cr89_0). +Le(i1420,cr89_0). +Le(i1430,cr89_0). +Le(i1450,cr89_0). +Le(i1460,cr89_0). +Le(i1490,cr89_0). +Le(i1520,cr89_0). +Le(i1530,cr89_0). +Le(i1540,cr89_0). +Le(i1560,cr89_0). +Le(i1590,cr89_0). +Le(i1630,cr89_0). +Le(i1660,cr89_0). +Le(i1700,cr89_0). +Le(i1730,cr89_0). +Le(i1760,cr89_0). +Le(i1770,cr89_0). +Le(i1810,cr89_0). +Le(i1840,cr89_0). +Le(i1880,cr89_0). +Le(i1910,cr89_0). +Le(i1950,cr89_0). +Le(i1980,cr89_0). +Le(i2020,cr89_0). +Le(i2050,cr89_0). +Le(i2090,cr89_0). +Le(i2120,cr89_0). +Le(i2160,cr89_0). +Le(i2190,cr89_0). +Le(i2200,cr89_0). 
+Le(i2230,cr89_0). +Le(i2270,cr89_0). +Le(i2300,cr89_0). +Le(cr89_0,i2410). +Le(cr89_0,i2420). +Le(cr89_0,i2440). +Le(cr89_0,i2480). +Le(cr89_0,i2510). +Le(cr89_0,i2550). +Le(cr89_0,i2580). +Le(cr89_0,i2620). +Le(cr89_0,i2640). +Le(cr89_0,i2660). +Le(cr89_0,i2730). +Le(cr89_0,i2760). +Le(cr89_0,i2800). +Le(cr89_0,i2830). +Le(cr89_0,i2860). +Le(cr89_0,i2870). +Le(cr89_0,i2940). +Le(cr89_0,i2970). +Le(cr89_0,i3010). +Le(cr89_0,i3040). +Le(cr89_0,i3080). +Le(cr89_0,i3120). +Le(cr89_0,i3150). +Le(cr89_0,i3220). +Le(cr89_0,i3260). +Le(cr89_0,i3290). +Le(cr89_0,i3300). +Le(cr89_0,i3330). +Le(cr89_0,i3400). +Le(cr89_0,i3430). +Le(cr89_0,i3500). +Le(cr89_0,i3520). +Le(cr89_0,i3580). +Le(cr89_0,i3610). +Le(cr89_0,i3650). +Le(cr89_0,i3680). +Le(cr89_0,i3720). +Le(cr89_0,i3740). +Le(cr89_0,i3790). +Le(cr89_0,i3820). +Le(cr89_0,i3860). +Le(cr89_0,i3960). +Le(cr89_0,i4040). +Le(cr89_0,i4140). +Le(cr89_0,i4180). +Le(cr89_0,i4400). +Le(cr89_0,i4620). +Le(cr89_0,i4840). +Le(cr89_0,i5060). +Le(cr89_0,i5280). +Le(cr89_0,i5500). +Le(cr89_0,i5720). +Le(cr89_0,i5940). +Le(cr89_0,i6160). +Le(cr89_0,i6380). +Le(cr89_0,i6600). +Le(cr89_0,i6820). +Le(cr89_0,i7040). +Le(cr89_0,i7260). +Le(cr89_0,i7480). +Le(cr89_0,i7700). +Le(cr89_0,i7920). +Le(cr89_0,i8140). +Le(cr89_0,i8360). +Le(cr89_0,i8580). +Eq(i2370,i2370). +Le(i2370,cr90_0). +Le(cr90_0,i2410). +Le(i-30,cr90_0). +Le(i0,cr90_0). +Le(i13,cr90_0). +Le(i26,cr90_0). +Le(i39,cr90_0). +Le(i52,cr90_0). +Le(i60,cr90_0). +Le(i65,cr90_0). +Le(i70,cr90_0). +Le(i78,cr90_0). +Le(i90,cr90_0). +Le(i91,cr90_0). +Le(i104,cr90_0). +Le(i117,cr90_0). +Le(i130,cr90_0). +Le(i143,cr90_0). +Le(i156,cr90_0). +Le(i169,cr90_0). +Le(i182,cr90_0). +Le(i195,cr90_0). +Le(i208,cr90_0). +Le(i221,cr90_0). +Le(i234,cr90_0). +Le(i247,cr90_0). +Le(i260,cr90_0). +Le(i460,cr90_0). +Le(i530,cr90_0). +Le(i600,cr90_0). +Le(i660,cr90_0). +Le(i670,cr90_0). +Le(i710,cr90_0). +Le(i740,cr90_0). +Le(i810,cr90_0). +Le(i850,cr90_0). +Le(i880,cr90_0). +Le(i890,cr90_0). +Le(i920,cr90_0). +Le(i960,cr90_0). +Le(i990,cr90_0). +Le(i1030,cr90_0). +Le(i1060,cr90_0). +Le(i1100,cr90_0). +Le(i1130,cr90_0). +Le(i1170,cr90_0). +Le(i1200,cr90_0). +Le(i1240,cr90_0). +Le(i1260,cr90_0). +Le(i1270,cr90_0). +Le(i1290,cr90_0). +Le(i1310,cr90_0). +Le(i1320,cr90_0). +Le(i1330,cr90_0). +Le(i1350,cr90_0). +Le(i1360,cr90_0). +Le(i1380,cr90_0). +Le(i1390,cr90_0). +Le(i1420,cr90_0). +Le(i1430,cr90_0). +Le(i1450,cr90_0). +Le(i1460,cr90_0). +Le(i1490,cr90_0). +Le(i1520,cr90_0). +Le(i1530,cr90_0). +Le(i1540,cr90_0). +Le(i1560,cr90_0). +Le(i1590,cr90_0). +Le(i1630,cr90_0). +Le(i1660,cr90_0). +Le(i1700,cr90_0). +Le(i1730,cr90_0). +Le(i1760,cr90_0). +Le(i1770,cr90_0). +Le(i1810,cr90_0). +Le(i1840,cr90_0). +Le(i1880,cr90_0). +Le(i1910,cr90_0). +Le(i1950,cr90_0). +Le(i1980,cr90_0). +Le(i2020,cr90_0). +Le(i2050,cr90_0). +Le(i2090,cr90_0). +Le(i2120,cr90_0). +Le(i2160,cr90_0). +Le(i2190,cr90_0). +Le(i2200,cr90_0). +Le(i2230,cr90_0). +Le(i2270,cr90_0). +Le(i2300,cr90_0). +Le(i2340,cr90_0). +Le(cr90_0,i2420). +Le(cr90_0,i2440). +Le(cr90_0,i2480). +Le(cr90_0,i2510). +Le(cr90_0,i2550). +Le(cr90_0,i2580). +Le(cr90_0,i2620). +Le(cr90_0,i2640). +Le(cr90_0,i2660). +Le(cr90_0,i2730). +Le(cr90_0,i2760). +Le(cr90_0,i2800). +Le(cr90_0,i2830). +Le(cr90_0,i2860). +Le(cr90_0,i2870). +Le(cr90_0,i2940). +Le(cr90_0,i2970). +Le(cr90_0,i3010). +Le(cr90_0,i3040). +Le(cr90_0,i3080). +Le(cr90_0,i3120). +Le(cr90_0,i3150). +Le(cr90_0,i3220). +Le(cr90_0,i3260). +Le(cr90_0,i3290). +Le(cr90_0,i3300). +Le(cr90_0,i3330). +Le(cr90_0,i3400). +Le(cr90_0,i3430). 
+Le(cr90_0,i3500). +Le(cr90_0,i3520). +Le(cr90_0,i3580). +Le(cr90_0,i3610). +Le(cr90_0,i3650). +Le(cr90_0,i3680). +Le(cr90_0,i3720). +Le(cr90_0,i3740). +Le(cr90_0,i3790). +Le(cr90_0,i3820). +Le(cr90_0,i3860). +Le(cr90_0,i3960). +Le(cr90_0,i4040). +Le(cr90_0,i4140). +Le(cr90_0,i4180). +Le(cr90_0,i4400). +Le(cr90_0,i4620). +Le(cr90_0,i4840). +Le(cr90_0,i5060). +Le(cr90_0,i5280). +Le(cr90_0,i5500). +Le(cr90_0,i5720). +Le(cr90_0,i5940). +Le(cr90_0,i6160). +Le(cr90_0,i6380). +Le(cr90_0,i6600). +Le(cr90_0,i6820). +Le(cr90_0,i7040). +Le(cr90_0,i7260). +Le(cr90_0,i7480). +Le(cr90_0,i7700). +Le(cr90_0,i7920). +Le(cr90_0,i8140). +Le(cr90_0,i8360). +Le(cr90_0,i8580). +Eq(i2410,i2410). +Le(i2410,cr91_0). +Le(cr91_0,i2420). +Le(i-30,cr91_0). +Le(i0,cr91_0). +Le(i13,cr91_0). +Le(i26,cr91_0). +Le(i39,cr91_0). +Le(i52,cr91_0). +Le(i60,cr91_0). +Le(i65,cr91_0). +Le(i70,cr91_0). +Le(i78,cr91_0). +Le(i90,cr91_0). +Le(i91,cr91_0). +Le(i104,cr91_0). +Le(i117,cr91_0). +Le(i130,cr91_0). +Le(i143,cr91_0). +Le(i156,cr91_0). +Le(i169,cr91_0). +Le(i182,cr91_0). +Le(i195,cr91_0). +Le(i208,cr91_0). +Le(i221,cr91_0). +Le(i234,cr91_0). +Le(i247,cr91_0). +Le(i260,cr91_0). +Le(i460,cr91_0). +Le(i530,cr91_0). +Le(i600,cr91_0). +Le(i660,cr91_0). +Le(i670,cr91_0). +Le(i710,cr91_0). +Le(i740,cr91_0). +Le(i810,cr91_0). +Le(i850,cr91_0). +Le(i880,cr91_0). +Le(i890,cr91_0). +Le(i920,cr91_0). +Le(i960,cr91_0). +Le(i990,cr91_0). +Le(i1030,cr91_0). +Le(i1060,cr91_0). +Le(i1100,cr91_0). +Le(i1130,cr91_0). +Le(i1170,cr91_0). +Le(i1200,cr91_0). +Le(i1240,cr91_0). +Le(i1260,cr91_0). +Le(i1270,cr91_0). +Le(i1290,cr91_0). +Le(i1310,cr91_0). +Le(i1320,cr91_0). +Le(i1330,cr91_0). +Le(i1350,cr91_0). +Le(i1360,cr91_0). +Le(i1380,cr91_0). +Le(i1390,cr91_0). +Le(i1420,cr91_0). +Le(i1430,cr91_0). +Le(i1450,cr91_0). +Le(i1460,cr91_0). +Le(i1490,cr91_0). +Le(i1520,cr91_0). +Le(i1530,cr91_0). +Le(i1540,cr91_0). +Le(i1560,cr91_0). +Le(i1590,cr91_0). +Le(i1630,cr91_0). +Le(i1660,cr91_0). +Le(i1700,cr91_0). +Le(i1730,cr91_0). +Le(i1760,cr91_0). +Le(i1770,cr91_0). +Le(i1810,cr91_0). +Le(i1840,cr91_0). +Le(i1880,cr91_0). +Le(i1910,cr91_0). +Le(i1950,cr91_0). +Le(i1980,cr91_0). +Le(i2020,cr91_0). +Le(i2050,cr91_0). +Le(i2090,cr91_0). +Le(i2120,cr91_0). +Le(i2160,cr91_0). +Le(i2190,cr91_0). +Le(i2200,cr91_0). +Le(i2230,cr91_0). +Le(i2270,cr91_0). +Le(i2300,cr91_0). +Le(i2340,cr91_0). +Le(i2370,cr91_0). +Le(cr91_0,i2440). +Le(cr91_0,i2480). +Le(cr91_0,i2510). +Le(cr91_0,i2550). +Le(cr91_0,i2580). +Le(cr91_0,i2620). +Le(cr91_0,i2640). +Le(cr91_0,i2660). +Le(cr91_0,i2730). +Le(cr91_0,i2760). +Le(cr91_0,i2800). +Le(cr91_0,i2830). +Le(cr91_0,i2860). +Le(cr91_0,i2870). +Le(cr91_0,i2940). +Le(cr91_0,i2970). +Le(cr91_0,i3010). +Le(cr91_0,i3040). +Le(cr91_0,i3080). +Le(cr91_0,i3120). +Le(cr91_0,i3150). +Le(cr91_0,i3220). +Le(cr91_0,i3260). +Le(cr91_0,i3290). +Le(cr91_0,i3300). +Le(cr91_0,i3330). +Le(cr91_0,i3400). +Le(cr91_0,i3430). +Le(cr91_0,i3500). +Le(cr91_0,i3520). +Le(cr91_0,i3580). +Le(cr91_0,i3610). +Le(cr91_0,i3650). +Le(cr91_0,i3680). +Le(cr91_0,i3720). +Le(cr91_0,i3740). +Le(cr91_0,i3790). +Le(cr91_0,i3820). +Le(cr91_0,i3860). +Le(cr91_0,i3960). +Le(cr91_0,i4040). +Le(cr91_0,i4140). +Le(cr91_0,i4180). +Le(cr91_0,i4400). +Le(cr91_0,i4620). +Le(cr91_0,i4840). +Le(cr91_0,i5060). +Le(cr91_0,i5280). +Le(cr91_0,i5500). +Le(cr91_0,i5720). +Le(cr91_0,i5940). +Le(cr91_0,i6160). +Le(cr91_0,i6380). +Le(cr91_0,i6600). +Le(cr91_0,i6820). +Le(cr91_0,i7040). +Le(cr91_0,i7260). +Le(cr91_0,i7480). +Le(cr91_0,i7700). +Le(cr91_0,i7920). +Le(cr91_0,i8140). 
+Le(cr91_0,i8360). +Le(cr91_0,i8580). +Eq(i2420,i2420). +Le(i2420,cr92_0). +Le(cr92_0,i2440). +Le(i-30,cr92_0). +Le(i0,cr92_0). +Le(i13,cr92_0). +Le(i26,cr92_0). +Le(i39,cr92_0). +Le(i52,cr92_0). +Le(i60,cr92_0). +Le(i65,cr92_0). +Le(i70,cr92_0). +Le(i78,cr92_0). +Le(i90,cr92_0). +Le(i91,cr92_0). +Le(i104,cr92_0). +Le(i117,cr92_0). +Le(i130,cr92_0). +Le(i143,cr92_0). +Le(i156,cr92_0). +Le(i169,cr92_0). +Le(i182,cr92_0). +Le(i195,cr92_0). +Le(i208,cr92_0). +Le(i221,cr92_0). +Le(i234,cr92_0). +Le(i247,cr92_0). +Le(i260,cr92_0). +Le(i460,cr92_0). +Le(i530,cr92_0). +Le(i600,cr92_0). +Le(i660,cr92_0). +Le(i670,cr92_0). +Le(i710,cr92_0). +Le(i740,cr92_0). +Le(i810,cr92_0). +Le(i850,cr92_0). +Le(i880,cr92_0). +Le(i890,cr92_0). +Le(i920,cr92_0). +Le(i960,cr92_0). +Le(i990,cr92_0). +Le(i1030,cr92_0). +Le(i1060,cr92_0). +Le(i1100,cr92_0). +Le(i1130,cr92_0). +Le(i1170,cr92_0). +Le(i1200,cr92_0). +Le(i1240,cr92_0). +Le(i1260,cr92_0). +Le(i1270,cr92_0). +Le(i1290,cr92_0). +Le(i1310,cr92_0). +Le(i1320,cr92_0). +Le(i1330,cr92_0). +Le(i1350,cr92_0). +Le(i1360,cr92_0). +Le(i1380,cr92_0). +Le(i1390,cr92_0). +Le(i1420,cr92_0). +Le(i1430,cr92_0). +Le(i1450,cr92_0). +Le(i1460,cr92_0). +Le(i1490,cr92_0). +Le(i1520,cr92_0). +Le(i1530,cr92_0). +Le(i1540,cr92_0). +Le(i1560,cr92_0). +Le(i1590,cr92_0). +Le(i1630,cr92_0). +Le(i1660,cr92_0). +Le(i1700,cr92_0). +Le(i1730,cr92_0). +Le(i1760,cr92_0). +Le(i1770,cr92_0). +Le(i1810,cr92_0). +Le(i1840,cr92_0). +Le(i1880,cr92_0). +Le(i1910,cr92_0). +Le(i1950,cr92_0). +Le(i1980,cr92_0). +Le(i2020,cr92_0). +Le(i2050,cr92_0). +Le(i2090,cr92_0). +Le(i2120,cr92_0). +Le(i2160,cr92_0). +Le(i2190,cr92_0). +Le(i2200,cr92_0). +Le(i2230,cr92_0). +Le(i2270,cr92_0). +Le(i2300,cr92_0). +Le(i2340,cr92_0). +Le(i2370,cr92_0). +Le(i2410,cr92_0). +Le(cr92_0,i2480). +Le(cr92_0,i2510). +Le(cr92_0,i2550). +Le(cr92_0,i2580). +Le(cr92_0,i2620). +Le(cr92_0,i2640). +Le(cr92_0,i2660). +Le(cr92_0,i2730). +Le(cr92_0,i2760). +Le(cr92_0,i2800). +Le(cr92_0,i2830). +Le(cr92_0,i2860). +Le(cr92_0,i2870). +Le(cr92_0,i2940). +Le(cr92_0,i2970). +Le(cr92_0,i3010). +Le(cr92_0,i3040). +Le(cr92_0,i3080). +Le(cr92_0,i3120). +Le(cr92_0,i3150). +Le(cr92_0,i3220). +Le(cr92_0,i3260). +Le(cr92_0,i3290). +Le(cr92_0,i3300). +Le(cr92_0,i3330). +Le(cr92_0,i3400). +Le(cr92_0,i3430). +Le(cr92_0,i3500). +Le(cr92_0,i3520). +Le(cr92_0,i3580). +Le(cr92_0,i3610). +Le(cr92_0,i3650). +Le(cr92_0,i3680). +Le(cr92_0,i3720). +Le(cr92_0,i3740). +Le(cr92_0,i3790). +Le(cr92_0,i3820). +Le(cr92_0,i3860). +Le(cr92_0,i3960). +Le(cr92_0,i4040). +Le(cr92_0,i4140). +Le(cr92_0,i4180). +Le(cr92_0,i4400). +Le(cr92_0,i4620). +Le(cr92_0,i4840). +Le(cr92_0,i5060). +Le(cr92_0,i5280). +Le(cr92_0,i5500). +Le(cr92_0,i5720). +Le(cr92_0,i5940). +Le(cr92_0,i6160). +Le(cr92_0,i6380). +Le(cr92_0,i6600). +Le(cr92_0,i6820). +Le(cr92_0,i7040). +Le(cr92_0,i7260). +Le(cr92_0,i7480). +Le(cr92_0,i7700). +Le(cr92_0,i7920). +Le(cr92_0,i8140). +Le(cr92_0,i8360). +Le(cr92_0,i8580). +Eq(i2440,i2440). +Le(i2440,cr93_0). +Le(cr93_0,i2480). +Le(i-30,cr93_0). +Le(i0,cr93_0). +Le(i13,cr93_0). +Le(i26,cr93_0). +Le(i39,cr93_0). +Le(i52,cr93_0). +Le(i60,cr93_0). +Le(i65,cr93_0). +Le(i70,cr93_0). +Le(i78,cr93_0). +Le(i90,cr93_0). +Le(i91,cr93_0). +Le(i104,cr93_0). +Le(i117,cr93_0). +Le(i130,cr93_0). +Le(i143,cr93_0). +Le(i156,cr93_0). +Le(i169,cr93_0). +Le(i182,cr93_0). +Le(i195,cr93_0). +Le(i208,cr93_0). +Le(i221,cr93_0). +Le(i234,cr93_0). +Le(i247,cr93_0). +Le(i260,cr93_0). +Le(i460,cr93_0). +Le(i530,cr93_0). +Le(i600,cr93_0). +Le(i660,cr93_0). +Le(i670,cr93_0). +Le(i710,cr93_0). 
+Le(i740,cr93_0). +Le(i810,cr93_0). +Le(i850,cr93_0). +Le(i880,cr93_0). +Le(i890,cr93_0). +Le(i920,cr93_0). +Le(i960,cr93_0). +Le(i990,cr93_0). +Le(i1030,cr93_0). +Le(i1060,cr93_0). +Le(i1100,cr93_0). +Le(i1130,cr93_0). +Le(i1170,cr93_0). +Le(i1200,cr93_0). +Le(i1240,cr93_0). +Le(i1260,cr93_0). +Le(i1270,cr93_0). +Le(i1290,cr93_0). +Le(i1310,cr93_0). +Le(i1320,cr93_0). +Le(i1330,cr93_0). +Le(i1350,cr93_0). +Le(i1360,cr93_0). +Le(i1380,cr93_0). +Le(i1390,cr93_0). +Le(i1420,cr93_0). +Le(i1430,cr93_0). +Le(i1450,cr93_0). +Le(i1460,cr93_0). +Le(i1490,cr93_0). +Le(i1520,cr93_0). +Le(i1530,cr93_0). +Le(i1540,cr93_0). +Le(i1560,cr93_0). +Le(i1590,cr93_0). +Le(i1630,cr93_0). +Le(i1660,cr93_0). +Le(i1700,cr93_0). +Le(i1730,cr93_0). +Le(i1760,cr93_0). +Le(i1770,cr93_0). +Le(i1810,cr93_0). +Le(i1840,cr93_0). +Le(i1880,cr93_0). +Le(i1910,cr93_0). +Le(i1950,cr93_0). +Le(i1980,cr93_0). +Le(i2020,cr93_0). +Le(i2050,cr93_0). +Le(i2090,cr93_0). +Le(i2120,cr93_0). +Le(i2160,cr93_0). +Le(i2190,cr93_0). +Le(i2200,cr93_0). +Le(i2230,cr93_0). +Le(i2270,cr93_0). +Le(i2300,cr93_0). +Le(i2340,cr93_0). +Le(i2370,cr93_0). +Le(i2410,cr93_0). +Le(i2420,cr93_0). +Le(cr93_0,i2510). +Le(cr93_0,i2550). +Le(cr93_0,i2580). +Le(cr93_0,i2620). +Le(cr93_0,i2640). +Le(cr93_0,i2660). +Le(cr93_0,i2730). +Le(cr93_0,i2760). +Le(cr93_0,i2800). +Le(cr93_0,i2830). +Le(cr93_0,i2860). +Le(cr93_0,i2870). +Le(cr93_0,i2940). +Le(cr93_0,i2970). +Le(cr93_0,i3010). +Le(cr93_0,i3040). +Le(cr93_0,i3080). +Le(cr93_0,i3120). +Le(cr93_0,i3150). +Le(cr93_0,i3220). +Le(cr93_0,i3260). +Le(cr93_0,i3290). +Le(cr93_0,i3300). +Le(cr93_0,i3330). +Le(cr93_0,i3400). +Le(cr93_0,i3430). +Le(cr93_0,i3500). +Le(cr93_0,i3520). +Le(cr93_0,i3580). +Le(cr93_0,i3610). +Le(cr93_0,i3650). +Le(cr93_0,i3680). +Le(cr93_0,i3720). +Le(cr93_0,i3740). +Le(cr93_0,i3790). +Le(cr93_0,i3820). +Le(cr93_0,i3860). +Le(cr93_0,i3960). +Le(cr93_0,i4040). +Le(cr93_0,i4140). +Le(cr93_0,i4180). +Le(cr93_0,i4400). +Le(cr93_0,i4620). +Le(cr93_0,i4840). +Le(cr93_0,i5060). +Le(cr93_0,i5280). +Le(cr93_0,i5500). +Le(cr93_0,i5720). +Le(cr93_0,i5940). +Le(cr93_0,i6160). +Le(cr93_0,i6380). +Le(cr93_0,i6600). +Le(cr93_0,i6820). +Le(cr93_0,i7040). +Le(cr93_0,i7260). +Le(cr93_0,i7480). +Le(cr93_0,i7700). +Le(cr93_0,i7920). +Le(cr93_0,i8140). +Le(cr93_0,i8360). +Le(cr93_0,i8580). +Eq(i2480,i2480). +Le(i2480,cr94_0). +Le(cr94_0,i2510). +Le(i-30,cr94_0). +Le(i0,cr94_0). +Le(i13,cr94_0). +Le(i26,cr94_0). +Le(i39,cr94_0). +Le(i52,cr94_0). +Le(i60,cr94_0). +Le(i65,cr94_0). +Le(i70,cr94_0). +Le(i78,cr94_0). +Le(i90,cr94_0). +Le(i91,cr94_0). +Le(i104,cr94_0). +Le(i117,cr94_0). +Le(i130,cr94_0). +Le(i143,cr94_0). +Le(i156,cr94_0). +Le(i169,cr94_0). +Le(i182,cr94_0). +Le(i195,cr94_0). +Le(i208,cr94_0). +Le(i221,cr94_0). +Le(i234,cr94_0). +Le(i247,cr94_0). +Le(i260,cr94_0). +Le(i460,cr94_0). +Le(i530,cr94_0). +Le(i600,cr94_0). +Le(i660,cr94_0). +Le(i670,cr94_0). +Le(i710,cr94_0). +Le(i740,cr94_0). +Le(i810,cr94_0). +Le(i850,cr94_0). +Le(i880,cr94_0). +Le(i890,cr94_0). +Le(i920,cr94_0). +Le(i960,cr94_0). +Le(i990,cr94_0). +Le(i1030,cr94_0). +Le(i1060,cr94_0). +Le(i1100,cr94_0). +Le(i1130,cr94_0). +Le(i1170,cr94_0). +Le(i1200,cr94_0). +Le(i1240,cr94_0). +Le(i1260,cr94_0). +Le(i1270,cr94_0). +Le(i1290,cr94_0). +Le(i1310,cr94_0). +Le(i1320,cr94_0). +Le(i1330,cr94_0). +Le(i1350,cr94_0). +Le(i1360,cr94_0). +Le(i1380,cr94_0). +Le(i1390,cr94_0). +Le(i1420,cr94_0). +Le(i1430,cr94_0). +Le(i1450,cr94_0). +Le(i1460,cr94_0). +Le(i1490,cr94_0). +Le(i1520,cr94_0). +Le(i1530,cr94_0). +Le(i1540,cr94_0). +Le(i1560,cr94_0). 
+Le(i1590,cr94_0). +Le(i1630,cr94_0). +Le(i1660,cr94_0). +Le(i1700,cr94_0). +Le(i1730,cr94_0). +Le(i1760,cr94_0). +Le(i1770,cr94_0). +Le(i1810,cr94_0). +Le(i1840,cr94_0). +Le(i1880,cr94_0). +Le(i1910,cr94_0). +Le(i1950,cr94_0). +Le(i1980,cr94_0). +Le(i2020,cr94_0). +Le(i2050,cr94_0). +Le(i2090,cr94_0). +Le(i2120,cr94_0). +Le(i2160,cr94_0). +Le(i2190,cr94_0). +Le(i2200,cr94_0). +Le(i2230,cr94_0). +Le(i2270,cr94_0). +Le(i2300,cr94_0). +Le(i2340,cr94_0). +Le(i2370,cr94_0). +Le(i2410,cr94_0). +Le(i2420,cr94_0). +Le(i2440,cr94_0). +Le(cr94_0,i2550). +Le(cr94_0,i2580). +Le(cr94_0,i2620). +Le(cr94_0,i2640). +Le(cr94_0,i2660). +Le(cr94_0,i2730). +Le(cr94_0,i2760). +Le(cr94_0,i2800). +Le(cr94_0,i2830). +Le(cr94_0,i2860). +Le(cr94_0,i2870). +Le(cr94_0,i2940). +Le(cr94_0,i2970). +Le(cr94_0,i3010). +Le(cr94_0,i3040). +Le(cr94_0,i3080). +Le(cr94_0,i3120). +Le(cr94_0,i3150). +Le(cr94_0,i3220). +Le(cr94_0,i3260). +Le(cr94_0,i3290). +Le(cr94_0,i3300). +Le(cr94_0,i3330). +Le(cr94_0,i3400). +Le(cr94_0,i3430). +Le(cr94_0,i3500). +Le(cr94_0,i3520). +Le(cr94_0,i3580). +Le(cr94_0,i3610). +Le(cr94_0,i3650). +Le(cr94_0,i3680). +Le(cr94_0,i3720). +Le(cr94_0,i3740). +Le(cr94_0,i3790). +Le(cr94_0,i3820). +Le(cr94_0,i3860). +Le(cr94_0,i3960). +Le(cr94_0,i4040). +Le(cr94_0,i4140). +Le(cr94_0,i4180). +Le(cr94_0,i4400). +Le(cr94_0,i4620). +Le(cr94_0,i4840). +Le(cr94_0,i5060). +Le(cr94_0,i5280). +Le(cr94_0,i5500). +Le(cr94_0,i5720). +Le(cr94_0,i5940). +Le(cr94_0,i6160). +Le(cr94_0,i6380). +Le(cr94_0,i6600). +Le(cr94_0,i6820). +Le(cr94_0,i7040). +Le(cr94_0,i7260). +Le(cr94_0,i7480). +Le(cr94_0,i7700). +Le(cr94_0,i7920). +Le(cr94_0,i8140). +Le(cr94_0,i8360). +Le(cr94_0,i8580). +Eq(i2510,i2510). +Le(i2510,cr95_0). +Le(cr95_0,i2550). +Le(i-30,cr95_0). +Le(i0,cr95_0). +Le(i13,cr95_0). +Le(i26,cr95_0). +Le(i39,cr95_0). +Le(i52,cr95_0). +Le(i60,cr95_0). +Le(i65,cr95_0). +Le(i70,cr95_0). +Le(i78,cr95_0). +Le(i90,cr95_0). +Le(i91,cr95_0). +Le(i104,cr95_0). +Le(i117,cr95_0). +Le(i130,cr95_0). +Le(i143,cr95_0). +Le(i156,cr95_0). +Le(i169,cr95_0). +Le(i182,cr95_0). +Le(i195,cr95_0). +Le(i208,cr95_0). +Le(i221,cr95_0). +Le(i234,cr95_0). +Le(i247,cr95_0). +Le(i260,cr95_0). +Le(i460,cr95_0). +Le(i530,cr95_0). +Le(i600,cr95_0). +Le(i660,cr95_0). +Le(i670,cr95_0). +Le(i710,cr95_0). +Le(i740,cr95_0). +Le(i810,cr95_0). +Le(i850,cr95_0). +Le(i880,cr95_0). +Le(i890,cr95_0). +Le(i920,cr95_0). +Le(i960,cr95_0). +Le(i990,cr95_0). +Le(i1030,cr95_0). +Le(i1060,cr95_0). +Le(i1100,cr95_0). +Le(i1130,cr95_0). +Le(i1170,cr95_0). +Le(i1200,cr95_0). +Le(i1240,cr95_0). +Le(i1260,cr95_0). +Le(i1270,cr95_0). +Le(i1290,cr95_0). +Le(i1310,cr95_0). +Le(i1320,cr95_0). +Le(i1330,cr95_0). +Le(i1350,cr95_0). +Le(i1360,cr95_0). +Le(i1380,cr95_0). +Le(i1390,cr95_0). +Le(i1420,cr95_0). +Le(i1430,cr95_0). +Le(i1450,cr95_0). +Le(i1460,cr95_0). +Le(i1490,cr95_0). +Le(i1520,cr95_0). +Le(i1530,cr95_0). +Le(i1540,cr95_0). +Le(i1560,cr95_0). +Le(i1590,cr95_0). +Le(i1630,cr95_0). +Le(i1660,cr95_0). +Le(i1700,cr95_0). +Le(i1730,cr95_0). +Le(i1760,cr95_0). +Le(i1770,cr95_0). +Le(i1810,cr95_0). +Le(i1840,cr95_0). +Le(i1880,cr95_0). +Le(i1910,cr95_0). +Le(i1950,cr95_0). +Le(i1980,cr95_0). +Le(i2020,cr95_0). +Le(i2050,cr95_0). +Le(i2090,cr95_0). +Le(i2120,cr95_0). +Le(i2160,cr95_0). +Le(i2190,cr95_0). +Le(i2200,cr95_0). +Le(i2230,cr95_0). +Le(i2270,cr95_0). +Le(i2300,cr95_0). +Le(i2340,cr95_0). +Le(i2370,cr95_0). +Le(i2410,cr95_0). +Le(i2420,cr95_0). +Le(i2440,cr95_0). +Le(i2480,cr95_0). +Le(cr95_0,i2580). +Le(cr95_0,i2620). +Le(cr95_0,i2640). +Le(cr95_0,i2660). 
+Le(cr95_0,i2730). +Le(cr95_0,i2760). +Le(cr95_0,i2800). +Le(cr95_0,i2830). +Le(cr95_0,i2860). +Le(cr95_0,i2870). +Le(cr95_0,i2940). +Le(cr95_0,i2970). +Le(cr95_0,i3010). +Le(cr95_0,i3040). +Le(cr95_0,i3080). +Le(cr95_0,i3120). +Le(cr95_0,i3150). +Le(cr95_0,i3220). +Le(cr95_0,i3260). +Le(cr95_0,i3290). +Le(cr95_0,i3300). +Le(cr95_0,i3330). +Le(cr95_0,i3400). +Le(cr95_0,i3430). +Le(cr95_0,i3500). +Le(cr95_0,i3520). +Le(cr95_0,i3580). +Le(cr95_0,i3610). +Le(cr95_0,i3650). +Le(cr95_0,i3680). +Le(cr95_0,i3720). +Le(cr95_0,i3740). +Le(cr95_0,i3790). +Le(cr95_0,i3820). +Le(cr95_0,i3860). +Le(cr95_0,i3960). +Le(cr95_0,i4040). +Le(cr95_0,i4140). +Le(cr95_0,i4180). +Le(cr95_0,i4400). +Le(cr95_0,i4620). +Le(cr95_0,i4840). +Le(cr95_0,i5060). +Le(cr95_0,i5280). +Le(cr95_0,i5500). +Le(cr95_0,i5720). +Le(cr95_0,i5940). +Le(cr95_0,i6160). +Le(cr95_0,i6380). +Le(cr95_0,i6600). +Le(cr95_0,i6820). +Le(cr95_0,i7040). +Le(cr95_0,i7260). +Le(cr95_0,i7480). +Le(cr95_0,i7700). +Le(cr95_0,i7920). +Le(cr95_0,i8140). +Le(cr95_0,i8360). +Le(cr95_0,i8580). +Eq(i2550,i2550). +Le(i2550,cr96_0). +Le(cr96_0,i2580). +Le(i-30,cr96_0). +Le(i0,cr96_0). +Le(i13,cr96_0). +Le(i26,cr96_0). +Le(i39,cr96_0). +Le(i52,cr96_0). +Le(i60,cr96_0). +Le(i65,cr96_0). +Le(i70,cr96_0). +Le(i78,cr96_0). +Le(i90,cr96_0). +Le(i91,cr96_0). +Le(i104,cr96_0). +Le(i117,cr96_0). +Le(i130,cr96_0). +Le(i143,cr96_0). +Le(i156,cr96_0). +Le(i169,cr96_0). +Le(i182,cr96_0). +Le(i195,cr96_0). +Le(i208,cr96_0). +Le(i221,cr96_0). +Le(i234,cr96_0). +Le(i247,cr96_0). +Le(i260,cr96_0). +Le(i460,cr96_0). +Le(i530,cr96_0). +Le(i600,cr96_0). +Le(i660,cr96_0). +Le(i670,cr96_0). +Le(i710,cr96_0). +Le(i740,cr96_0). +Le(i810,cr96_0). +Le(i850,cr96_0). +Le(i880,cr96_0). +Le(i890,cr96_0). +Le(i920,cr96_0). +Le(i960,cr96_0). +Le(i990,cr96_0). +Le(i1030,cr96_0). +Le(i1060,cr96_0). +Le(i1100,cr96_0). +Le(i1130,cr96_0). +Le(i1170,cr96_0). +Le(i1200,cr96_0). +Le(i1240,cr96_0). +Le(i1260,cr96_0). +Le(i1270,cr96_0). +Le(i1290,cr96_0). +Le(i1310,cr96_0). +Le(i1320,cr96_0). +Le(i1330,cr96_0). +Le(i1350,cr96_0). +Le(i1360,cr96_0). +Le(i1380,cr96_0). +Le(i1390,cr96_0). +Le(i1420,cr96_0). +Le(i1430,cr96_0). +Le(i1450,cr96_0). +Le(i1460,cr96_0). +Le(i1490,cr96_0). +Le(i1520,cr96_0). +Le(i1530,cr96_0). +Le(i1540,cr96_0). +Le(i1560,cr96_0). +Le(i1590,cr96_0). +Le(i1630,cr96_0). +Le(i1660,cr96_0). +Le(i1700,cr96_0). +Le(i1730,cr96_0). +Le(i1760,cr96_0). +Le(i1770,cr96_0). +Le(i1810,cr96_0). +Le(i1840,cr96_0). +Le(i1880,cr96_0). +Le(i1910,cr96_0). +Le(i1950,cr96_0). +Le(i1980,cr96_0). +Le(i2020,cr96_0). +Le(i2050,cr96_0). +Le(i2090,cr96_0). +Le(i2120,cr96_0). +Le(i2160,cr96_0). +Le(i2190,cr96_0). +Le(i2200,cr96_0). +Le(i2230,cr96_0). +Le(i2270,cr96_0). +Le(i2300,cr96_0). +Le(i2340,cr96_0). +Le(i2370,cr96_0). +Le(i2410,cr96_0). +Le(i2420,cr96_0). +Le(i2440,cr96_0). +Le(i2480,cr96_0). +Le(i2510,cr96_0). +Le(cr96_0,i2620). +Le(cr96_0,i2640). +Le(cr96_0,i2660). +Le(cr96_0,i2730). +Le(cr96_0,i2760). +Le(cr96_0,i2800). +Le(cr96_0,i2830). +Le(cr96_0,i2860). +Le(cr96_0,i2870). +Le(cr96_0,i2940). +Le(cr96_0,i2970). +Le(cr96_0,i3010). +Le(cr96_0,i3040). +Le(cr96_0,i3080). +Le(cr96_0,i3120). +Le(cr96_0,i3150). +Le(cr96_0,i3220). +Le(cr96_0,i3260). +Le(cr96_0,i3290). +Le(cr96_0,i3300). +Le(cr96_0,i3330). +Le(cr96_0,i3400). +Le(cr96_0,i3430). +Le(cr96_0,i3500). +Le(cr96_0,i3520). +Le(cr96_0,i3580). +Le(cr96_0,i3610). +Le(cr96_0,i3650). +Le(cr96_0,i3680). +Le(cr96_0,i3720). +Le(cr96_0,i3740). +Le(cr96_0,i3790). +Le(cr96_0,i3820). +Le(cr96_0,i3860). +Le(cr96_0,i3960). +Le(cr96_0,i4040). 
+Le(cr96_0,i4140). +Le(cr96_0,i4180). +Le(cr96_0,i4400). +Le(cr96_0,i4620). +Le(cr96_0,i4840). +Le(cr96_0,i5060). +Le(cr96_0,i5280). +Le(cr96_0,i5500). +Le(cr96_0,i5720). +Le(cr96_0,i5940). +Le(cr96_0,i6160). +Le(cr96_0,i6380). +Le(cr96_0,i6600). +Le(cr96_0,i6820). +Le(cr96_0,i7040). +Le(cr96_0,i7260). +Le(cr96_0,i7480). +Le(cr96_0,i7700). +Le(cr96_0,i7920). +Le(cr96_0,i8140). +Le(cr96_0,i8360). +Le(cr96_0,i8580). +Eq(i2580,i2580). +Le(i2580,cr97_0). +Le(cr97_0,i2620). +Le(i-30,cr97_0). +Le(i0,cr97_0). +Le(i13,cr97_0). +Le(i26,cr97_0). +Le(i39,cr97_0). +Le(i52,cr97_0). +Le(i60,cr97_0). +Le(i65,cr97_0). +Le(i70,cr97_0). +Le(i78,cr97_0). +Le(i90,cr97_0). +Le(i91,cr97_0). +Le(i104,cr97_0). +Le(i117,cr97_0). +Le(i130,cr97_0). +Le(i143,cr97_0). +Le(i156,cr97_0). +Le(i169,cr97_0). +Le(i182,cr97_0). +Le(i195,cr97_0). +Le(i208,cr97_0). +Le(i221,cr97_0). +Le(i234,cr97_0). +Le(i247,cr97_0). +Le(i260,cr97_0). +Le(i460,cr97_0). +Le(i530,cr97_0). +Le(i600,cr97_0). +Le(i660,cr97_0). +Le(i670,cr97_0). +Le(i710,cr97_0). +Le(i740,cr97_0). +Le(i810,cr97_0). +Le(i850,cr97_0). +Le(i880,cr97_0). +Le(i890,cr97_0). +Le(i920,cr97_0). +Le(i960,cr97_0). +Le(i990,cr97_0). +Le(i1030,cr97_0). +Le(i1060,cr97_0). +Le(i1100,cr97_0). +Le(i1130,cr97_0). +Le(i1170,cr97_0). +Le(i1200,cr97_0). +Le(i1240,cr97_0). +Le(i1260,cr97_0). +Le(i1270,cr97_0). +Le(i1290,cr97_0). +Le(i1310,cr97_0). +Le(i1320,cr97_0). +Le(i1330,cr97_0). +Le(i1350,cr97_0). +Le(i1360,cr97_0). +Le(i1380,cr97_0). +Le(i1390,cr97_0). +Le(i1420,cr97_0). +Le(i1430,cr97_0). +Le(i1450,cr97_0). +Le(i1460,cr97_0). +Le(i1490,cr97_0). +Le(i1520,cr97_0). +Le(i1530,cr97_0). +Le(i1540,cr97_0). +Le(i1560,cr97_0). +Le(i1590,cr97_0). +Le(i1630,cr97_0). +Le(i1660,cr97_0). +Le(i1700,cr97_0). +Le(i1730,cr97_0). +Le(i1760,cr97_0). +Le(i1770,cr97_0). +Le(i1810,cr97_0). +Le(i1840,cr97_0). +Le(i1880,cr97_0). +Le(i1910,cr97_0). +Le(i1950,cr97_0). +Le(i1980,cr97_0). +Le(i2020,cr97_0). +Le(i2050,cr97_0). +Le(i2090,cr97_0). +Le(i2120,cr97_0). +Le(i2160,cr97_0). +Le(i2190,cr97_0). +Le(i2200,cr97_0). +Le(i2230,cr97_0). +Le(i2270,cr97_0). +Le(i2300,cr97_0). +Le(i2340,cr97_0). +Le(i2370,cr97_0). +Le(i2410,cr97_0). +Le(i2420,cr97_0). +Le(i2440,cr97_0). +Le(i2480,cr97_0). +Le(i2510,cr97_0). +Le(i2550,cr97_0). +Le(cr97_0,i2640). +Le(cr97_0,i2660). +Le(cr97_0,i2730). +Le(cr97_0,i2760). +Le(cr97_0,i2800). +Le(cr97_0,i2830). +Le(cr97_0,i2860). +Le(cr97_0,i2870). +Le(cr97_0,i2940). +Le(cr97_0,i2970). +Le(cr97_0,i3010). +Le(cr97_0,i3040). +Le(cr97_0,i3080). +Le(cr97_0,i3120). +Le(cr97_0,i3150). +Le(cr97_0,i3220). +Le(cr97_0,i3260). +Le(cr97_0,i3290). +Le(cr97_0,i3300). +Le(cr97_0,i3330). +Le(cr97_0,i3400). +Le(cr97_0,i3430). +Le(cr97_0,i3500). +Le(cr97_0,i3520). +Le(cr97_0,i3580). +Le(cr97_0,i3610). +Le(cr97_0,i3650). +Le(cr97_0,i3680). +Le(cr97_0,i3720). +Le(cr97_0,i3740). +Le(cr97_0,i3790). +Le(cr97_0,i3820). +Le(cr97_0,i3860). +Le(cr97_0,i3960). +Le(cr97_0,i4040). +Le(cr97_0,i4140). +Le(cr97_0,i4180). +Le(cr97_0,i4400). +Le(cr97_0,i4620). +Le(cr97_0,i4840). +Le(cr97_0,i5060). +Le(cr97_0,i5280). +Le(cr97_0,i5500). +Le(cr97_0,i5720). +Le(cr97_0,i5940). +Le(cr97_0,i6160). +Le(cr97_0,i6380). +Le(cr97_0,i6600). +Le(cr97_0,i6820). +Le(cr97_0,i7040). +Le(cr97_0,i7260). +Le(cr97_0,i7480). +Le(cr97_0,i7700). +Le(cr97_0,i7920). +Le(cr97_0,i8140). +Le(cr97_0,i8360). +Le(cr97_0,i8580). +Eq(i2620,i2620). +Le(i2620,cr98_0). +Le(cr98_0,i2640). +Le(i-30,cr98_0). +Le(i0,cr98_0). +Le(i13,cr98_0). +Le(i26,cr98_0). +Le(i39,cr98_0). +Le(i52,cr98_0). +Le(i60,cr98_0). +Le(i65,cr98_0). +Le(i70,cr98_0). 
+Le(i78,cr98_0). +Le(i90,cr98_0). +Le(i91,cr98_0). +Le(i104,cr98_0). +Le(i117,cr98_0). +Le(i130,cr98_0). +Le(i143,cr98_0). +Le(i156,cr98_0). +Le(i169,cr98_0). +Le(i182,cr98_0). +Le(i195,cr98_0). +Le(i208,cr98_0). +Le(i221,cr98_0). +Le(i234,cr98_0). +Le(i247,cr98_0). +Le(i260,cr98_0). +Le(i460,cr98_0). +Le(i530,cr98_0). +Le(i600,cr98_0). +Le(i660,cr98_0). +Le(i670,cr98_0). +Le(i710,cr98_0). +Le(i740,cr98_0). +Le(i810,cr98_0). +Le(i850,cr98_0). +Le(i880,cr98_0). +Le(i890,cr98_0). +Le(i920,cr98_0). +Le(i960,cr98_0). +Le(i990,cr98_0). +Le(i1030,cr98_0). +Le(i1060,cr98_0). +Le(i1100,cr98_0). +Le(i1130,cr98_0). +Le(i1170,cr98_0). +Le(i1200,cr98_0). +Le(i1240,cr98_0). +Le(i1260,cr98_0). +Le(i1270,cr98_0). +Le(i1290,cr98_0). +Le(i1310,cr98_0). +Le(i1320,cr98_0). +Le(i1330,cr98_0). +Le(i1350,cr98_0). +Le(i1360,cr98_0). +Le(i1380,cr98_0). +Le(i1390,cr98_0). +Le(i1420,cr98_0). +Le(i1430,cr98_0). +Le(i1450,cr98_0). +Le(i1460,cr98_0). +Le(i1490,cr98_0). +Le(i1520,cr98_0). +Le(i1530,cr98_0). +Le(i1540,cr98_0). +Le(i1560,cr98_0). +Le(i1590,cr98_0). +Le(i1630,cr98_0). +Le(i1660,cr98_0). +Le(i1700,cr98_0). +Le(i1730,cr98_0). +Le(i1760,cr98_0). +Le(i1770,cr98_0). +Le(i1810,cr98_0). +Le(i1840,cr98_0). +Le(i1880,cr98_0). +Le(i1910,cr98_0). +Le(i1950,cr98_0). +Le(i1980,cr98_0). +Le(i2020,cr98_0). +Le(i2050,cr98_0). +Le(i2090,cr98_0). +Le(i2120,cr98_0). +Le(i2160,cr98_0). +Le(i2190,cr98_0). +Le(i2200,cr98_0). +Le(i2230,cr98_0). +Le(i2270,cr98_0). +Le(i2300,cr98_0). +Le(i2340,cr98_0). +Le(i2370,cr98_0). +Le(i2410,cr98_0). +Le(i2420,cr98_0). +Le(i2440,cr98_0). +Le(i2480,cr98_0). +Le(i2510,cr98_0). +Le(i2550,cr98_0). +Le(i2580,cr98_0). +Le(cr98_0,i2660). +Le(cr98_0,i2730). +Le(cr98_0,i2760). +Le(cr98_0,i2800). +Le(cr98_0,i2830). +Le(cr98_0,i2860). +Le(cr98_0,i2870). +Le(cr98_0,i2940). +Le(cr98_0,i2970). +Le(cr98_0,i3010). +Le(cr98_0,i3040). +Le(cr98_0,i3080). +Le(cr98_0,i3120). +Le(cr98_0,i3150). +Le(cr98_0,i3220). +Le(cr98_0,i3260). +Le(cr98_0,i3290). +Le(cr98_0,i3300). +Le(cr98_0,i3330). +Le(cr98_0,i3400). +Le(cr98_0,i3430). +Le(cr98_0,i3500). +Le(cr98_0,i3520). +Le(cr98_0,i3580). +Le(cr98_0,i3610). +Le(cr98_0,i3650). +Le(cr98_0,i3680). +Le(cr98_0,i3720). +Le(cr98_0,i3740). +Le(cr98_0,i3790). +Le(cr98_0,i3820). +Le(cr98_0,i3860). +Le(cr98_0,i3960). +Le(cr98_0,i4040). +Le(cr98_0,i4140). +Le(cr98_0,i4180). +Le(cr98_0,i4400). +Le(cr98_0,i4620). +Le(cr98_0,i4840). +Le(cr98_0,i5060). +Le(cr98_0,i5280). +Le(cr98_0,i5500). +Le(cr98_0,i5720). +Le(cr98_0,i5940). +Le(cr98_0,i6160). +Le(cr98_0,i6380). +Le(cr98_0,i6600). +Le(cr98_0,i6820). +Le(cr98_0,i7040). +Le(cr98_0,i7260). +Le(cr98_0,i7480). +Le(cr98_0,i7700). +Le(cr98_0,i7920). +Le(cr98_0,i8140). +Le(cr98_0,i8360). +Le(cr98_0,i8580). +Eq(i2640,i2640). +Le(i2640,cr99_0). +Le(cr99_0,i2660). +Le(i-30,cr99_0). +Le(i0,cr99_0). +Le(i13,cr99_0). +Le(i26,cr99_0). +Le(i39,cr99_0). +Le(i52,cr99_0). +Le(i60,cr99_0). +Le(i65,cr99_0). +Le(i70,cr99_0). +Le(i78,cr99_0). +Le(i90,cr99_0). +Le(i91,cr99_0). +Le(i104,cr99_0). +Le(i117,cr99_0). +Le(i130,cr99_0). +Le(i143,cr99_0). +Le(i156,cr99_0). +Le(i169,cr99_0). +Le(i182,cr99_0). +Le(i195,cr99_0). +Le(i208,cr99_0). +Le(i221,cr99_0). +Le(i234,cr99_0). +Le(i247,cr99_0). +Le(i260,cr99_0). +Le(i460,cr99_0). +Le(i530,cr99_0). +Le(i600,cr99_0). +Le(i660,cr99_0). +Le(i670,cr99_0). +Le(i710,cr99_0). +Le(i740,cr99_0). +Le(i810,cr99_0). +Le(i850,cr99_0). +Le(i880,cr99_0). +Le(i890,cr99_0). +Le(i920,cr99_0). +Le(i960,cr99_0). +Le(i990,cr99_0). +Le(i1030,cr99_0). +Le(i1060,cr99_0). +Le(i1100,cr99_0). +Le(i1130,cr99_0). +Le(i1170,cr99_0). 
+Le(i1200,cr99_0). +Le(i1240,cr99_0). +Le(i1260,cr99_0). +Le(i1270,cr99_0). +Le(i1290,cr99_0). +Le(i1310,cr99_0). +Le(i1320,cr99_0). +Le(i1330,cr99_0). +Le(i1350,cr99_0). +Le(i1360,cr99_0). +Le(i1380,cr99_0). +Le(i1390,cr99_0). +Le(i1420,cr99_0). +Le(i1430,cr99_0). +Le(i1450,cr99_0). +Le(i1460,cr99_0). +Le(i1490,cr99_0). +Le(i1520,cr99_0). +Le(i1530,cr99_0). +Le(i1540,cr99_0). +Le(i1560,cr99_0). +Le(i1590,cr99_0). +Le(i1630,cr99_0). +Le(i1660,cr99_0). +Le(i1700,cr99_0). +Le(i1730,cr99_0). +Le(i1760,cr99_0). +Le(i1770,cr99_0). +Le(i1810,cr99_0). +Le(i1840,cr99_0). +Le(i1880,cr99_0). +Le(i1910,cr99_0). +Le(i1950,cr99_0). +Le(i1980,cr99_0). +Le(i2020,cr99_0). +Le(i2050,cr99_0). +Le(i2090,cr99_0). +Le(i2120,cr99_0). +Le(i2160,cr99_0). +Le(i2190,cr99_0). +Le(i2200,cr99_0). +Le(i2230,cr99_0). +Le(i2270,cr99_0). +Le(i2300,cr99_0). +Le(i2340,cr99_0). +Le(i2370,cr99_0). +Le(i2410,cr99_0). +Le(i2420,cr99_0). +Le(i2440,cr99_0). +Le(i2480,cr99_0). +Le(i2510,cr99_0). +Le(i2550,cr99_0). +Le(i2580,cr99_0). +Le(i2620,cr99_0). +Le(cr99_0,i2730). +Le(cr99_0,i2760). +Le(cr99_0,i2800). +Le(cr99_0,i2830). +Le(cr99_0,i2860). +Le(cr99_0,i2870). +Le(cr99_0,i2940). +Le(cr99_0,i2970). +Le(cr99_0,i3010). +Le(cr99_0,i3040). +Le(cr99_0,i3080). +Le(cr99_0,i3120). +Le(cr99_0,i3150). +Le(cr99_0,i3220). +Le(cr99_0,i3260). +Le(cr99_0,i3290). +Le(cr99_0,i3300). +Le(cr99_0,i3330). +Le(cr99_0,i3400). +Le(cr99_0,i3430). +Le(cr99_0,i3500). +Le(cr99_0,i3520). +Le(cr99_0,i3580). +Le(cr99_0,i3610). +Le(cr99_0,i3650). +Le(cr99_0,i3680). +Le(cr99_0,i3720). +Le(cr99_0,i3740). +Le(cr99_0,i3790). +Le(cr99_0,i3820). +Le(cr99_0,i3860). +Le(cr99_0,i3960). +Le(cr99_0,i4040). +Le(cr99_0,i4140). +Le(cr99_0,i4180). +Le(cr99_0,i4400). +Le(cr99_0,i4620). +Le(cr99_0,i4840). +Le(cr99_0,i5060). +Le(cr99_0,i5280). +Le(cr99_0,i5500). +Le(cr99_0,i5720). +Le(cr99_0,i5940). +Le(cr99_0,i6160). +Le(cr99_0,i6380). +Le(cr99_0,i6600). +Le(cr99_0,i6820). +Le(cr99_0,i7040). +Le(cr99_0,i7260). +Le(cr99_0,i7480). +Le(cr99_0,i7700). +Le(cr99_0,i7920). +Le(cr99_0,i8140). +Le(cr99_0,i8360). +Le(cr99_0,i8580). +Eq(i2660,i2660). +Le(i2660,cr100_0). +Le(cr100_0,i2730). +Le(i-30,cr100_0). +Le(i0,cr100_0). +Le(i13,cr100_0). +Le(i26,cr100_0). +Le(i39,cr100_0). +Le(i52,cr100_0). +Le(i60,cr100_0). +Le(i65,cr100_0). +Le(i70,cr100_0). +Le(i78,cr100_0). +Le(i90,cr100_0). +Le(i91,cr100_0). +Le(i104,cr100_0). +Le(i117,cr100_0). +Le(i130,cr100_0). +Le(i143,cr100_0). +Le(i156,cr100_0). +Le(i169,cr100_0). +Le(i182,cr100_0). +Le(i195,cr100_0). +Le(i208,cr100_0). +Le(i221,cr100_0). +Le(i234,cr100_0). +Le(i247,cr100_0). +Le(i260,cr100_0). +Le(i460,cr100_0). +Le(i530,cr100_0). +Le(i600,cr100_0). +Le(i660,cr100_0). +Le(i670,cr100_0). +Le(i710,cr100_0). +Le(i740,cr100_0). +Le(i810,cr100_0). +Le(i850,cr100_0). +Le(i880,cr100_0). +Le(i890,cr100_0). +Le(i920,cr100_0). +Le(i960,cr100_0). +Le(i990,cr100_0). +Le(i1030,cr100_0). +Le(i1060,cr100_0). +Le(i1100,cr100_0). +Le(i1130,cr100_0). +Le(i1170,cr100_0). +Le(i1200,cr100_0). +Le(i1240,cr100_0). +Le(i1260,cr100_0). +Le(i1270,cr100_0). +Le(i1290,cr100_0). +Le(i1310,cr100_0). +Le(i1320,cr100_0). +Le(i1330,cr100_0). +Le(i1350,cr100_0). +Le(i1360,cr100_0). +Le(i1380,cr100_0). +Le(i1390,cr100_0). +Le(i1420,cr100_0). +Le(i1430,cr100_0). +Le(i1450,cr100_0). +Le(i1460,cr100_0). +Le(i1490,cr100_0). +Le(i1520,cr100_0). +Le(i1530,cr100_0). +Le(i1540,cr100_0). +Le(i1560,cr100_0). +Le(i1590,cr100_0). +Le(i1630,cr100_0). +Le(i1660,cr100_0). +Le(i1700,cr100_0). +Le(i1730,cr100_0). +Le(i1760,cr100_0). +Le(i1770,cr100_0). +Le(i1810,cr100_0). 
+Le(i1840,cr100_0). +Le(i1880,cr100_0). +Le(i1910,cr100_0). +Le(i1950,cr100_0). +Le(i1980,cr100_0). +Le(i2020,cr100_0). +Le(i2050,cr100_0). +Le(i2090,cr100_0). +Le(i2120,cr100_0). +Le(i2160,cr100_0). +Le(i2190,cr100_0). +Le(i2200,cr100_0). +Le(i2230,cr100_0). +Le(i2270,cr100_0). +Le(i2300,cr100_0). +Le(i2340,cr100_0). +Le(i2370,cr100_0). +Le(i2410,cr100_0). +Le(i2420,cr100_0). +Le(i2440,cr100_0). +Le(i2480,cr100_0). +Le(i2510,cr100_0). +Le(i2550,cr100_0). +Le(i2580,cr100_0). +Le(i2620,cr100_0). +Le(i2640,cr100_0). +Le(cr100_0,i2760). +Le(cr100_0,i2800). +Le(cr100_0,i2830). +Le(cr100_0,i2860). +Le(cr100_0,i2870). +Le(cr100_0,i2940). +Le(cr100_0,i2970). +Le(cr100_0,i3010). +Le(cr100_0,i3040). +Le(cr100_0,i3080). +Le(cr100_0,i3120). +Le(cr100_0,i3150). +Le(cr100_0,i3220). +Le(cr100_0,i3260). +Le(cr100_0,i3290). +Le(cr100_0,i3300). +Le(cr100_0,i3330). +Le(cr100_0,i3400). +Le(cr100_0,i3430). +Le(cr100_0,i3500). +Le(cr100_0,i3520). +Le(cr100_0,i3580). +Le(cr100_0,i3610). +Le(cr100_0,i3650). +Le(cr100_0,i3680). +Le(cr100_0,i3720). +Le(cr100_0,i3740). +Le(cr100_0,i3790). +Le(cr100_0,i3820). +Le(cr100_0,i3860). +Le(cr100_0,i3960). +Le(cr100_0,i4040). +Le(cr100_0,i4140). +Le(cr100_0,i4180). +Le(cr100_0,i4400). +Le(cr100_0,i4620). +Le(cr100_0,i4840). +Le(cr100_0,i5060). +Le(cr100_0,i5280). +Le(cr100_0,i5500). +Le(cr100_0,i5720). +Le(cr100_0,i5940). +Le(cr100_0,i6160). +Le(cr100_0,i6380). +Le(cr100_0,i6600). +Le(cr100_0,i6820). +Le(cr100_0,i7040). +Le(cr100_0,i7260). +Le(cr100_0,i7480). +Le(cr100_0,i7700). +Le(cr100_0,i7920). +Le(cr100_0,i8140). +Le(cr100_0,i8360). +Le(cr100_0,i8580). +Eq(i2730,i2730). +Le(i2730,cr101_0). +Le(cr101_0,i2760). +Le(i-30,cr101_0). +Le(i0,cr101_0). +Le(i13,cr101_0). +Le(i26,cr101_0). +Le(i39,cr101_0). +Le(i52,cr101_0). +Le(i60,cr101_0). +Le(i65,cr101_0). +Le(i70,cr101_0). +Le(i78,cr101_0). +Le(i90,cr101_0). +Le(i91,cr101_0). +Le(i104,cr101_0). +Le(i117,cr101_0). +Le(i130,cr101_0). +Le(i143,cr101_0). +Le(i156,cr101_0). +Le(i169,cr101_0). +Le(i182,cr101_0). +Le(i195,cr101_0). +Le(i208,cr101_0). +Le(i221,cr101_0). +Le(i234,cr101_0). +Le(i247,cr101_0). +Le(i260,cr101_0). +Le(i460,cr101_0). +Le(i530,cr101_0). +Le(i600,cr101_0). +Le(i660,cr101_0). +Le(i670,cr101_0). +Le(i710,cr101_0). +Le(i740,cr101_0). +Le(i810,cr101_0). +Le(i850,cr101_0). +Le(i880,cr101_0). +Le(i890,cr101_0). +Le(i920,cr101_0). +Le(i960,cr101_0). +Le(i990,cr101_0). +Le(i1030,cr101_0). +Le(i1060,cr101_0). +Le(i1100,cr101_0). +Le(i1130,cr101_0). +Le(i1170,cr101_0). +Le(i1200,cr101_0). +Le(i1240,cr101_0). +Le(i1260,cr101_0). +Le(i1270,cr101_0). +Le(i1290,cr101_0). +Le(i1310,cr101_0). +Le(i1320,cr101_0). +Le(i1330,cr101_0). +Le(i1350,cr101_0). +Le(i1360,cr101_0). +Le(i1380,cr101_0). +Le(i1390,cr101_0). +Le(i1420,cr101_0). +Le(i1430,cr101_0). +Le(i1450,cr101_0). +Le(i1460,cr101_0). +Le(i1490,cr101_0). +Le(i1520,cr101_0). +Le(i1530,cr101_0). +Le(i1540,cr101_0). +Le(i1560,cr101_0). +Le(i1590,cr101_0). +Le(i1630,cr101_0). +Le(i1660,cr101_0). +Le(i1700,cr101_0). +Le(i1730,cr101_0). +Le(i1760,cr101_0). +Le(i1770,cr101_0). +Le(i1810,cr101_0). +Le(i1840,cr101_0). +Le(i1880,cr101_0). +Le(i1910,cr101_0). +Le(i1950,cr101_0). +Le(i1980,cr101_0). +Le(i2020,cr101_0). +Le(i2050,cr101_0). +Le(i2090,cr101_0). +Le(i2120,cr101_0). +Le(i2160,cr101_0). +Le(i2190,cr101_0). +Le(i2200,cr101_0). +Le(i2230,cr101_0). +Le(i2270,cr101_0). +Le(i2300,cr101_0). +Le(i2340,cr101_0). +Le(i2370,cr101_0). +Le(i2410,cr101_0). +Le(i2420,cr101_0). +Le(i2440,cr101_0). +Le(i2480,cr101_0). +Le(i2510,cr101_0). +Le(i2550,cr101_0). +Le(i2580,cr101_0). 
+Le(i2620,cr101_0). +Le(i2640,cr101_0). +Le(i2660,cr101_0). +Le(cr101_0,i2800). +Le(cr101_0,i2830). +Le(cr101_0,i2860). +Le(cr101_0,i2870). +Le(cr101_0,i2940). +Le(cr101_0,i2970). +Le(cr101_0,i3010). +Le(cr101_0,i3040). +Le(cr101_0,i3080). +Le(cr101_0,i3120). +Le(cr101_0,i3150). +Le(cr101_0,i3220). +Le(cr101_0,i3260). +Le(cr101_0,i3290). +Le(cr101_0,i3300). +Le(cr101_0,i3330). +Le(cr101_0,i3400). +Le(cr101_0,i3430). +Le(cr101_0,i3500). +Le(cr101_0,i3520). +Le(cr101_0,i3580). +Le(cr101_0,i3610). +Le(cr101_0,i3650). +Le(cr101_0,i3680). +Le(cr101_0,i3720). +Le(cr101_0,i3740). +Le(cr101_0,i3790). +Le(cr101_0,i3820). +Le(cr101_0,i3860). +Le(cr101_0,i3960). +Le(cr101_0,i4040). +Le(cr101_0,i4140). +Le(cr101_0,i4180). +Le(cr101_0,i4400). +Le(cr101_0,i4620). +Le(cr101_0,i4840). +Le(cr101_0,i5060). +Le(cr101_0,i5280). +Le(cr101_0,i5500). +Le(cr101_0,i5720). +Le(cr101_0,i5940). +Le(cr101_0,i6160). +Le(cr101_0,i6380). +Le(cr101_0,i6600). +Le(cr101_0,i6820). +Le(cr101_0,i7040). +Le(cr101_0,i7260). +Le(cr101_0,i7480). +Le(cr101_0,i7700). +Le(cr101_0,i7920). +Le(cr101_0,i8140). +Le(cr101_0,i8360). +Le(cr101_0,i8580). +Eq(i2760,i2760). +Le(i2760,cr102_0). +Le(cr102_0,i2800). +Le(i-30,cr102_0). +Le(i0,cr102_0). +Le(i13,cr102_0). +Le(i26,cr102_0). +Le(i39,cr102_0). +Le(i52,cr102_0). +Le(i60,cr102_0). +Le(i65,cr102_0). +Le(i70,cr102_0). +Le(i78,cr102_0). +Le(i90,cr102_0). +Le(i91,cr102_0). +Le(i104,cr102_0). +Le(i117,cr102_0). +Le(i130,cr102_0). +Le(i143,cr102_0). +Le(i156,cr102_0). +Le(i169,cr102_0). +Le(i182,cr102_0). +Le(i195,cr102_0). +Le(i208,cr102_0). +Le(i221,cr102_0). +Le(i234,cr102_0). +Le(i247,cr102_0). +Le(i260,cr102_0). +Le(i460,cr102_0). +Le(i530,cr102_0). +Le(i600,cr102_0). +Le(i660,cr102_0). +Le(i670,cr102_0). +Le(i710,cr102_0). +Le(i740,cr102_0). +Le(i810,cr102_0). +Le(i850,cr102_0). +Le(i880,cr102_0). +Le(i890,cr102_0). +Le(i920,cr102_0). +Le(i960,cr102_0). +Le(i990,cr102_0). +Le(i1030,cr102_0). +Le(i1060,cr102_0). +Le(i1100,cr102_0). +Le(i1130,cr102_0). +Le(i1170,cr102_0). +Le(i1200,cr102_0). +Le(i1240,cr102_0). +Le(i1260,cr102_0). +Le(i1270,cr102_0). +Le(i1290,cr102_0). +Le(i1310,cr102_0). +Le(i1320,cr102_0). +Le(i1330,cr102_0). +Le(i1350,cr102_0). +Le(i1360,cr102_0). +Le(i1380,cr102_0). +Le(i1390,cr102_0). +Le(i1420,cr102_0). +Le(i1430,cr102_0). +Le(i1450,cr102_0). +Le(i1460,cr102_0). +Le(i1490,cr102_0). +Le(i1520,cr102_0). +Le(i1530,cr102_0). +Le(i1540,cr102_0). +Le(i1560,cr102_0). +Le(i1590,cr102_0). +Le(i1630,cr102_0). +Le(i1660,cr102_0). +Le(i1700,cr102_0). +Le(i1730,cr102_0). +Le(i1760,cr102_0). +Le(i1770,cr102_0). +Le(i1810,cr102_0). +Le(i1840,cr102_0). +Le(i1880,cr102_0). +Le(i1910,cr102_0). +Le(i1950,cr102_0). +Le(i1980,cr102_0). +Le(i2020,cr102_0). +Le(i2050,cr102_0). +Le(i2090,cr102_0). +Le(i2120,cr102_0). +Le(i2160,cr102_0). +Le(i2190,cr102_0). +Le(i2200,cr102_0). +Le(i2230,cr102_0). +Le(i2270,cr102_0). +Le(i2300,cr102_0). +Le(i2340,cr102_0). +Le(i2370,cr102_0). +Le(i2410,cr102_0). +Le(i2420,cr102_0). +Le(i2440,cr102_0). +Le(i2480,cr102_0). +Le(i2510,cr102_0). +Le(i2550,cr102_0). +Le(i2580,cr102_0). +Le(i2620,cr102_0). +Le(i2640,cr102_0). +Le(i2660,cr102_0). +Le(i2730,cr102_0). +Le(cr102_0,i2830). +Le(cr102_0,i2860). +Le(cr102_0,i2870). +Le(cr102_0,i2940). +Le(cr102_0,i2970). +Le(cr102_0,i3010). +Le(cr102_0,i3040). +Le(cr102_0,i3080). +Le(cr102_0,i3120). +Le(cr102_0,i3150). +Le(cr102_0,i3220). +Le(cr102_0,i3260). +Le(cr102_0,i3290). +Le(cr102_0,i3300). +Le(cr102_0,i3330). +Le(cr102_0,i3400). +Le(cr102_0,i3430). +Le(cr102_0,i3500). +Le(cr102_0,i3520). +Le(cr102_0,i3580). 
+Le(cr102_0,i3610). +Le(cr102_0,i3650). +Le(cr102_0,i3680). +Le(cr102_0,i3720). +Le(cr102_0,i3740). +Le(cr102_0,i3790). +Le(cr102_0,i3820). +Le(cr102_0,i3860). +Le(cr102_0,i3960). +Le(cr102_0,i4040). +Le(cr102_0,i4140). +Le(cr102_0,i4180). +Le(cr102_0,i4400). +Le(cr102_0,i4620). +Le(cr102_0,i4840). +Le(cr102_0,i5060). +Le(cr102_0,i5280). +Le(cr102_0,i5500). +Le(cr102_0,i5720). +Le(cr102_0,i5940). +Le(cr102_0,i6160). +Le(cr102_0,i6380). +Le(cr102_0,i6600). +Le(cr102_0,i6820). +Le(cr102_0,i7040). +Le(cr102_0,i7260). +Le(cr102_0,i7480). +Le(cr102_0,i7700). +Le(cr102_0,i7920). +Le(cr102_0,i8140). +Le(cr102_0,i8360). +Le(cr102_0,i8580). +Eq(i2800,i2800). +Le(i2800,cr103_0). +Le(cr103_0,i2830). +Le(i-30,cr103_0). +Le(i0,cr103_0). +Le(i13,cr103_0). +Le(i26,cr103_0). +Le(i39,cr103_0). +Le(i52,cr103_0). +Le(i60,cr103_0). +Le(i65,cr103_0). +Le(i70,cr103_0). +Le(i78,cr103_0). +Le(i90,cr103_0). +Le(i91,cr103_0). +Le(i104,cr103_0). +Le(i117,cr103_0). +Le(i130,cr103_0). +Le(i143,cr103_0). +Le(i156,cr103_0). +Le(i169,cr103_0). +Le(i182,cr103_0). +Le(i195,cr103_0). +Le(i208,cr103_0). +Le(i221,cr103_0). +Le(i234,cr103_0). +Le(i247,cr103_0). +Le(i260,cr103_0). +Le(i460,cr103_0). +Le(i530,cr103_0). +Le(i600,cr103_0). +Le(i660,cr103_0). +Le(i670,cr103_0). +Le(i710,cr103_0). +Le(i740,cr103_0). +Le(i810,cr103_0). +Le(i850,cr103_0). +Le(i880,cr103_0). +Le(i890,cr103_0). +Le(i920,cr103_0). +Le(i960,cr103_0). +Le(i990,cr103_0). +Le(i1030,cr103_0). +Le(i1060,cr103_0). +Le(i1100,cr103_0). +Le(i1130,cr103_0). +Le(i1170,cr103_0). +Le(i1200,cr103_0). +Le(i1240,cr103_0). +Le(i1260,cr103_0). +Le(i1270,cr103_0). +Le(i1290,cr103_0). +Le(i1310,cr103_0). +Le(i1320,cr103_0). +Le(i1330,cr103_0). +Le(i1350,cr103_0). +Le(i1360,cr103_0). +Le(i1380,cr103_0). +Le(i1390,cr103_0). +Le(i1420,cr103_0). +Le(i1430,cr103_0). +Le(i1450,cr103_0). +Le(i1460,cr103_0). +Le(i1490,cr103_0). +Le(i1520,cr103_0). +Le(i1530,cr103_0). +Le(i1540,cr103_0). +Le(i1560,cr103_0). +Le(i1590,cr103_0). +Le(i1630,cr103_0). +Le(i1660,cr103_0). +Le(i1700,cr103_0). +Le(i1730,cr103_0). +Le(i1760,cr103_0). +Le(i1770,cr103_0). +Le(i1810,cr103_0). +Le(i1840,cr103_0). +Le(i1880,cr103_0). +Le(i1910,cr103_0). +Le(i1950,cr103_0). +Le(i1980,cr103_0). +Le(i2020,cr103_0). +Le(i2050,cr103_0). +Le(i2090,cr103_0). +Le(i2120,cr103_0). +Le(i2160,cr103_0). +Le(i2190,cr103_0). +Le(i2200,cr103_0). +Le(i2230,cr103_0). +Le(i2270,cr103_0). +Le(i2300,cr103_0). +Le(i2340,cr103_0). +Le(i2370,cr103_0). +Le(i2410,cr103_0). +Le(i2420,cr103_0). +Le(i2440,cr103_0). +Le(i2480,cr103_0). +Le(i2510,cr103_0). +Le(i2550,cr103_0). +Le(i2580,cr103_0). +Le(i2620,cr103_0). +Le(i2640,cr103_0). +Le(i2660,cr103_0). +Le(i2730,cr103_0). +Le(i2760,cr103_0). +Le(cr103_0,i2860). +Le(cr103_0,i2870). +Le(cr103_0,i2940). +Le(cr103_0,i2970). +Le(cr103_0,i3010). +Le(cr103_0,i3040). +Le(cr103_0,i3080). +Le(cr103_0,i3120). +Le(cr103_0,i3150). +Le(cr103_0,i3220). +Le(cr103_0,i3260). +Le(cr103_0,i3290). +Le(cr103_0,i3300). +Le(cr103_0,i3330). +Le(cr103_0,i3400). +Le(cr103_0,i3430). +Le(cr103_0,i3500). +Le(cr103_0,i3520). +Le(cr103_0,i3580). +Le(cr103_0,i3610). +Le(cr103_0,i3650). +Le(cr103_0,i3680). +Le(cr103_0,i3720). +Le(cr103_0,i3740). +Le(cr103_0,i3790). +Le(cr103_0,i3820). +Le(cr103_0,i3860). +Le(cr103_0,i3960). +Le(cr103_0,i4040). +Le(cr103_0,i4140). +Le(cr103_0,i4180). +Le(cr103_0,i4400). +Le(cr103_0,i4620). +Le(cr103_0,i4840). +Le(cr103_0,i5060). +Le(cr103_0,i5280). +Le(cr103_0,i5500). +Le(cr103_0,i5720). +Le(cr103_0,i5940). +Le(cr103_0,i6160). +Le(cr103_0,i6380). +Le(cr103_0,i6600). +Le(cr103_0,i6820). 
+Le(cr103_0,i7040). +Le(cr103_0,i7260). +Le(cr103_0,i7480). +Le(cr103_0,i7700). +Le(cr103_0,i7920). +Le(cr103_0,i8140). +Le(cr103_0,i8360). +Le(cr103_0,i8580). +Eq(i2830,i2830). +Le(i2830,cr104_0). +Le(cr104_0,i2860). +Le(i-30,cr104_0). +Le(i0,cr104_0). +Le(i13,cr104_0). +Le(i26,cr104_0). +Le(i39,cr104_0). +Le(i52,cr104_0). +Le(i60,cr104_0). +Le(i65,cr104_0). +Le(i70,cr104_0). +Le(i78,cr104_0). +Le(i90,cr104_0). +Le(i91,cr104_0). +Le(i104,cr104_0). +Le(i117,cr104_0). +Le(i130,cr104_0). +Le(i143,cr104_0). +Le(i156,cr104_0). +Le(i169,cr104_0). +Le(i182,cr104_0). +Le(i195,cr104_0). +Le(i208,cr104_0). +Le(i221,cr104_0). +Le(i234,cr104_0). +Le(i247,cr104_0). +Le(i260,cr104_0). +Le(i460,cr104_0). +Le(i530,cr104_0). +Le(i600,cr104_0). +Le(i660,cr104_0). +Le(i670,cr104_0). +Le(i710,cr104_0). +Le(i740,cr104_0). +Le(i810,cr104_0). +Le(i850,cr104_0). +Le(i880,cr104_0). +Le(i890,cr104_0). +Le(i920,cr104_0). +Le(i960,cr104_0). +Le(i990,cr104_0). +Le(i1030,cr104_0). +Le(i1060,cr104_0). +Le(i1100,cr104_0). +Le(i1130,cr104_0). +Le(i1170,cr104_0). +Le(i1200,cr104_0). +Le(i1240,cr104_0). +Le(i1260,cr104_0). +Le(i1270,cr104_0). +Le(i1290,cr104_0). +Le(i1310,cr104_0). +Le(i1320,cr104_0). +Le(i1330,cr104_0). +Le(i1350,cr104_0). +Le(i1360,cr104_0). +Le(i1380,cr104_0). +Le(i1390,cr104_0). +Le(i1420,cr104_0). +Le(i1430,cr104_0). +Le(i1450,cr104_0). +Le(i1460,cr104_0). +Le(i1490,cr104_0). +Le(i1520,cr104_0). +Le(i1530,cr104_0). +Le(i1540,cr104_0). +Le(i1560,cr104_0). +Le(i1590,cr104_0). +Le(i1630,cr104_0). +Le(i1660,cr104_0). +Le(i1700,cr104_0). +Le(i1730,cr104_0). +Le(i1760,cr104_0). +Le(i1770,cr104_0). +Le(i1810,cr104_0). +Le(i1840,cr104_0). +Le(i1880,cr104_0). +Le(i1910,cr104_0). +Le(i1950,cr104_0). +Le(i1980,cr104_0). +Le(i2020,cr104_0). +Le(i2050,cr104_0). +Le(i2090,cr104_0). +Le(i2120,cr104_0). +Le(i2160,cr104_0). +Le(i2190,cr104_0). +Le(i2200,cr104_0). +Le(i2230,cr104_0). +Le(i2270,cr104_0). +Le(i2300,cr104_0). +Le(i2340,cr104_0). +Le(i2370,cr104_0). +Le(i2410,cr104_0). +Le(i2420,cr104_0). +Le(i2440,cr104_0). +Le(i2480,cr104_0). +Le(i2510,cr104_0). +Le(i2550,cr104_0). +Le(i2580,cr104_0). +Le(i2620,cr104_0). +Le(i2640,cr104_0). +Le(i2660,cr104_0). +Le(i2730,cr104_0). +Le(i2760,cr104_0). +Le(i2800,cr104_0). +Le(cr104_0,i2870). +Le(cr104_0,i2940). +Le(cr104_0,i2970). +Le(cr104_0,i3010). +Le(cr104_0,i3040). +Le(cr104_0,i3080). +Le(cr104_0,i3120). +Le(cr104_0,i3150). +Le(cr104_0,i3220). +Le(cr104_0,i3260). +Le(cr104_0,i3290). +Le(cr104_0,i3300). +Le(cr104_0,i3330). +Le(cr104_0,i3400). +Le(cr104_0,i3430). +Le(cr104_0,i3500). +Le(cr104_0,i3520). +Le(cr104_0,i3580). +Le(cr104_0,i3610). +Le(cr104_0,i3650). +Le(cr104_0,i3680). +Le(cr104_0,i3720). +Le(cr104_0,i3740). +Le(cr104_0,i3790). +Le(cr104_0,i3820). +Le(cr104_0,i3860). +Le(cr104_0,i3960). +Le(cr104_0,i4040). +Le(cr104_0,i4140). +Le(cr104_0,i4180). +Le(cr104_0,i4400). +Le(cr104_0,i4620). +Le(cr104_0,i4840). +Le(cr104_0,i5060). +Le(cr104_0,i5280). +Le(cr104_0,i5500). +Le(cr104_0,i5720). +Le(cr104_0,i5940). +Le(cr104_0,i6160). +Le(cr104_0,i6380). +Le(cr104_0,i6600). +Le(cr104_0,i6820). +Le(cr104_0,i7040). +Le(cr104_0,i7260). +Le(cr104_0,i7480). +Le(cr104_0,i7700). +Le(cr104_0,i7920). +Le(cr104_0,i8140). +Le(cr104_0,i8360). +Le(cr104_0,i8580). +Eq(i2860,i2860). +Le(i2860,cr105_0). +Le(cr105_0,i2870). +Le(i-30,cr105_0). +Le(i0,cr105_0). +Le(i13,cr105_0). +Le(i26,cr105_0). +Le(i39,cr105_0). +Le(i52,cr105_0). +Le(i60,cr105_0). +Le(i65,cr105_0). +Le(i70,cr105_0). +Le(i78,cr105_0). +Le(i90,cr105_0). +Le(i91,cr105_0). +Le(i104,cr105_0). +Le(i117,cr105_0). 
+Le(i130,cr105_0). +Le(i143,cr105_0). +Le(i156,cr105_0). +Le(i169,cr105_0). +Le(i182,cr105_0). +Le(i195,cr105_0). +Le(i208,cr105_0). +Le(i221,cr105_0). +Le(i234,cr105_0). +Le(i247,cr105_0). +Le(i260,cr105_0). +Le(i460,cr105_0). +Le(i530,cr105_0). +Le(i600,cr105_0). +Le(i660,cr105_0). +Le(i670,cr105_0). +Le(i710,cr105_0). +Le(i740,cr105_0). +Le(i810,cr105_0). +Le(i850,cr105_0). +Le(i880,cr105_0). +Le(i890,cr105_0). +Le(i920,cr105_0). +Le(i960,cr105_0). +Le(i990,cr105_0). +Le(i1030,cr105_0). +Le(i1060,cr105_0). +Le(i1100,cr105_0). +Le(i1130,cr105_0). +Le(i1170,cr105_0). +Le(i1200,cr105_0). +Le(i1240,cr105_0). +Le(i1260,cr105_0). +Le(i1270,cr105_0). +Le(i1290,cr105_0). +Le(i1310,cr105_0). +Le(i1320,cr105_0). +Le(i1330,cr105_0). +Le(i1350,cr105_0). +Le(i1360,cr105_0). +Le(i1380,cr105_0). +Le(i1390,cr105_0). +Le(i1420,cr105_0). +Le(i1430,cr105_0). +Le(i1450,cr105_0). +Le(i1460,cr105_0). +Le(i1490,cr105_0). +Le(i1520,cr105_0). +Le(i1530,cr105_0). +Le(i1540,cr105_0). +Le(i1560,cr105_0). +Le(i1590,cr105_0). +Le(i1630,cr105_0). +Le(i1660,cr105_0). +Le(i1700,cr105_0). +Le(i1730,cr105_0). +Le(i1760,cr105_0). +Le(i1770,cr105_0). +Le(i1810,cr105_0). +Le(i1840,cr105_0). +Le(i1880,cr105_0). +Le(i1910,cr105_0). +Le(i1950,cr105_0). +Le(i1980,cr105_0). +Le(i2020,cr105_0). +Le(i2050,cr105_0). +Le(i2090,cr105_0). +Le(i2120,cr105_0). +Le(i2160,cr105_0). +Le(i2190,cr105_0). +Le(i2200,cr105_0). +Le(i2230,cr105_0). +Le(i2270,cr105_0). +Le(i2300,cr105_0). +Le(i2340,cr105_0). +Le(i2370,cr105_0). +Le(i2410,cr105_0). +Le(i2420,cr105_0). +Le(i2440,cr105_0). +Le(i2480,cr105_0). +Le(i2510,cr105_0). +Le(i2550,cr105_0). +Le(i2580,cr105_0). +Le(i2620,cr105_0). +Le(i2640,cr105_0). +Le(i2660,cr105_0). +Le(i2730,cr105_0). +Le(i2760,cr105_0). +Le(i2800,cr105_0). +Le(i2830,cr105_0). +Le(cr105_0,i2940). +Le(cr105_0,i2970). +Le(cr105_0,i3010). +Le(cr105_0,i3040). +Le(cr105_0,i3080). +Le(cr105_0,i3120). +Le(cr105_0,i3150). +Le(cr105_0,i3220). +Le(cr105_0,i3260). +Le(cr105_0,i3290). +Le(cr105_0,i3300). +Le(cr105_0,i3330). +Le(cr105_0,i3400). +Le(cr105_0,i3430). +Le(cr105_0,i3500). +Le(cr105_0,i3520). +Le(cr105_0,i3580). +Le(cr105_0,i3610). +Le(cr105_0,i3650). +Le(cr105_0,i3680). +Le(cr105_0,i3720). +Le(cr105_0,i3740). +Le(cr105_0,i3790). +Le(cr105_0,i3820). +Le(cr105_0,i3860). +Le(cr105_0,i3960). +Le(cr105_0,i4040). +Le(cr105_0,i4140). +Le(cr105_0,i4180). +Le(cr105_0,i4400). +Le(cr105_0,i4620). +Le(cr105_0,i4840). +Le(cr105_0,i5060). +Le(cr105_0,i5280). +Le(cr105_0,i5500). +Le(cr105_0,i5720). +Le(cr105_0,i5940). +Le(cr105_0,i6160). +Le(cr105_0,i6380). +Le(cr105_0,i6600). +Le(cr105_0,i6820). +Le(cr105_0,i7040). +Le(cr105_0,i7260). +Le(cr105_0,i7480). +Le(cr105_0,i7700). +Le(cr105_0,i7920). +Le(cr105_0,i8140). +Le(cr105_0,i8360). +Le(cr105_0,i8580). +Eq(i2870,i2870). +Le(i2870,cr106_0). +Le(cr106_0,i2940). +Le(i-30,cr106_0). +Le(i0,cr106_0). +Le(i13,cr106_0). +Le(i26,cr106_0). +Le(i39,cr106_0). +Le(i52,cr106_0). +Le(i60,cr106_0). +Le(i65,cr106_0). +Le(i70,cr106_0). +Le(i78,cr106_0). +Le(i90,cr106_0). +Le(i91,cr106_0). +Le(i104,cr106_0). +Le(i117,cr106_0). +Le(i130,cr106_0). +Le(i143,cr106_0). +Le(i156,cr106_0). +Le(i169,cr106_0). +Le(i182,cr106_0). +Le(i195,cr106_0). +Le(i208,cr106_0). +Le(i221,cr106_0). +Le(i234,cr106_0). +Le(i247,cr106_0). +Le(i260,cr106_0). +Le(i460,cr106_0). +Le(i530,cr106_0). +Le(i600,cr106_0). +Le(i660,cr106_0). +Le(i670,cr106_0). +Le(i710,cr106_0). +Le(i740,cr106_0). +Le(i810,cr106_0). +Le(i850,cr106_0). +Le(i880,cr106_0). +Le(i890,cr106_0). +Le(i920,cr106_0). +Le(i960,cr106_0). +Le(i990,cr106_0). 
+Le(i1030,cr106_0). +Le(i1060,cr106_0). +Le(i1100,cr106_0). +Le(i1130,cr106_0). +Le(i1170,cr106_0). +Le(i1200,cr106_0). +Le(i1240,cr106_0). +Le(i1260,cr106_0). +Le(i1270,cr106_0). +Le(i1290,cr106_0). +Le(i1310,cr106_0). +Le(i1320,cr106_0). +Le(i1330,cr106_0). +Le(i1350,cr106_0). +Le(i1360,cr106_0). +Le(i1380,cr106_0). +Le(i1390,cr106_0). +Le(i1420,cr106_0). +Le(i1430,cr106_0). +Le(i1450,cr106_0). +Le(i1460,cr106_0). +Le(i1490,cr106_0). +Le(i1520,cr106_0). +Le(i1530,cr106_0). +Le(i1540,cr106_0). +Le(i1560,cr106_0). +Le(i1590,cr106_0). +Le(i1630,cr106_0). +Le(i1660,cr106_0). +Le(i1700,cr106_0). +Le(i1730,cr106_0). +Le(i1760,cr106_0). +Le(i1770,cr106_0). +Le(i1810,cr106_0). +Le(i1840,cr106_0). +Le(i1880,cr106_0). +Le(i1910,cr106_0). +Le(i1950,cr106_0). +Le(i1980,cr106_0). +Le(i2020,cr106_0). +Le(i2050,cr106_0). +Le(i2090,cr106_0). +Le(i2120,cr106_0). +Le(i2160,cr106_0). +Le(i2190,cr106_0). +Le(i2200,cr106_0). +Le(i2230,cr106_0). +Le(i2270,cr106_0). +Le(i2300,cr106_0). +Le(i2340,cr106_0). +Le(i2370,cr106_0). +Le(i2410,cr106_0). +Le(i2420,cr106_0). +Le(i2440,cr106_0). +Le(i2480,cr106_0). +Le(i2510,cr106_0). +Le(i2550,cr106_0). +Le(i2580,cr106_0). +Le(i2620,cr106_0). +Le(i2640,cr106_0). +Le(i2660,cr106_0). +Le(i2730,cr106_0). +Le(i2760,cr106_0). +Le(i2800,cr106_0). +Le(i2830,cr106_0). +Le(i2860,cr106_0). +Le(cr106_0,i2970). +Le(cr106_0,i3010). +Le(cr106_0,i3040). +Le(cr106_0,i3080). +Le(cr106_0,i3120). +Le(cr106_0,i3150). +Le(cr106_0,i3220). +Le(cr106_0,i3260). +Le(cr106_0,i3290). +Le(cr106_0,i3300). +Le(cr106_0,i3330). +Le(cr106_0,i3400). +Le(cr106_0,i3430). +Le(cr106_0,i3500). +Le(cr106_0,i3520). +Le(cr106_0,i3580). +Le(cr106_0,i3610). +Le(cr106_0,i3650). +Le(cr106_0,i3680). +Le(cr106_0,i3720). +Le(cr106_0,i3740). +Le(cr106_0,i3790). +Le(cr106_0,i3820). +Le(cr106_0,i3860). +Le(cr106_0,i3960). +Le(cr106_0,i4040). +Le(cr106_0,i4140). +Le(cr106_0,i4180). +Le(cr106_0,i4400). +Le(cr106_0,i4620). +Le(cr106_0,i4840). +Le(cr106_0,i5060). +Le(cr106_0,i5280). +Le(cr106_0,i5500). +Le(cr106_0,i5720). +Le(cr106_0,i5940). +Le(cr106_0,i6160). +Le(cr106_0,i6380). +Le(cr106_0,i6600). +Le(cr106_0,i6820). +Le(cr106_0,i7040). +Le(cr106_0,i7260). +Le(cr106_0,i7480). +Le(cr106_0,i7700). +Le(cr106_0,i7920). +Le(cr106_0,i8140). +Le(cr106_0,i8360). +Le(cr106_0,i8580). +Eq(i2940,i2940). +Le(i2940,cr107_0). +Le(cr107_0,i2970). +Le(i-30,cr107_0). +Le(i0,cr107_0). +Le(i13,cr107_0). +Le(i26,cr107_0). +Le(i39,cr107_0). +Le(i52,cr107_0). +Le(i60,cr107_0). +Le(i65,cr107_0). +Le(i70,cr107_0). +Le(i78,cr107_0). +Le(i90,cr107_0). +Le(i91,cr107_0). +Le(i104,cr107_0). +Le(i117,cr107_0). +Le(i130,cr107_0). +Le(i143,cr107_0). +Le(i156,cr107_0). +Le(i169,cr107_0). +Le(i182,cr107_0). +Le(i195,cr107_0). +Le(i208,cr107_0). +Le(i221,cr107_0). +Le(i234,cr107_0). +Le(i247,cr107_0). +Le(i260,cr107_0). +Le(i460,cr107_0). +Le(i530,cr107_0). +Le(i600,cr107_0). +Le(i660,cr107_0). +Le(i670,cr107_0). +Le(i710,cr107_0). +Le(i740,cr107_0). +Le(i810,cr107_0). +Le(i850,cr107_0). +Le(i880,cr107_0). +Le(i890,cr107_0). +Le(i920,cr107_0). +Le(i960,cr107_0). +Le(i990,cr107_0). +Le(i1030,cr107_0). +Le(i1060,cr107_0). +Le(i1100,cr107_0). +Le(i1130,cr107_0). +Le(i1170,cr107_0). +Le(i1200,cr107_0). +Le(i1240,cr107_0). +Le(i1260,cr107_0). +Le(i1270,cr107_0). +Le(i1290,cr107_0). +Le(i1310,cr107_0). +Le(i1320,cr107_0). +Le(i1330,cr107_0). +Le(i1350,cr107_0). +Le(i1360,cr107_0). +Le(i1380,cr107_0). +Le(i1390,cr107_0). +Le(i1420,cr107_0). +Le(i1430,cr107_0). +Le(i1450,cr107_0). +Le(i1460,cr107_0). +Le(i1490,cr107_0). +Le(i1520,cr107_0). +Le(i1530,cr107_0). 
+Le(i1540,cr107_0). +Le(i1560,cr107_0). +Le(i1590,cr107_0). +Le(i1630,cr107_0). +Le(i1660,cr107_0). +Le(i1700,cr107_0). +Le(i1730,cr107_0). +Le(i1760,cr107_0). +Le(i1770,cr107_0). +Le(i1810,cr107_0). +Le(i1840,cr107_0). +Le(i1880,cr107_0). +Le(i1910,cr107_0). +Le(i1950,cr107_0). +Le(i1980,cr107_0). +Le(i2020,cr107_0). +Le(i2050,cr107_0). +Le(i2090,cr107_0). +Le(i2120,cr107_0). +Le(i2160,cr107_0). +Le(i2190,cr107_0). +Le(i2200,cr107_0). +Le(i2230,cr107_0). +Le(i2270,cr107_0). +Le(i2300,cr107_0). +Le(i2340,cr107_0). +Le(i2370,cr107_0). +Le(i2410,cr107_0). +Le(i2420,cr107_0). +Le(i2440,cr107_0). +Le(i2480,cr107_0). +Le(i2510,cr107_0). +Le(i2550,cr107_0). +Le(i2580,cr107_0). +Le(i2620,cr107_0). +Le(i2640,cr107_0). +Le(i2660,cr107_0). +Le(i2730,cr107_0). +Le(i2760,cr107_0). +Le(i2800,cr107_0). +Le(i2830,cr107_0). +Le(i2860,cr107_0). +Le(i2870,cr107_0). +Le(cr107_0,i3010). +Le(cr107_0,i3040). +Le(cr107_0,i3080). +Le(cr107_0,i3120). +Le(cr107_0,i3150). +Le(cr107_0,i3220). +Le(cr107_0,i3260). +Le(cr107_0,i3290). +Le(cr107_0,i3300). +Le(cr107_0,i3330). +Le(cr107_0,i3400). +Le(cr107_0,i3430). +Le(cr107_0,i3500). +Le(cr107_0,i3520). +Le(cr107_0,i3580). +Le(cr107_0,i3610). +Le(cr107_0,i3650). +Le(cr107_0,i3680). +Le(cr107_0,i3720). +Le(cr107_0,i3740). +Le(cr107_0,i3790). +Le(cr107_0,i3820). +Le(cr107_0,i3860). +Le(cr107_0,i3960). +Le(cr107_0,i4040). +Le(cr107_0,i4140). +Le(cr107_0,i4180). +Le(cr107_0,i4400). +Le(cr107_0,i4620). +Le(cr107_0,i4840). +Le(cr107_0,i5060). +Le(cr107_0,i5280). +Le(cr107_0,i5500). +Le(cr107_0,i5720). +Le(cr107_0,i5940). +Le(cr107_0,i6160). +Le(cr107_0,i6380). +Le(cr107_0,i6600). +Le(cr107_0,i6820). +Le(cr107_0,i7040). +Le(cr107_0,i7260). +Le(cr107_0,i7480). +Le(cr107_0,i7700). +Le(cr107_0,i7920). +Le(cr107_0,i8140). +Le(cr107_0,i8360). +Le(cr107_0,i8580). +Eq(i2970,i2970). +Le(i2970,cr108_0). +Le(cr108_0,i3010). +Le(i-30,cr108_0). +Le(i0,cr108_0). +Le(i13,cr108_0). +Le(i26,cr108_0). +Le(i39,cr108_0). +Le(i52,cr108_0). +Le(i60,cr108_0). +Le(i65,cr108_0). +Le(i70,cr108_0). +Le(i78,cr108_0). +Le(i90,cr108_0). +Le(i91,cr108_0). +Le(i104,cr108_0). +Le(i117,cr108_0). +Le(i130,cr108_0). +Le(i143,cr108_0). +Le(i156,cr108_0). +Le(i169,cr108_0). +Le(i182,cr108_0). +Le(i195,cr108_0). +Le(i208,cr108_0). +Le(i221,cr108_0). +Le(i234,cr108_0). +Le(i247,cr108_0). +Le(i260,cr108_0). +Le(i460,cr108_0). +Le(i530,cr108_0). +Le(i600,cr108_0). +Le(i660,cr108_0). +Le(i670,cr108_0). +Le(i710,cr108_0). +Le(i740,cr108_0). +Le(i810,cr108_0). +Le(i850,cr108_0). +Le(i880,cr108_0). +Le(i890,cr108_0). +Le(i920,cr108_0). +Le(i960,cr108_0). +Le(i990,cr108_0). +Le(i1030,cr108_0). +Le(i1060,cr108_0). +Le(i1100,cr108_0). +Le(i1130,cr108_0). +Le(i1170,cr108_0). +Le(i1200,cr108_0). +Le(i1240,cr108_0). +Le(i1260,cr108_0). +Le(i1270,cr108_0). +Le(i1290,cr108_0). +Le(i1310,cr108_0). +Le(i1320,cr108_0). +Le(i1330,cr108_0). +Le(i1350,cr108_0). +Le(i1360,cr108_0). +Le(i1380,cr108_0). +Le(i1390,cr108_0). +Le(i1420,cr108_0). +Le(i1430,cr108_0). +Le(i1450,cr108_0). +Le(i1460,cr108_0). +Le(i1490,cr108_0). +Le(i1520,cr108_0). +Le(i1530,cr108_0). +Le(i1540,cr108_0). +Le(i1560,cr108_0). +Le(i1590,cr108_0). +Le(i1630,cr108_0). +Le(i1660,cr108_0). +Le(i1700,cr108_0). +Le(i1730,cr108_0). +Le(i1760,cr108_0). +Le(i1770,cr108_0). +Le(i1810,cr108_0). +Le(i1840,cr108_0). +Le(i1880,cr108_0). +Le(i1910,cr108_0). +Le(i1950,cr108_0). +Le(i1980,cr108_0). +Le(i2020,cr108_0). +Le(i2050,cr108_0). +Le(i2090,cr108_0). +Le(i2120,cr108_0). +Le(i2160,cr108_0). +Le(i2190,cr108_0). +Le(i2200,cr108_0). +Le(i2230,cr108_0). +Le(i2270,cr108_0). 
+Le(i2300,cr108_0). +Le(i2340,cr108_0). +Le(i2370,cr108_0). +Le(i2410,cr108_0). +Le(i2420,cr108_0). +Le(i2440,cr108_0). +Le(i2480,cr108_0). +Le(i2510,cr108_0). +Le(i2550,cr108_0). +Le(i2580,cr108_0). +Le(i2620,cr108_0). +Le(i2640,cr108_0). +Le(i2660,cr108_0). +Le(i2730,cr108_0). +Le(i2760,cr108_0). +Le(i2800,cr108_0). +Le(i2830,cr108_0). +Le(i2860,cr108_0). +Le(i2870,cr108_0). +Le(i2940,cr108_0). +Le(cr108_0,i3040). +Le(cr108_0,i3080). +Le(cr108_0,i3120). +Le(cr108_0,i3150). +Le(cr108_0,i3220). +Le(cr108_0,i3260). +Le(cr108_0,i3290). +Le(cr108_0,i3300). +Le(cr108_0,i3330). +Le(cr108_0,i3400). +Le(cr108_0,i3430). +Le(cr108_0,i3500). +Le(cr108_0,i3520). +Le(cr108_0,i3580). +Le(cr108_0,i3610). +Le(cr108_0,i3650). +Le(cr108_0,i3680). +Le(cr108_0,i3720). +Le(cr108_0,i3740). +Le(cr108_0,i3790). +Le(cr108_0,i3820). +Le(cr108_0,i3860). +Le(cr108_0,i3960). +Le(cr108_0,i4040). +Le(cr108_0,i4140). +Le(cr108_0,i4180). +Le(cr108_0,i4400). +Le(cr108_0,i4620). +Le(cr108_0,i4840). +Le(cr108_0,i5060). +Le(cr108_0,i5280). +Le(cr108_0,i5500). +Le(cr108_0,i5720). +Le(cr108_0,i5940). +Le(cr108_0,i6160). +Le(cr108_0,i6380). +Le(cr108_0,i6600). +Le(cr108_0,i6820). +Le(cr108_0,i7040). +Le(cr108_0,i7260). +Le(cr108_0,i7480). +Le(cr108_0,i7700). +Le(cr108_0,i7920). +Le(cr108_0,i8140). +Le(cr108_0,i8360). +Le(cr108_0,i8580). +Eq(i3010,i3010). +Le(i3010,cr109_0). +Le(cr109_0,i3040). +Le(i-30,cr109_0). +Le(i0,cr109_0). +Le(i13,cr109_0). +Le(i26,cr109_0). +Le(i39,cr109_0). +Le(i52,cr109_0). +Le(i60,cr109_0). +Le(i65,cr109_0). +Le(i70,cr109_0). +Le(i78,cr109_0). +Le(i90,cr109_0). +Le(i91,cr109_0). +Le(i104,cr109_0). +Le(i117,cr109_0). +Le(i130,cr109_0). +Le(i143,cr109_0). +Le(i156,cr109_0). +Le(i169,cr109_0). +Le(i182,cr109_0). +Le(i195,cr109_0). +Le(i208,cr109_0). +Le(i221,cr109_0). +Le(i234,cr109_0). +Le(i247,cr109_0). +Le(i260,cr109_0). +Le(i460,cr109_0). +Le(i530,cr109_0). +Le(i600,cr109_0). +Le(i660,cr109_0). +Le(i670,cr109_0). +Le(i710,cr109_0). +Le(i740,cr109_0). +Le(i810,cr109_0). +Le(i850,cr109_0). +Le(i880,cr109_0). +Le(i890,cr109_0). +Le(i920,cr109_0). +Le(i960,cr109_0). +Le(i990,cr109_0). +Le(i1030,cr109_0). +Le(i1060,cr109_0). +Le(i1100,cr109_0). +Le(i1130,cr109_0). +Le(i1170,cr109_0). +Le(i1200,cr109_0). +Le(i1240,cr109_0). +Le(i1260,cr109_0). +Le(i1270,cr109_0). +Le(i1290,cr109_0). +Le(i1310,cr109_0). +Le(i1320,cr109_0). +Le(i1330,cr109_0). +Le(i1350,cr109_0). +Le(i1360,cr109_0). +Le(i1380,cr109_0). +Le(i1390,cr109_0). +Le(i1420,cr109_0). +Le(i1430,cr109_0). +Le(i1450,cr109_0). +Le(i1460,cr109_0). +Le(i1490,cr109_0). +Le(i1520,cr109_0). +Le(i1530,cr109_0). +Le(i1540,cr109_0). +Le(i1560,cr109_0). +Le(i1590,cr109_0). +Le(i1630,cr109_0). +Le(i1660,cr109_0). +Le(i1700,cr109_0). +Le(i1730,cr109_0). +Le(i1760,cr109_0). +Le(i1770,cr109_0). +Le(i1810,cr109_0). +Le(i1840,cr109_0). +Le(i1880,cr109_0). +Le(i1910,cr109_0). +Le(i1950,cr109_0). +Le(i1980,cr109_0). +Le(i2020,cr109_0). +Le(i2050,cr109_0). +Le(i2090,cr109_0). +Le(i2120,cr109_0). +Le(i2160,cr109_0). +Le(i2190,cr109_0). +Le(i2200,cr109_0). +Le(i2230,cr109_0). +Le(i2270,cr109_0). +Le(i2300,cr109_0). +Le(i2340,cr109_0). +Le(i2370,cr109_0). +Le(i2410,cr109_0). +Le(i2420,cr109_0). +Le(i2440,cr109_0). +Le(i2480,cr109_0). +Le(i2510,cr109_0). +Le(i2550,cr109_0). +Le(i2580,cr109_0). +Le(i2620,cr109_0). +Le(i2640,cr109_0). +Le(i2660,cr109_0). +Le(i2730,cr109_0). +Le(i2760,cr109_0). +Le(i2800,cr109_0). +Le(i2830,cr109_0). +Le(i2860,cr109_0). +Le(i2870,cr109_0). +Le(i2940,cr109_0). +Le(i2970,cr109_0). +Le(cr109_0,i3080). +Le(cr109_0,i3120). +Le(cr109_0,i3150). 
+Le(cr109_0,i3220). +Le(cr109_0,i3260). +Le(cr109_0,i3290). +Le(cr109_0,i3300). +Le(cr109_0,i3330). +Le(cr109_0,i3400). +Le(cr109_0,i3430). +Le(cr109_0,i3500). +Le(cr109_0,i3520). +Le(cr109_0,i3580). +Le(cr109_0,i3610). +Le(cr109_0,i3650). +Le(cr109_0,i3680). +Le(cr109_0,i3720). +Le(cr109_0,i3740). +Le(cr109_0,i3790). +Le(cr109_0,i3820). +Le(cr109_0,i3860). +Le(cr109_0,i3960). +Le(cr109_0,i4040). +Le(cr109_0,i4140). +Le(cr109_0,i4180). +Le(cr109_0,i4400). +Le(cr109_0,i4620). +Le(cr109_0,i4840). +Le(cr109_0,i5060). +Le(cr109_0,i5280). +Le(cr109_0,i5500). +Le(cr109_0,i5720). +Le(cr109_0,i5940). +Le(cr109_0,i6160). +Le(cr109_0,i6380). +Le(cr109_0,i6600). +Le(cr109_0,i6820). +Le(cr109_0,i7040). +Le(cr109_0,i7260). +Le(cr109_0,i7480). +Le(cr109_0,i7700). +Le(cr109_0,i7920). +Le(cr109_0,i8140). +Le(cr109_0,i8360). +Le(cr109_0,i8580). +Eq(i3040,i3040). +Le(i3040,cr110_0). +Le(cr110_0,i3080). +Le(i-30,cr110_0). +Le(i0,cr110_0). +Le(i13,cr110_0). +Le(i26,cr110_0). +Le(i39,cr110_0). +Le(i52,cr110_0). +Le(i60,cr110_0). +Le(i65,cr110_0). +Le(i70,cr110_0). +Le(i78,cr110_0). +Le(i90,cr110_0). +Le(i91,cr110_0). +Le(i104,cr110_0). +Le(i117,cr110_0). +Le(i130,cr110_0). +Le(i143,cr110_0). +Le(i156,cr110_0). +Le(i169,cr110_0). +Le(i182,cr110_0). +Le(i195,cr110_0). +Le(i208,cr110_0). +Le(i221,cr110_0). +Le(i234,cr110_0). +Le(i247,cr110_0). +Le(i260,cr110_0). +Le(i460,cr110_0). +Le(i530,cr110_0). +Le(i600,cr110_0). +Le(i660,cr110_0). +Le(i670,cr110_0). +Le(i710,cr110_0). +Le(i740,cr110_0). +Le(i810,cr110_0). +Le(i850,cr110_0). +Le(i880,cr110_0). +Le(i890,cr110_0). +Le(i920,cr110_0). +Le(i960,cr110_0). +Le(i990,cr110_0). +Le(i1030,cr110_0). +Le(i1060,cr110_0). +Le(i1100,cr110_0). +Le(i1130,cr110_0). +Le(i1170,cr110_0). +Le(i1200,cr110_0). +Le(i1240,cr110_0). +Le(i1260,cr110_0). +Le(i1270,cr110_0). +Le(i1290,cr110_0). +Le(i1310,cr110_0). +Le(i1320,cr110_0). +Le(i1330,cr110_0). +Le(i1350,cr110_0). +Le(i1360,cr110_0). +Le(i1380,cr110_0). +Le(i1390,cr110_0). +Le(i1420,cr110_0). +Le(i1430,cr110_0). +Le(i1450,cr110_0). +Le(i1460,cr110_0). +Le(i1490,cr110_0). +Le(i1520,cr110_0). +Le(i1530,cr110_0). +Le(i1540,cr110_0). +Le(i1560,cr110_0). +Le(i1590,cr110_0). +Le(i1630,cr110_0). +Le(i1660,cr110_0). +Le(i1700,cr110_0). +Le(i1730,cr110_0). +Le(i1760,cr110_0). +Le(i1770,cr110_0). +Le(i1810,cr110_0). +Le(i1840,cr110_0). +Le(i1880,cr110_0). +Le(i1910,cr110_0). +Le(i1950,cr110_0). +Le(i1980,cr110_0). +Le(i2020,cr110_0). +Le(i2050,cr110_0). +Le(i2090,cr110_0). +Le(i2120,cr110_0). +Le(i2160,cr110_0). +Le(i2190,cr110_0). +Le(i2200,cr110_0). +Le(i2230,cr110_0). +Le(i2270,cr110_0). +Le(i2300,cr110_0). +Le(i2340,cr110_0). +Le(i2370,cr110_0). +Le(i2410,cr110_0). +Le(i2420,cr110_0). +Le(i2440,cr110_0). +Le(i2480,cr110_0). +Le(i2510,cr110_0). +Le(i2550,cr110_0). +Le(i2580,cr110_0). +Le(i2620,cr110_0). +Le(i2640,cr110_0). +Le(i2660,cr110_0). +Le(i2730,cr110_0). +Le(i2760,cr110_0). +Le(i2800,cr110_0). +Le(i2830,cr110_0). +Le(i2860,cr110_0). +Le(i2870,cr110_0). +Le(i2940,cr110_0). +Le(i2970,cr110_0). +Le(i3010,cr110_0). +Le(cr110_0,i3120). +Le(cr110_0,i3150). +Le(cr110_0,i3220). +Le(cr110_0,i3260). +Le(cr110_0,i3290). +Le(cr110_0,i3300). +Le(cr110_0,i3330). +Le(cr110_0,i3400). +Le(cr110_0,i3430). +Le(cr110_0,i3500). +Le(cr110_0,i3520). +Le(cr110_0,i3580). +Le(cr110_0,i3610). +Le(cr110_0,i3650). +Le(cr110_0,i3680). +Le(cr110_0,i3720). +Le(cr110_0,i3740). +Le(cr110_0,i3790). +Le(cr110_0,i3820). +Le(cr110_0,i3860). +Le(cr110_0,i3960). +Le(cr110_0,i4040). +Le(cr110_0,i4140). +Le(cr110_0,i4180). +Le(cr110_0,i4400). +Le(cr110_0,i4620). 
+Le(cr110_0,i4840). +Le(cr110_0,i5060). +Le(cr110_0,i5280). +Le(cr110_0,i5500). +Le(cr110_0,i5720). +Le(cr110_0,i5940). +Le(cr110_0,i6160). +Le(cr110_0,i6380). +Le(cr110_0,i6600). +Le(cr110_0,i6820). +Le(cr110_0,i7040). +Le(cr110_0,i7260). +Le(cr110_0,i7480). +Le(cr110_0,i7700). +Le(cr110_0,i7920). +Le(cr110_0,i8140). +Le(cr110_0,i8360). +Le(cr110_0,i8580). +Eq(i3080,i3080). +Le(i3080,cr111_0). +Le(cr111_0,i3120). +Le(i-30,cr111_0). +Le(i0,cr111_0). +Le(i13,cr111_0). +Le(i26,cr111_0). +Le(i39,cr111_0). +Le(i52,cr111_0). +Le(i60,cr111_0). +Le(i65,cr111_0). +Le(i70,cr111_0). +Le(i78,cr111_0). +Le(i90,cr111_0). +Le(i91,cr111_0). +Le(i104,cr111_0). +Le(i117,cr111_0). +Le(i130,cr111_0). +Le(i143,cr111_0). +Le(i156,cr111_0). +Le(i169,cr111_0). +Le(i182,cr111_0). +Le(i195,cr111_0). +Le(i208,cr111_0). +Le(i221,cr111_0). +Le(i234,cr111_0). +Le(i247,cr111_0). +Le(i260,cr111_0). +Le(i460,cr111_0). +Le(i530,cr111_0). +Le(i600,cr111_0). +Le(i660,cr111_0). +Le(i670,cr111_0). +Le(i710,cr111_0). +Le(i740,cr111_0). +Le(i810,cr111_0). +Le(i850,cr111_0). +Le(i880,cr111_0). +Le(i890,cr111_0). +Le(i920,cr111_0). +Le(i960,cr111_0). +Le(i990,cr111_0). +Le(i1030,cr111_0). +Le(i1060,cr111_0). +Le(i1100,cr111_0). +Le(i1130,cr111_0). +Le(i1170,cr111_0). +Le(i1200,cr111_0). +Le(i1240,cr111_0). +Le(i1260,cr111_0). +Le(i1270,cr111_0). +Le(i1290,cr111_0). +Le(i1310,cr111_0). +Le(i1320,cr111_0). +Le(i1330,cr111_0). +Le(i1350,cr111_0). +Le(i1360,cr111_0). +Le(i1380,cr111_0). +Le(i1390,cr111_0). +Le(i1420,cr111_0). +Le(i1430,cr111_0). +Le(i1450,cr111_0). +Le(i1460,cr111_0). +Le(i1490,cr111_0). +Le(i1520,cr111_0). +Le(i1530,cr111_0). +Le(i1540,cr111_0). +Le(i1560,cr111_0). +Le(i1590,cr111_0). +Le(i1630,cr111_0). +Le(i1660,cr111_0). +Le(i1700,cr111_0). +Le(i1730,cr111_0). +Le(i1760,cr111_0). +Le(i1770,cr111_0). +Le(i1810,cr111_0). +Le(i1840,cr111_0). +Le(i1880,cr111_0). +Le(i1910,cr111_0). +Le(i1950,cr111_0). +Le(i1980,cr111_0). +Le(i2020,cr111_0). +Le(i2050,cr111_0). +Le(i2090,cr111_0). +Le(i2120,cr111_0). +Le(i2160,cr111_0). +Le(i2190,cr111_0). +Le(i2200,cr111_0). +Le(i2230,cr111_0). +Le(i2270,cr111_0). +Le(i2300,cr111_0). +Le(i2340,cr111_0). +Le(i2370,cr111_0). +Le(i2410,cr111_0). +Le(i2420,cr111_0). +Le(i2440,cr111_0). +Le(i2480,cr111_0). +Le(i2510,cr111_0). +Le(i2550,cr111_0). +Le(i2580,cr111_0). +Le(i2620,cr111_0). +Le(i2640,cr111_0). +Le(i2660,cr111_0). +Le(i2730,cr111_0). +Le(i2760,cr111_0). +Le(i2800,cr111_0). +Le(i2830,cr111_0). +Le(i2860,cr111_0). +Le(i2870,cr111_0). +Le(i2940,cr111_0). +Le(i2970,cr111_0). +Le(i3010,cr111_0). +Le(i3040,cr111_0). +Le(cr111_0,i3150). +Le(cr111_0,i3220). +Le(cr111_0,i3260). +Le(cr111_0,i3290). +Le(cr111_0,i3300). +Le(cr111_0,i3330). +Le(cr111_0,i3400). +Le(cr111_0,i3430). +Le(cr111_0,i3500). +Le(cr111_0,i3520). +Le(cr111_0,i3580). +Le(cr111_0,i3610). +Le(cr111_0,i3650). +Le(cr111_0,i3680). +Le(cr111_0,i3720). +Le(cr111_0,i3740). +Le(cr111_0,i3790). +Le(cr111_0,i3820). +Le(cr111_0,i3860). +Le(cr111_0,i3960). +Le(cr111_0,i4040). +Le(cr111_0,i4140). +Le(cr111_0,i4180). +Le(cr111_0,i4400). +Le(cr111_0,i4620). +Le(cr111_0,i4840). +Le(cr111_0,i5060). +Le(cr111_0,i5280). +Le(cr111_0,i5500). +Le(cr111_0,i5720). +Le(cr111_0,i5940). +Le(cr111_0,i6160). +Le(cr111_0,i6380). +Le(cr111_0,i6600). +Le(cr111_0,i6820). +Le(cr111_0,i7040). +Le(cr111_0,i7260). +Le(cr111_0,i7480). +Le(cr111_0,i7700). +Le(cr111_0,i7920). +Le(cr111_0,i8140). +Le(cr111_0,i8360). +Le(cr111_0,i8580). +Eq(i3120,i3120). +Le(i3120,cr112_0). +Le(cr112_0,i3150). +Le(i-30,cr112_0). +Le(i0,cr112_0). +Le(i13,cr112_0). 
+Le(i26,cr112_0). +Le(i39,cr112_0). +Le(i52,cr112_0). +Le(i60,cr112_0). +Le(i65,cr112_0). +Le(i70,cr112_0). +Le(i78,cr112_0). +Le(i90,cr112_0). +Le(i91,cr112_0). +Le(i104,cr112_0). +Le(i117,cr112_0). +Le(i130,cr112_0). +Le(i143,cr112_0). +Le(i156,cr112_0). +Le(i169,cr112_0). +Le(i182,cr112_0). +Le(i195,cr112_0). +Le(i208,cr112_0). +Le(i221,cr112_0). +Le(i234,cr112_0). +Le(i247,cr112_0). +Le(i260,cr112_0). +Le(i460,cr112_0). +Le(i530,cr112_0). +Le(i600,cr112_0). +Le(i660,cr112_0). +Le(i670,cr112_0). +Le(i710,cr112_0). +Le(i740,cr112_0). +Le(i810,cr112_0). +Le(i850,cr112_0). +Le(i880,cr112_0). +Le(i890,cr112_0). +Le(i920,cr112_0). +Le(i960,cr112_0). +Le(i990,cr112_0). +Le(i1030,cr112_0). +Le(i1060,cr112_0). +Le(i1100,cr112_0). +Le(i1130,cr112_0). +Le(i1170,cr112_0). +Le(i1200,cr112_0). +Le(i1240,cr112_0). +Le(i1260,cr112_0). +Le(i1270,cr112_0). +Le(i1290,cr112_0). +Le(i1310,cr112_0). +Le(i1320,cr112_0). +Le(i1330,cr112_0). +Le(i1350,cr112_0). +Le(i1360,cr112_0). +Le(i1380,cr112_0). +Le(i1390,cr112_0). +Le(i1420,cr112_0). +Le(i1430,cr112_0). +Le(i1450,cr112_0). +Le(i1460,cr112_0). +Le(i1490,cr112_0). +Le(i1520,cr112_0). +Le(i1530,cr112_0). +Le(i1540,cr112_0). +Le(i1560,cr112_0). +Le(i1590,cr112_0). +Le(i1630,cr112_0). +Le(i1660,cr112_0). +Le(i1700,cr112_0). +Le(i1730,cr112_0). +Le(i1760,cr112_0). +Le(i1770,cr112_0). +Le(i1810,cr112_0). +Le(i1840,cr112_0). +Le(i1880,cr112_0). +Le(i1910,cr112_0). +Le(i1950,cr112_0). +Le(i1980,cr112_0). +Le(i2020,cr112_0). +Le(i2050,cr112_0). +Le(i2090,cr112_0). +Le(i2120,cr112_0). +Le(i2160,cr112_0). +Le(i2190,cr112_0). +Le(i2200,cr112_0). +Le(i2230,cr112_0). +Le(i2270,cr112_0). +Le(i2300,cr112_0). +Le(i2340,cr112_0). +Le(i2370,cr112_0). +Le(i2410,cr112_0). +Le(i2420,cr112_0). +Le(i2440,cr112_0). +Le(i2480,cr112_0). +Le(i2510,cr112_0). +Le(i2550,cr112_0). +Le(i2580,cr112_0). +Le(i2620,cr112_0). +Le(i2640,cr112_0). +Le(i2660,cr112_0). +Le(i2730,cr112_0). +Le(i2760,cr112_0). +Le(i2800,cr112_0). +Le(i2830,cr112_0). +Le(i2860,cr112_0). +Le(i2870,cr112_0). +Le(i2940,cr112_0). +Le(i2970,cr112_0). +Le(i3010,cr112_0). +Le(i3040,cr112_0). +Le(i3080,cr112_0). +Le(cr112_0,i3220). +Le(cr112_0,i3260). +Le(cr112_0,i3290). +Le(cr112_0,i3300). +Le(cr112_0,i3330). +Le(cr112_0,i3400). +Le(cr112_0,i3430). +Le(cr112_0,i3500). +Le(cr112_0,i3520). +Le(cr112_0,i3580). +Le(cr112_0,i3610). +Le(cr112_0,i3650). +Le(cr112_0,i3680). +Le(cr112_0,i3720). +Le(cr112_0,i3740). +Le(cr112_0,i3790). +Le(cr112_0,i3820). +Le(cr112_0,i3860). +Le(cr112_0,i3960). +Le(cr112_0,i4040). +Le(cr112_0,i4140). +Le(cr112_0,i4180). +Le(cr112_0,i4400). +Le(cr112_0,i4620). +Le(cr112_0,i4840). +Le(cr112_0,i5060). +Le(cr112_0,i5280). +Le(cr112_0,i5500). +Le(cr112_0,i5720). +Le(cr112_0,i5940). +Le(cr112_0,i6160). +Le(cr112_0,i6380). +Le(cr112_0,i6600). +Le(cr112_0,i6820). +Le(cr112_0,i7040). +Le(cr112_0,i7260). +Le(cr112_0,i7480). +Le(cr112_0,i7700). +Le(cr112_0,i7920). +Le(cr112_0,i8140). +Le(cr112_0,i8360). +Le(cr112_0,i8580). +Eq(i3150,i3150). +Le(i3150,cr113_0). +Le(cr113_0,i3220). +Le(i-30,cr113_0). +Le(i0,cr113_0). +Le(i13,cr113_0). +Le(i26,cr113_0). +Le(i39,cr113_0). +Le(i52,cr113_0). +Le(i60,cr113_0). +Le(i65,cr113_0). +Le(i70,cr113_0). +Le(i78,cr113_0). +Le(i90,cr113_0). +Le(i91,cr113_0). +Le(i104,cr113_0). +Le(i117,cr113_0). +Le(i130,cr113_0). +Le(i143,cr113_0). +Le(i156,cr113_0). +Le(i169,cr113_0). +Le(i182,cr113_0). +Le(i195,cr113_0). +Le(i208,cr113_0). +Le(i221,cr113_0). +Le(i234,cr113_0). +Le(i247,cr113_0). +Le(i260,cr113_0). +Le(i460,cr113_0). +Le(i530,cr113_0). +Le(i600,cr113_0). +Le(i660,cr113_0). 
+Le(i670,cr113_0). +Le(i710,cr113_0). +Le(i740,cr113_0). +Le(i810,cr113_0). +Le(i850,cr113_0). +Le(i880,cr113_0). +Le(i890,cr113_0). +Le(i920,cr113_0). +Le(i960,cr113_0). +Le(i990,cr113_0). +Le(i1030,cr113_0). +Le(i1060,cr113_0). +Le(i1100,cr113_0). +Le(i1130,cr113_0). +Le(i1170,cr113_0). +Le(i1200,cr113_0). +Le(i1240,cr113_0). +Le(i1260,cr113_0). +Le(i1270,cr113_0). +Le(i1290,cr113_0). +Le(i1310,cr113_0). +Le(i1320,cr113_0). +Le(i1330,cr113_0). +Le(i1350,cr113_0). +Le(i1360,cr113_0). +Le(i1380,cr113_0). +Le(i1390,cr113_0). +Le(i1420,cr113_0). +Le(i1430,cr113_0). +Le(i1450,cr113_0). +Le(i1460,cr113_0). +Le(i1490,cr113_0). +Le(i1520,cr113_0). +Le(i1530,cr113_0). +Le(i1540,cr113_0). +Le(i1560,cr113_0). +Le(i1590,cr113_0). +Le(i1630,cr113_0). +Le(i1660,cr113_0). +Le(i1700,cr113_0). +Le(i1730,cr113_0). +Le(i1760,cr113_0). +Le(i1770,cr113_0). +Le(i1810,cr113_0). +Le(i1840,cr113_0). +Le(i1880,cr113_0). +Le(i1910,cr113_0). +Le(i1950,cr113_0). +Le(i1980,cr113_0). +Le(i2020,cr113_0). +Le(i2050,cr113_0). +Le(i2090,cr113_0). +Le(i2120,cr113_0). +Le(i2160,cr113_0). +Le(i2190,cr113_0). +Le(i2200,cr113_0). +Le(i2230,cr113_0). +Le(i2270,cr113_0). +Le(i2300,cr113_0). +Le(i2340,cr113_0). +Le(i2370,cr113_0). +Le(i2410,cr113_0). +Le(i2420,cr113_0). +Le(i2440,cr113_0). +Le(i2480,cr113_0). +Le(i2510,cr113_0). +Le(i2550,cr113_0). +Le(i2580,cr113_0). +Le(i2620,cr113_0). +Le(i2640,cr113_0). +Le(i2660,cr113_0). +Le(i2730,cr113_0). +Le(i2760,cr113_0). +Le(i2800,cr113_0). +Le(i2830,cr113_0). +Le(i2860,cr113_0). +Le(i2870,cr113_0). +Le(i2940,cr113_0). +Le(i2970,cr113_0). +Le(i3010,cr113_0). +Le(i3040,cr113_0). +Le(i3080,cr113_0). +Le(i3120,cr113_0). +Le(cr113_0,i3260). +Le(cr113_0,i3290). +Le(cr113_0,i3300). +Le(cr113_0,i3330). +Le(cr113_0,i3400). +Le(cr113_0,i3430). +Le(cr113_0,i3500). +Le(cr113_0,i3520). +Le(cr113_0,i3580). +Le(cr113_0,i3610). +Le(cr113_0,i3650). +Le(cr113_0,i3680). +Le(cr113_0,i3720). +Le(cr113_0,i3740). +Le(cr113_0,i3790). +Le(cr113_0,i3820). +Le(cr113_0,i3860). +Le(cr113_0,i3960). +Le(cr113_0,i4040). +Le(cr113_0,i4140). +Le(cr113_0,i4180). +Le(cr113_0,i4400). +Le(cr113_0,i4620). +Le(cr113_0,i4840). +Le(cr113_0,i5060). +Le(cr113_0,i5280). +Le(cr113_0,i5500). +Le(cr113_0,i5720). +Le(cr113_0,i5940). +Le(cr113_0,i6160). +Le(cr113_0,i6380). +Le(cr113_0,i6600). +Le(cr113_0,i6820). +Le(cr113_0,i7040). +Le(cr113_0,i7260). +Le(cr113_0,i7480). +Le(cr113_0,i7700). +Le(cr113_0,i7920). +Le(cr113_0,i8140). +Le(cr113_0,i8360). +Le(cr113_0,i8580). +Eq(i3220,i3220). +Le(i3220,cr114_0). +Le(cr114_0,i3260). +Le(i-30,cr114_0). +Le(i0,cr114_0). +Le(i13,cr114_0). +Le(i26,cr114_0). +Le(i39,cr114_0). +Le(i52,cr114_0). +Le(i60,cr114_0). +Le(i65,cr114_0). +Le(i70,cr114_0). +Le(i78,cr114_0). +Le(i90,cr114_0). +Le(i91,cr114_0). +Le(i104,cr114_0). +Le(i117,cr114_0). +Le(i130,cr114_0). +Le(i143,cr114_0). +Le(i156,cr114_0). +Le(i169,cr114_0). +Le(i182,cr114_0). +Le(i195,cr114_0). +Le(i208,cr114_0). +Le(i221,cr114_0). +Le(i234,cr114_0). +Le(i247,cr114_0). +Le(i260,cr114_0). +Le(i460,cr114_0). +Le(i530,cr114_0). +Le(i600,cr114_0). +Le(i660,cr114_0). +Le(i670,cr114_0). +Le(i710,cr114_0). +Le(i740,cr114_0). +Le(i810,cr114_0). +Le(i850,cr114_0). +Le(i880,cr114_0). +Le(i890,cr114_0). +Le(i920,cr114_0). +Le(i960,cr114_0). +Le(i990,cr114_0). +Le(i1030,cr114_0). +Le(i1060,cr114_0). +Le(i1100,cr114_0). +Le(i1130,cr114_0). +Le(i1170,cr114_0). +Le(i1200,cr114_0). +Le(i1240,cr114_0). +Le(i1260,cr114_0). +Le(i1270,cr114_0). +Le(i1290,cr114_0). +Le(i1310,cr114_0). +Le(i1320,cr114_0). +Le(i1330,cr114_0). +Le(i1350,cr114_0). 
+Le(i1360,cr114_0). +Le(i1380,cr114_0). +Le(i1390,cr114_0). +Le(i1420,cr114_0). +Le(i1430,cr114_0). +Le(i1450,cr114_0). +Le(i1460,cr114_0). +Le(i1490,cr114_0). +Le(i1520,cr114_0). +Le(i1530,cr114_0). +Le(i1540,cr114_0). +Le(i1560,cr114_0). +Le(i1590,cr114_0). +Le(i1630,cr114_0). +Le(i1660,cr114_0). +Le(i1700,cr114_0). +Le(i1730,cr114_0). +Le(i1760,cr114_0). +Le(i1770,cr114_0). +Le(i1810,cr114_0). +Le(i1840,cr114_0). +Le(i1880,cr114_0). +Le(i1910,cr114_0). +Le(i1950,cr114_0). +Le(i1980,cr114_0). +Le(i2020,cr114_0). +Le(i2050,cr114_0). +Le(i2090,cr114_0). +Le(i2120,cr114_0). +Le(i2160,cr114_0). +Le(i2190,cr114_0). +Le(i2200,cr114_0). +Le(i2230,cr114_0). +Le(i2270,cr114_0). +Le(i2300,cr114_0). +Le(i2340,cr114_0). +Le(i2370,cr114_0). +Le(i2410,cr114_0). +Le(i2420,cr114_0). +Le(i2440,cr114_0). +Le(i2480,cr114_0). +Le(i2510,cr114_0). +Le(i2550,cr114_0). +Le(i2580,cr114_0). +Le(i2620,cr114_0). +Le(i2640,cr114_0). +Le(i2660,cr114_0). +Le(i2730,cr114_0). +Le(i2760,cr114_0). +Le(i2800,cr114_0). +Le(i2830,cr114_0). +Le(i2860,cr114_0). +Le(i2870,cr114_0). +Le(i2940,cr114_0). +Le(i2970,cr114_0). +Le(i3010,cr114_0). +Le(i3040,cr114_0). +Le(i3080,cr114_0). +Le(i3120,cr114_0). +Le(i3150,cr114_0). +Le(cr114_0,i3290). +Le(cr114_0,i3300). +Le(cr114_0,i3330). +Le(cr114_0,i3400). +Le(cr114_0,i3430). +Le(cr114_0,i3500). +Le(cr114_0,i3520). +Le(cr114_0,i3580). +Le(cr114_0,i3610). +Le(cr114_0,i3650). +Le(cr114_0,i3680). +Le(cr114_0,i3720). +Le(cr114_0,i3740). +Le(cr114_0,i3790). +Le(cr114_0,i3820). +Le(cr114_0,i3860). +Le(cr114_0,i3960). +Le(cr114_0,i4040). +Le(cr114_0,i4140). +Le(cr114_0,i4180). +Le(cr114_0,i4400). +Le(cr114_0,i4620). +Le(cr114_0,i4840). +Le(cr114_0,i5060). +Le(cr114_0,i5280). +Le(cr114_0,i5500). +Le(cr114_0,i5720). +Le(cr114_0,i5940). +Le(cr114_0,i6160). +Le(cr114_0,i6380). +Le(cr114_0,i6600). +Le(cr114_0,i6820). +Le(cr114_0,i7040). +Le(cr114_0,i7260). +Le(cr114_0,i7480). +Le(cr114_0,i7700). +Le(cr114_0,i7920). +Le(cr114_0,i8140). +Le(cr114_0,i8360). +Le(cr114_0,i8580). +Eq(i3260,i3260). +Le(i3260,cr115_0). +Le(cr115_0,i3290). +Le(i-30,cr115_0). +Le(i0,cr115_0). +Le(i13,cr115_0). +Le(i26,cr115_0). +Le(i39,cr115_0). +Le(i52,cr115_0). +Le(i60,cr115_0). +Le(i65,cr115_0). +Le(i70,cr115_0). +Le(i78,cr115_0). +Le(i90,cr115_0). +Le(i91,cr115_0). +Le(i104,cr115_0). +Le(i117,cr115_0). +Le(i130,cr115_0). +Le(i143,cr115_0). +Le(i156,cr115_0). +Le(i169,cr115_0). +Le(i182,cr115_0). +Le(i195,cr115_0). +Le(i208,cr115_0). +Le(i221,cr115_0). +Le(i234,cr115_0). +Le(i247,cr115_0). +Le(i260,cr115_0). +Le(i460,cr115_0). +Le(i530,cr115_0). +Le(i600,cr115_0). +Le(i660,cr115_0). +Le(i670,cr115_0). +Le(i710,cr115_0). +Le(i740,cr115_0). +Le(i810,cr115_0). +Le(i850,cr115_0). +Le(i880,cr115_0). +Le(i890,cr115_0). +Le(i920,cr115_0). +Le(i960,cr115_0). +Le(i990,cr115_0). +Le(i1030,cr115_0). +Le(i1060,cr115_0). +Le(i1100,cr115_0). +Le(i1130,cr115_0). +Le(i1170,cr115_0). +Le(i1200,cr115_0). +Le(i1240,cr115_0). +Le(i1260,cr115_0). +Le(i1270,cr115_0). +Le(i1290,cr115_0). +Le(i1310,cr115_0). +Le(i1320,cr115_0). +Le(i1330,cr115_0). +Le(i1350,cr115_0). +Le(i1360,cr115_0). +Le(i1380,cr115_0). +Le(i1390,cr115_0). +Le(i1420,cr115_0). +Le(i1430,cr115_0). +Le(i1450,cr115_0). +Le(i1460,cr115_0). +Le(i1490,cr115_0). +Le(i1520,cr115_0). +Le(i1530,cr115_0). +Le(i1540,cr115_0). +Le(i1560,cr115_0). +Le(i1590,cr115_0). +Le(i1630,cr115_0). +Le(i1660,cr115_0). +Le(i1700,cr115_0). +Le(i1730,cr115_0). +Le(i1760,cr115_0). +Le(i1770,cr115_0). +Le(i1810,cr115_0). +Le(i1840,cr115_0). +Le(i1880,cr115_0). +Le(i1910,cr115_0). +Le(i1950,cr115_0). 
+Le(i1980,cr115_0). +Le(i2020,cr115_0). +Le(i2050,cr115_0). +Le(i2090,cr115_0). +Le(i2120,cr115_0). +Le(i2160,cr115_0). +Le(i2190,cr115_0). +Le(i2200,cr115_0). +Le(i2230,cr115_0). +Le(i2270,cr115_0). +Le(i2300,cr115_0). +Le(i2340,cr115_0). +Le(i2370,cr115_0). +Le(i2410,cr115_0). +Le(i2420,cr115_0). +Le(i2440,cr115_0). +Le(i2480,cr115_0). +Le(i2510,cr115_0). +Le(i2550,cr115_0). +Le(i2580,cr115_0). +Le(i2620,cr115_0). +Le(i2640,cr115_0). +Le(i2660,cr115_0). +Le(i2730,cr115_0). +Le(i2760,cr115_0). +Le(i2800,cr115_0). +Le(i2830,cr115_0). +Le(i2860,cr115_0). +Le(i2870,cr115_0). +Le(i2940,cr115_0). +Le(i2970,cr115_0). +Le(i3010,cr115_0). +Le(i3040,cr115_0). +Le(i3080,cr115_0). +Le(i3120,cr115_0). +Le(i3150,cr115_0). +Le(i3220,cr115_0). +Le(cr115_0,i3300). +Le(cr115_0,i3330). +Le(cr115_0,i3400). +Le(cr115_0,i3430). +Le(cr115_0,i3500). +Le(cr115_0,i3520). +Le(cr115_0,i3580). +Le(cr115_0,i3610). +Le(cr115_0,i3650). +Le(cr115_0,i3680). +Le(cr115_0,i3720). +Le(cr115_0,i3740). +Le(cr115_0,i3790). +Le(cr115_0,i3820). +Le(cr115_0,i3860). +Le(cr115_0,i3960). +Le(cr115_0,i4040). +Le(cr115_0,i4140). +Le(cr115_0,i4180). +Le(cr115_0,i4400). +Le(cr115_0,i4620). +Le(cr115_0,i4840). +Le(cr115_0,i5060). +Le(cr115_0,i5280). +Le(cr115_0,i5500). +Le(cr115_0,i5720). +Le(cr115_0,i5940). +Le(cr115_0,i6160). +Le(cr115_0,i6380). +Le(cr115_0,i6600). +Le(cr115_0,i6820). +Le(cr115_0,i7040). +Le(cr115_0,i7260). +Le(cr115_0,i7480). +Le(cr115_0,i7700). +Le(cr115_0,i7920). +Le(cr115_0,i8140). +Le(cr115_0,i8360). +Le(cr115_0,i8580). +Eq(i3290,i3290). +Le(i3290,cr116_0). +Le(cr116_0,i3300). +Le(i-30,cr116_0). +Le(i0,cr116_0). +Le(i13,cr116_0). +Le(i26,cr116_0). +Le(i39,cr116_0). +Le(i52,cr116_0). +Le(i60,cr116_0). +Le(i65,cr116_0). +Le(i70,cr116_0). +Le(i78,cr116_0). +Le(i90,cr116_0). +Le(i91,cr116_0). +Le(i104,cr116_0). +Le(i117,cr116_0). +Le(i130,cr116_0). +Le(i143,cr116_0). +Le(i156,cr116_0). +Le(i169,cr116_0). +Le(i182,cr116_0). +Le(i195,cr116_0). +Le(i208,cr116_0). +Le(i221,cr116_0). +Le(i234,cr116_0). +Le(i247,cr116_0). +Le(i260,cr116_0). +Le(i460,cr116_0). +Le(i530,cr116_0). +Le(i600,cr116_0). +Le(i660,cr116_0). +Le(i670,cr116_0). +Le(i710,cr116_0). +Le(i740,cr116_0). +Le(i810,cr116_0). +Le(i850,cr116_0). +Le(i880,cr116_0). +Le(i890,cr116_0). +Le(i920,cr116_0). +Le(i960,cr116_0). +Le(i990,cr116_0). +Le(i1030,cr116_0). +Le(i1060,cr116_0). +Le(i1100,cr116_0). +Le(i1130,cr116_0). +Le(i1170,cr116_0). +Le(i1200,cr116_0). +Le(i1240,cr116_0). +Le(i1260,cr116_0). +Le(i1270,cr116_0). +Le(i1290,cr116_0). +Le(i1310,cr116_0). +Le(i1320,cr116_0). +Le(i1330,cr116_0). +Le(i1350,cr116_0). +Le(i1360,cr116_0). +Le(i1380,cr116_0). +Le(i1390,cr116_0). +Le(i1420,cr116_0). +Le(i1430,cr116_0). +Le(i1450,cr116_0). +Le(i1460,cr116_0). +Le(i1490,cr116_0). +Le(i1520,cr116_0). +Le(i1530,cr116_0). +Le(i1540,cr116_0). +Le(i1560,cr116_0). +Le(i1590,cr116_0). +Le(i1630,cr116_0). +Le(i1660,cr116_0). +Le(i1700,cr116_0). +Le(i1730,cr116_0). +Le(i1760,cr116_0). +Le(i1770,cr116_0). +Le(i1810,cr116_0). +Le(i1840,cr116_0). +Le(i1880,cr116_0). +Le(i1910,cr116_0). +Le(i1950,cr116_0). +Le(i1980,cr116_0). +Le(i2020,cr116_0). +Le(i2050,cr116_0). +Le(i2090,cr116_0). +Le(i2120,cr116_0). +Le(i2160,cr116_0). +Le(i2190,cr116_0). +Le(i2200,cr116_0). +Le(i2230,cr116_0). +Le(i2270,cr116_0). +Le(i2300,cr116_0). +Le(i2340,cr116_0). +Le(i2370,cr116_0). +Le(i2410,cr116_0). +Le(i2420,cr116_0). +Le(i2440,cr116_0). +Le(i2480,cr116_0). +Le(i2510,cr116_0). +Le(i2550,cr116_0). +Le(i2580,cr116_0). +Le(i2620,cr116_0). +Le(i2640,cr116_0). +Le(i2660,cr116_0). +Le(i2730,cr116_0). 
+Le(i2760,cr116_0). +Le(i2800,cr116_0). +Le(i2830,cr116_0). +Le(i2860,cr116_0). +Le(i2870,cr116_0). +Le(i2940,cr116_0). +Le(i2970,cr116_0). +Le(i3010,cr116_0). +Le(i3040,cr116_0). +Le(i3080,cr116_0). +Le(i3120,cr116_0). +Le(i3150,cr116_0). +Le(i3220,cr116_0). +Le(i3260,cr116_0). +Le(cr116_0,i3330). +Le(cr116_0,i3400). +Le(cr116_0,i3430). +Le(cr116_0,i3500). +Le(cr116_0,i3520). +Le(cr116_0,i3580). +Le(cr116_0,i3610). +Le(cr116_0,i3650). +Le(cr116_0,i3680). +Le(cr116_0,i3720). +Le(cr116_0,i3740). +Le(cr116_0,i3790). +Le(cr116_0,i3820). +Le(cr116_0,i3860). +Le(cr116_0,i3960). +Le(cr116_0,i4040). +Le(cr116_0,i4140). +Le(cr116_0,i4180). +Le(cr116_0,i4400). +Le(cr116_0,i4620). +Le(cr116_0,i4840). +Le(cr116_0,i5060). +Le(cr116_0,i5280). +Le(cr116_0,i5500). +Le(cr116_0,i5720). +Le(cr116_0,i5940). +Le(cr116_0,i6160). +Le(cr116_0,i6380). +Le(cr116_0,i6600). +Le(cr116_0,i6820). +Le(cr116_0,i7040). +Le(cr116_0,i7260). +Le(cr116_0,i7480). +Le(cr116_0,i7700). +Le(cr116_0,i7920). +Le(cr116_0,i8140). +Le(cr116_0,i8360). +Le(cr116_0,i8580). +Eq(i3300,i3300). +Le(i3300,cr117_0). +Le(cr117_0,i3330). +Le(i-30,cr117_0). +Le(i0,cr117_0). +Le(i13,cr117_0). +Le(i26,cr117_0). +Le(i39,cr117_0). +Le(i52,cr117_0). +Le(i60,cr117_0). +Le(i65,cr117_0). +Le(i70,cr117_0). +Le(i78,cr117_0). +Le(i90,cr117_0). +Le(i91,cr117_0). +Le(i104,cr117_0). +Le(i117,cr117_0). +Le(i130,cr117_0). +Le(i143,cr117_0). +Le(i156,cr117_0). +Le(i169,cr117_0). +Le(i182,cr117_0). +Le(i195,cr117_0). +Le(i208,cr117_0). +Le(i221,cr117_0). +Le(i234,cr117_0). +Le(i247,cr117_0). +Le(i260,cr117_0). +Le(i460,cr117_0). +Le(i530,cr117_0). +Le(i600,cr117_0). +Le(i660,cr117_0). +Le(i670,cr117_0). +Le(i710,cr117_0). +Le(i740,cr117_0). +Le(i810,cr117_0). +Le(i850,cr117_0). +Le(i880,cr117_0). +Le(i890,cr117_0). +Le(i920,cr117_0). +Le(i960,cr117_0). +Le(i990,cr117_0). +Le(i1030,cr117_0). +Le(i1060,cr117_0). +Le(i1100,cr117_0). +Le(i1130,cr117_0). +Le(i1170,cr117_0). +Le(i1200,cr117_0). +Le(i1240,cr117_0). +Le(i1260,cr117_0). +Le(i1270,cr117_0). +Le(i1290,cr117_0). +Le(i1310,cr117_0). +Le(i1320,cr117_0). +Le(i1330,cr117_0). +Le(i1350,cr117_0). +Le(i1360,cr117_0). +Le(i1380,cr117_0). +Le(i1390,cr117_0). +Le(i1420,cr117_0). +Le(i1430,cr117_0). +Le(i1450,cr117_0). +Le(i1460,cr117_0). +Le(i1490,cr117_0). +Le(i1520,cr117_0). +Le(i1530,cr117_0). +Le(i1540,cr117_0). +Le(i1560,cr117_0). +Le(i1590,cr117_0). +Le(i1630,cr117_0). +Le(i1660,cr117_0). +Le(i1700,cr117_0). +Le(i1730,cr117_0). +Le(i1760,cr117_0). +Le(i1770,cr117_0). +Le(i1810,cr117_0). +Le(i1840,cr117_0). +Le(i1880,cr117_0). +Le(i1910,cr117_0). +Le(i1950,cr117_0). +Le(i1980,cr117_0). +Le(i2020,cr117_0). +Le(i2050,cr117_0). +Le(i2090,cr117_0). +Le(i2120,cr117_0). +Le(i2160,cr117_0). +Le(i2190,cr117_0). +Le(i2200,cr117_0). +Le(i2230,cr117_0). +Le(i2270,cr117_0). +Le(i2300,cr117_0). +Le(i2340,cr117_0). +Le(i2370,cr117_0). +Le(i2410,cr117_0). +Le(i2420,cr117_0). +Le(i2440,cr117_0). +Le(i2480,cr117_0). +Le(i2510,cr117_0). +Le(i2550,cr117_0). +Le(i2580,cr117_0). +Le(i2620,cr117_0). +Le(i2640,cr117_0). +Le(i2660,cr117_0). +Le(i2730,cr117_0). +Le(i2760,cr117_0). +Le(i2800,cr117_0). +Le(i2830,cr117_0). +Le(i2860,cr117_0). +Le(i2870,cr117_0). +Le(i2940,cr117_0). +Le(i2970,cr117_0). +Le(i3010,cr117_0). +Le(i3040,cr117_0). +Le(i3080,cr117_0). +Le(i3120,cr117_0). +Le(i3150,cr117_0). +Le(i3220,cr117_0). +Le(i3260,cr117_0). +Le(i3290,cr117_0). +Le(cr117_0,i3400). +Le(cr117_0,i3430). +Le(cr117_0,i3500). +Le(cr117_0,i3520). +Le(cr117_0,i3580). +Le(cr117_0,i3610). +Le(cr117_0,i3650). +Le(cr117_0,i3680). +Le(cr117_0,i3720). 
+Le(cr117_0,i3740). +Le(cr117_0,i3790). +Le(cr117_0,i3820). +Le(cr117_0,i3860). +Le(cr117_0,i3960). +Le(cr117_0,i4040). +Le(cr117_0,i4140). +Le(cr117_0,i4180). +Le(cr117_0,i4400). +Le(cr117_0,i4620). +Le(cr117_0,i4840). +Le(cr117_0,i5060). +Le(cr117_0,i5280). +Le(cr117_0,i5500). +Le(cr117_0,i5720). +Le(cr117_0,i5940). +Le(cr117_0,i6160). +Le(cr117_0,i6380). +Le(cr117_0,i6600). +Le(cr117_0,i6820). +Le(cr117_0,i7040). +Le(cr117_0,i7260). +Le(cr117_0,i7480). +Le(cr117_0,i7700). +Le(cr117_0,i7920). +Le(cr117_0,i8140). +Le(cr117_0,i8360). +Le(cr117_0,i8580). +Eq(i3330,i3330). +Le(i3330,cr118_0). +Le(cr118_0,i3400). +Le(i-30,cr118_0). +Le(i0,cr118_0). +Le(i13,cr118_0). +Le(i26,cr118_0). +Le(i39,cr118_0). +Le(i52,cr118_0). +Le(i60,cr118_0). +Le(i65,cr118_0). +Le(i70,cr118_0). +Le(i78,cr118_0). +Le(i90,cr118_0). +Le(i91,cr118_0). +Le(i104,cr118_0). +Le(i117,cr118_0). +Le(i130,cr118_0). +Le(i143,cr118_0). +Le(i156,cr118_0). +Le(i169,cr118_0). +Le(i182,cr118_0). +Le(i195,cr118_0). +Le(i208,cr118_0). +Le(i221,cr118_0). +Le(i234,cr118_0). +Le(i247,cr118_0). +Le(i260,cr118_0). +Le(i460,cr118_0). +Le(i530,cr118_0). +Le(i600,cr118_0). +Le(i660,cr118_0). +Le(i670,cr118_0). +Le(i710,cr118_0). +Le(i740,cr118_0). +Le(i810,cr118_0). +Le(i850,cr118_0). +Le(i880,cr118_0). +Le(i890,cr118_0). +Le(i920,cr118_0). +Le(i960,cr118_0). +Le(i990,cr118_0). +Le(i1030,cr118_0). +Le(i1060,cr118_0). +Le(i1100,cr118_0). +Le(i1130,cr118_0). +Le(i1170,cr118_0). +Le(i1200,cr118_0). +Le(i1240,cr118_0). +Le(i1260,cr118_0). +Le(i1270,cr118_0). +Le(i1290,cr118_0). +Le(i1310,cr118_0). +Le(i1320,cr118_0). +Le(i1330,cr118_0). +Le(i1350,cr118_0). +Le(i1360,cr118_0). +Le(i1380,cr118_0). +Le(i1390,cr118_0). +Le(i1420,cr118_0). +Le(i1430,cr118_0). +Le(i1450,cr118_0). +Le(i1460,cr118_0). +Le(i1490,cr118_0). +Le(i1520,cr118_0). +Le(i1530,cr118_0). +Le(i1540,cr118_0). +Le(i1560,cr118_0). +Le(i1590,cr118_0). +Le(i1630,cr118_0). +Le(i1660,cr118_0). +Le(i1700,cr118_0). +Le(i1730,cr118_0). +Le(i1760,cr118_0). +Le(i1770,cr118_0). +Le(i1810,cr118_0). +Le(i1840,cr118_0). +Le(i1880,cr118_0). +Le(i1910,cr118_0). +Le(i1950,cr118_0). +Le(i1980,cr118_0). +Le(i2020,cr118_0). +Le(i2050,cr118_0). +Le(i2090,cr118_0). +Le(i2120,cr118_0). +Le(i2160,cr118_0). +Le(i2190,cr118_0). +Le(i2200,cr118_0). +Le(i2230,cr118_0). +Le(i2270,cr118_0). +Le(i2300,cr118_0). +Le(i2340,cr118_0). +Le(i2370,cr118_0). +Le(i2410,cr118_0). +Le(i2420,cr118_0). +Le(i2440,cr118_0). +Le(i2480,cr118_0). +Le(i2510,cr118_0). +Le(i2550,cr118_0). +Le(i2580,cr118_0). +Le(i2620,cr118_0). +Le(i2640,cr118_0). +Le(i2660,cr118_0). +Le(i2730,cr118_0). +Le(i2760,cr118_0). +Le(i2800,cr118_0). +Le(i2830,cr118_0). +Le(i2860,cr118_0). +Le(i2870,cr118_0). +Le(i2940,cr118_0). +Le(i2970,cr118_0). +Le(i3010,cr118_0). +Le(i3040,cr118_0). +Le(i3080,cr118_0). +Le(i3120,cr118_0). +Le(i3150,cr118_0). +Le(i3220,cr118_0). +Le(i3260,cr118_0). +Le(i3290,cr118_0). +Le(i3300,cr118_0). +Le(cr118_0,i3430). +Le(cr118_0,i3500). +Le(cr118_0,i3520). +Le(cr118_0,i3580). +Le(cr118_0,i3610). +Le(cr118_0,i3650). +Le(cr118_0,i3680). +Le(cr118_0,i3720). +Le(cr118_0,i3740). +Le(cr118_0,i3790). +Le(cr118_0,i3820). +Le(cr118_0,i3860). +Le(cr118_0,i3960). +Le(cr118_0,i4040). +Le(cr118_0,i4140). +Le(cr118_0,i4180). +Le(cr118_0,i4400). +Le(cr118_0,i4620). +Le(cr118_0,i4840). +Le(cr118_0,i5060). +Le(cr118_0,i5280). +Le(cr118_0,i5500). +Le(cr118_0,i5720). +Le(cr118_0,i5940). +Le(cr118_0,i6160). +Le(cr118_0,i6380). +Le(cr118_0,i6600). +Le(cr118_0,i6820). +Le(cr118_0,i7040). +Le(cr118_0,i7260). +Le(cr118_0,i7480). +Le(cr118_0,i7700). 
+Le(cr118_0,i7920). +Le(cr118_0,i8140). +Le(cr118_0,i8360). +Le(cr118_0,i8580). +Eq(i3400,i3400). +Le(i3400,cr119_0). +Le(cr119_0,i3430). +Le(i-30,cr119_0). +Le(i0,cr119_0). +Le(i13,cr119_0). +Le(i26,cr119_0). +Le(i39,cr119_0). +Le(i52,cr119_0). +Le(i60,cr119_0). +Le(i65,cr119_0). +Le(i70,cr119_0). +Le(i78,cr119_0). +Le(i90,cr119_0). +Le(i91,cr119_0). +Le(i104,cr119_0). +Le(i117,cr119_0). +Le(i130,cr119_0). +Le(i143,cr119_0). +Le(i156,cr119_0). +Le(i169,cr119_0). +Le(i182,cr119_0). +Le(i195,cr119_0). +Le(i208,cr119_0). +Le(i221,cr119_0). +Le(i234,cr119_0). +Le(i247,cr119_0). +Le(i260,cr119_0). +Le(i460,cr119_0). +Le(i530,cr119_0). +Le(i600,cr119_0). +Le(i660,cr119_0). +Le(i670,cr119_0). +Le(i710,cr119_0). +Le(i740,cr119_0). +Le(i810,cr119_0). +Le(i850,cr119_0). +Le(i880,cr119_0). +Le(i890,cr119_0). +Le(i920,cr119_0). +Le(i960,cr119_0). +Le(i990,cr119_0). +Le(i1030,cr119_0). +Le(i1060,cr119_0). +Le(i1100,cr119_0). +Le(i1130,cr119_0). +Le(i1170,cr119_0). +Le(i1200,cr119_0). +Le(i1240,cr119_0). +Le(i1260,cr119_0). +Le(i1270,cr119_0). +Le(i1290,cr119_0). +Le(i1310,cr119_0). +Le(i1320,cr119_0). +Le(i1330,cr119_0). +Le(i1350,cr119_0). +Le(i1360,cr119_0). +Le(i1380,cr119_0). +Le(i1390,cr119_0). +Le(i1420,cr119_0). +Le(i1430,cr119_0). +Le(i1450,cr119_0). +Le(i1460,cr119_0). +Le(i1490,cr119_0). +Le(i1520,cr119_0). +Le(i1530,cr119_0). +Le(i1540,cr119_0). +Le(i1560,cr119_0). +Le(i1590,cr119_0). +Le(i1630,cr119_0). +Le(i1660,cr119_0). +Le(i1700,cr119_0). +Le(i1730,cr119_0). +Le(i1760,cr119_0). +Le(i1770,cr119_0). +Le(i1810,cr119_0). +Le(i1840,cr119_0). +Le(i1880,cr119_0). +Le(i1910,cr119_0). +Le(i1950,cr119_0). +Le(i1980,cr119_0). +Le(i2020,cr119_0). +Le(i2050,cr119_0). +Le(i2090,cr119_0). +Le(i2120,cr119_0). +Le(i2160,cr119_0). +Le(i2190,cr119_0). +Le(i2200,cr119_0). +Le(i2230,cr119_0). +Le(i2270,cr119_0). +Le(i2300,cr119_0). +Le(i2340,cr119_0). +Le(i2370,cr119_0). +Le(i2410,cr119_0). +Le(i2420,cr119_0). +Le(i2440,cr119_0). +Le(i2480,cr119_0). +Le(i2510,cr119_0). +Le(i2550,cr119_0). +Le(i2580,cr119_0). +Le(i2620,cr119_0). +Le(i2640,cr119_0). +Le(i2660,cr119_0). +Le(i2730,cr119_0). +Le(i2760,cr119_0). +Le(i2800,cr119_0). +Le(i2830,cr119_0). +Le(i2860,cr119_0). +Le(i2870,cr119_0). +Le(i2940,cr119_0). +Le(i2970,cr119_0). +Le(i3010,cr119_0). +Le(i3040,cr119_0). +Le(i3080,cr119_0). +Le(i3120,cr119_0). +Le(i3150,cr119_0). +Le(i3220,cr119_0). +Le(i3260,cr119_0). +Le(i3290,cr119_0). +Le(i3300,cr119_0). +Le(i3330,cr119_0). +Le(cr119_0,i3500). +Le(cr119_0,i3520). +Le(cr119_0,i3580). +Le(cr119_0,i3610). +Le(cr119_0,i3650). +Le(cr119_0,i3680). +Le(cr119_0,i3720). +Le(cr119_0,i3740). +Le(cr119_0,i3790). +Le(cr119_0,i3820). +Le(cr119_0,i3860). +Le(cr119_0,i3960). +Le(cr119_0,i4040). +Le(cr119_0,i4140). +Le(cr119_0,i4180). +Le(cr119_0,i4400). +Le(cr119_0,i4620). +Le(cr119_0,i4840). +Le(cr119_0,i5060). +Le(cr119_0,i5280). +Le(cr119_0,i5500). +Le(cr119_0,i5720). +Le(cr119_0,i5940). +Le(cr119_0,i6160). +Le(cr119_0,i6380). +Le(cr119_0,i6600). +Le(cr119_0,i6820). +Le(cr119_0,i7040). +Le(cr119_0,i7260). +Le(cr119_0,i7480). +Le(cr119_0,i7700). +Le(cr119_0,i7920). +Le(cr119_0,i8140). +Le(cr119_0,i8360). +Le(cr119_0,i8580). +Eq(i3430,i3430). +Le(i3430,cr120_0). +Le(cr120_0,i3500). +Le(i-30,cr120_0). +Le(i0,cr120_0). +Le(i13,cr120_0). +Le(i26,cr120_0). +Le(i39,cr120_0). +Le(i52,cr120_0). +Le(i60,cr120_0). +Le(i65,cr120_0). +Le(i70,cr120_0). +Le(i78,cr120_0). +Le(i90,cr120_0). +Le(i91,cr120_0). +Le(i104,cr120_0). +Le(i117,cr120_0). +Le(i130,cr120_0). +Le(i143,cr120_0). +Le(i156,cr120_0). +Le(i169,cr120_0). +Le(i182,cr120_0). 
+Le(i195,cr120_0). +Le(i208,cr120_0). +Le(i221,cr120_0). +Le(i234,cr120_0). +Le(i247,cr120_0). +Le(i260,cr120_0). +Le(i460,cr120_0). +Le(i530,cr120_0). +Le(i600,cr120_0). +Le(i660,cr120_0). +Le(i670,cr120_0). +Le(i710,cr120_0). +Le(i740,cr120_0). +Le(i810,cr120_0). +Le(i850,cr120_0). +Le(i880,cr120_0). +Le(i890,cr120_0). +Le(i920,cr120_0). +Le(i960,cr120_0). +Le(i990,cr120_0). +Le(i1030,cr120_0). +Le(i1060,cr120_0). +Le(i1100,cr120_0). +Le(i1130,cr120_0). +Le(i1170,cr120_0). +Le(i1200,cr120_0). +Le(i1240,cr120_0). +Le(i1260,cr120_0). +Le(i1270,cr120_0). +Le(i1290,cr120_0). +Le(i1310,cr120_0). +Le(i1320,cr120_0). +Le(i1330,cr120_0). +Le(i1350,cr120_0). +Le(i1360,cr120_0). +Le(i1380,cr120_0). +Le(i1390,cr120_0). +Le(i1420,cr120_0). +Le(i1430,cr120_0). +Le(i1450,cr120_0). +Le(i1460,cr120_0). +Le(i1490,cr120_0). +Le(i1520,cr120_0). +Le(i1530,cr120_0). +Le(i1540,cr120_0). +Le(i1560,cr120_0). +Le(i1590,cr120_0). +Le(i1630,cr120_0). +Le(i1660,cr120_0). +Le(i1700,cr120_0). +Le(i1730,cr120_0). +Le(i1760,cr120_0). +Le(i1770,cr120_0). +Le(i1810,cr120_0). +Le(i1840,cr120_0). +Le(i1880,cr120_0). +Le(i1910,cr120_0). +Le(i1950,cr120_0). +Le(i1980,cr120_0). +Le(i2020,cr120_0). +Le(i2050,cr120_0). +Le(i2090,cr120_0). +Le(i2120,cr120_0). +Le(i2160,cr120_0). +Le(i2190,cr120_0). +Le(i2200,cr120_0). +Le(i2230,cr120_0). +Le(i2270,cr120_0). +Le(i2300,cr120_0). +Le(i2340,cr120_0). +Le(i2370,cr120_0). +Le(i2410,cr120_0). +Le(i2420,cr120_0). +Le(i2440,cr120_0). +Le(i2480,cr120_0). +Le(i2510,cr120_0). +Le(i2550,cr120_0). +Le(i2580,cr120_0). +Le(i2620,cr120_0). +Le(i2640,cr120_0). +Le(i2660,cr120_0). +Le(i2730,cr120_0). +Le(i2760,cr120_0). +Le(i2800,cr120_0). +Le(i2830,cr120_0). +Le(i2860,cr120_0). +Le(i2870,cr120_0). +Le(i2940,cr120_0). +Le(i2970,cr120_0). +Le(i3010,cr120_0). +Le(i3040,cr120_0). +Le(i3080,cr120_0). +Le(i3120,cr120_0). +Le(i3150,cr120_0). +Le(i3220,cr120_0). +Le(i3260,cr120_0). +Le(i3290,cr120_0). +Le(i3300,cr120_0). +Le(i3330,cr120_0). +Le(i3400,cr120_0). +Le(cr120_0,i3520). +Le(cr120_0,i3580). +Le(cr120_0,i3610). +Le(cr120_0,i3650). +Le(cr120_0,i3680). +Le(cr120_0,i3720). +Le(cr120_0,i3740). +Le(cr120_0,i3790). +Le(cr120_0,i3820). +Le(cr120_0,i3860). +Le(cr120_0,i3960). +Le(cr120_0,i4040). +Le(cr120_0,i4140). +Le(cr120_0,i4180). +Le(cr120_0,i4400). +Le(cr120_0,i4620). +Le(cr120_0,i4840). +Le(cr120_0,i5060). +Le(cr120_0,i5280). +Le(cr120_0,i5500). +Le(cr120_0,i5720). +Le(cr120_0,i5940). +Le(cr120_0,i6160). +Le(cr120_0,i6380). +Le(cr120_0,i6600). +Le(cr120_0,i6820). +Le(cr120_0,i7040). +Le(cr120_0,i7260). +Le(cr120_0,i7480). +Le(cr120_0,i7700). +Le(cr120_0,i7920). +Le(cr120_0,i8140). +Le(cr120_0,i8360). +Le(cr120_0,i8580). +Eq(i3500,i3500). +Le(i3500,cr121_0). +Le(cr121_0,i3520). +Le(i-30,cr121_0). +Le(i0,cr121_0). +Le(i13,cr121_0). +Le(i26,cr121_0). +Le(i39,cr121_0). +Le(i52,cr121_0). +Le(i60,cr121_0). +Le(i65,cr121_0). +Le(i70,cr121_0). +Le(i78,cr121_0). +Le(i90,cr121_0). +Le(i91,cr121_0). +Le(i104,cr121_0). +Le(i117,cr121_0). +Le(i130,cr121_0). +Le(i143,cr121_0). +Le(i156,cr121_0). +Le(i169,cr121_0). +Le(i182,cr121_0). +Le(i195,cr121_0). +Le(i208,cr121_0). +Le(i221,cr121_0). +Le(i234,cr121_0). +Le(i247,cr121_0). +Le(i260,cr121_0). +Le(i460,cr121_0). +Le(i530,cr121_0). +Le(i600,cr121_0). +Le(i660,cr121_0). +Le(i670,cr121_0). +Le(i710,cr121_0). +Le(i740,cr121_0). +Le(i810,cr121_0). +Le(i850,cr121_0). +Le(i880,cr121_0). +Le(i890,cr121_0). +Le(i920,cr121_0). +Le(i960,cr121_0). +Le(i990,cr121_0). +Le(i1030,cr121_0). +Le(i1060,cr121_0). +Le(i1100,cr121_0). +Le(i1130,cr121_0). +Le(i1170,cr121_0). 
+Le(i1200,cr121_0). +Le(i1240,cr121_0). +Le(i1260,cr121_0). +Le(i1270,cr121_0). +Le(i1290,cr121_0). +Le(i1310,cr121_0). +Le(i1320,cr121_0). +Le(i1330,cr121_0). +Le(i1350,cr121_0). +Le(i1360,cr121_0). +Le(i1380,cr121_0). +Le(i1390,cr121_0). +Le(i1420,cr121_0). +Le(i1430,cr121_0). +Le(i1450,cr121_0). +Le(i1460,cr121_0). +Le(i1490,cr121_0). +Le(i1520,cr121_0). +Le(i1530,cr121_0). +Le(i1540,cr121_0). +Le(i1560,cr121_0). +Le(i1590,cr121_0). +Le(i1630,cr121_0). +Le(i1660,cr121_0). +Le(i1700,cr121_0). +Le(i1730,cr121_0). +Le(i1760,cr121_0). +Le(i1770,cr121_0). +Le(i1810,cr121_0). +Le(i1840,cr121_0). +Le(i1880,cr121_0). +Le(i1910,cr121_0). +Le(i1950,cr121_0). +Le(i1980,cr121_0). +Le(i2020,cr121_0). +Le(i2050,cr121_0). +Le(i2090,cr121_0). +Le(i2120,cr121_0). +Le(i2160,cr121_0). +Le(i2190,cr121_0). +Le(i2200,cr121_0). +Le(i2230,cr121_0). +Le(i2270,cr121_0). +Le(i2300,cr121_0). +Le(i2340,cr121_0). +Le(i2370,cr121_0). +Le(i2410,cr121_0). +Le(i2420,cr121_0). +Le(i2440,cr121_0). +Le(i2480,cr121_0). +Le(i2510,cr121_0). +Le(i2550,cr121_0). +Le(i2580,cr121_0). +Le(i2620,cr121_0). +Le(i2640,cr121_0). +Le(i2660,cr121_0). +Le(i2730,cr121_0). +Le(i2760,cr121_0). +Le(i2800,cr121_0). +Le(i2830,cr121_0). +Le(i2860,cr121_0). +Le(i2870,cr121_0). +Le(i2940,cr121_0). +Le(i2970,cr121_0). +Le(i3010,cr121_0). +Le(i3040,cr121_0). +Le(i3080,cr121_0). +Le(i3120,cr121_0). +Le(i3150,cr121_0). +Le(i3220,cr121_0). +Le(i3260,cr121_0). +Le(i3290,cr121_0). +Le(i3300,cr121_0). +Le(i3330,cr121_0). +Le(i3400,cr121_0). +Le(i3430,cr121_0). +Le(cr121_0,i3580). +Le(cr121_0,i3610). +Le(cr121_0,i3650). +Le(cr121_0,i3680). +Le(cr121_0,i3720). +Le(cr121_0,i3740). +Le(cr121_0,i3790). +Le(cr121_0,i3820). +Le(cr121_0,i3860). +Le(cr121_0,i3960). +Le(cr121_0,i4040). +Le(cr121_0,i4140). +Le(cr121_0,i4180). +Le(cr121_0,i4400). +Le(cr121_0,i4620). +Le(cr121_0,i4840). +Le(cr121_0,i5060). +Le(cr121_0,i5280). +Le(cr121_0,i5500). +Le(cr121_0,i5720). +Le(cr121_0,i5940). +Le(cr121_0,i6160). +Le(cr121_0,i6380). +Le(cr121_0,i6600). +Le(cr121_0,i6820). +Le(cr121_0,i7040). +Le(cr121_0,i7260). +Le(cr121_0,i7480). +Le(cr121_0,i7700). +Le(cr121_0,i7920). +Le(cr121_0,i8140). +Le(cr121_0,i8360). +Le(cr121_0,i8580). +Eq(i3520,i3520). +Le(i3520,cr122_0). +Le(cr122_0,i3580). +Le(i-30,cr122_0). +Le(i0,cr122_0). +Le(i13,cr122_0). +Le(i26,cr122_0). +Le(i39,cr122_0). +Le(i52,cr122_0). +Le(i60,cr122_0). +Le(i65,cr122_0). +Le(i70,cr122_0). +Le(i78,cr122_0). +Le(i90,cr122_0). +Le(i91,cr122_0). +Le(i104,cr122_0). +Le(i117,cr122_0). +Le(i130,cr122_0). +Le(i143,cr122_0). +Le(i156,cr122_0). +Le(i169,cr122_0). +Le(i182,cr122_0). +Le(i195,cr122_0). +Le(i208,cr122_0). +Le(i221,cr122_0). +Le(i234,cr122_0). +Le(i247,cr122_0). +Le(i260,cr122_0). +Le(i460,cr122_0). +Le(i530,cr122_0). +Le(i600,cr122_0). +Le(i660,cr122_0). +Le(i670,cr122_0). +Le(i710,cr122_0). +Le(i740,cr122_0). +Le(i810,cr122_0). +Le(i850,cr122_0). +Le(i880,cr122_0). +Le(i890,cr122_0). +Le(i920,cr122_0). +Le(i960,cr122_0). +Le(i990,cr122_0). +Le(i1030,cr122_0). +Le(i1060,cr122_0). +Le(i1100,cr122_0). +Le(i1130,cr122_0). +Le(i1170,cr122_0). +Le(i1200,cr122_0). +Le(i1240,cr122_0). +Le(i1260,cr122_0). +Le(i1270,cr122_0). +Le(i1290,cr122_0). +Le(i1310,cr122_0). +Le(i1320,cr122_0). +Le(i1330,cr122_0). +Le(i1350,cr122_0). +Le(i1360,cr122_0). +Le(i1380,cr122_0). +Le(i1390,cr122_0). +Le(i1420,cr122_0). +Le(i1430,cr122_0). +Le(i1450,cr122_0). +Le(i1460,cr122_0). +Le(i1490,cr122_0). +Le(i1520,cr122_0). +Le(i1530,cr122_0). +Le(i1540,cr122_0). +Le(i1560,cr122_0). +Le(i1590,cr122_0). +Le(i1630,cr122_0). +Le(i1660,cr122_0). 
+Le(i1700,cr122_0). +Le(i1730,cr122_0). +Le(i1760,cr122_0). +Le(i1770,cr122_0). +Le(i1810,cr122_0). +Le(i1840,cr122_0). +Le(i1880,cr122_0). +Le(i1910,cr122_0). +Le(i1950,cr122_0). +Le(i1980,cr122_0). +Le(i2020,cr122_0). +Le(i2050,cr122_0). +Le(i2090,cr122_0). +Le(i2120,cr122_0). +Le(i2160,cr122_0). +Le(i2190,cr122_0). +Le(i2200,cr122_0). +Le(i2230,cr122_0). +Le(i2270,cr122_0). +Le(i2300,cr122_0). +Le(i2340,cr122_0). +Le(i2370,cr122_0). +Le(i2410,cr122_0). +Le(i2420,cr122_0). +Le(i2440,cr122_0). +Le(i2480,cr122_0). +Le(i2510,cr122_0). +Le(i2550,cr122_0). +Le(i2580,cr122_0). +Le(i2620,cr122_0). +Le(i2640,cr122_0). +Le(i2660,cr122_0). +Le(i2730,cr122_0). +Le(i2760,cr122_0). +Le(i2800,cr122_0). +Le(i2830,cr122_0). +Le(i2860,cr122_0). +Le(i2870,cr122_0). +Le(i2940,cr122_0). +Le(i2970,cr122_0). +Le(i3010,cr122_0). +Le(i3040,cr122_0). +Le(i3080,cr122_0). +Le(i3120,cr122_0). +Le(i3150,cr122_0). +Le(i3220,cr122_0). +Le(i3260,cr122_0). +Le(i3290,cr122_0). +Le(i3300,cr122_0). +Le(i3330,cr122_0). +Le(i3400,cr122_0). +Le(i3430,cr122_0). +Le(i3500,cr122_0). +Le(cr122_0,i3610). +Le(cr122_0,i3650). +Le(cr122_0,i3680). +Le(cr122_0,i3720). +Le(cr122_0,i3740). +Le(cr122_0,i3790). +Le(cr122_0,i3820). +Le(cr122_0,i3860). +Le(cr122_0,i3960). +Le(cr122_0,i4040). +Le(cr122_0,i4140). +Le(cr122_0,i4180). +Le(cr122_0,i4400). +Le(cr122_0,i4620). +Le(cr122_0,i4840). +Le(cr122_0,i5060). +Le(cr122_0,i5280). +Le(cr122_0,i5500). +Le(cr122_0,i5720). +Le(cr122_0,i5940). +Le(cr122_0,i6160). +Le(cr122_0,i6380). +Le(cr122_0,i6600). +Le(cr122_0,i6820). +Le(cr122_0,i7040). +Le(cr122_0,i7260). +Le(cr122_0,i7480). +Le(cr122_0,i7700). +Le(cr122_0,i7920). +Le(cr122_0,i8140). +Le(cr122_0,i8360). +Le(cr122_0,i8580). +Eq(i3580,i3580). +Le(i3580,cr123_0). +Le(cr123_0,i3610). +Le(i-30,cr123_0). +Le(i0,cr123_0). +Le(i13,cr123_0). +Le(i26,cr123_0). +Le(i39,cr123_0). +Le(i52,cr123_0). +Le(i60,cr123_0). +Le(i65,cr123_0). +Le(i70,cr123_0). +Le(i78,cr123_0). +Le(i90,cr123_0). +Le(i91,cr123_0). +Le(i104,cr123_0). +Le(i117,cr123_0). +Le(i130,cr123_0). +Le(i143,cr123_0). +Le(i156,cr123_0). +Le(i169,cr123_0). +Le(i182,cr123_0). +Le(i195,cr123_0). +Le(i208,cr123_0). +Le(i221,cr123_0). +Le(i234,cr123_0). +Le(i247,cr123_0). +Le(i260,cr123_0). +Le(i460,cr123_0). +Le(i530,cr123_0). +Le(i600,cr123_0). +Le(i660,cr123_0). +Le(i670,cr123_0). +Le(i710,cr123_0). +Le(i740,cr123_0). +Le(i810,cr123_0). +Le(i850,cr123_0). +Le(i880,cr123_0). +Le(i890,cr123_0). +Le(i920,cr123_0). +Le(i960,cr123_0). +Le(i990,cr123_0). +Le(i1030,cr123_0). +Le(i1060,cr123_0). +Le(i1100,cr123_0). +Le(i1130,cr123_0). +Le(i1170,cr123_0). +Le(i1200,cr123_0). +Le(i1240,cr123_0). +Le(i1260,cr123_0). +Le(i1270,cr123_0). +Le(i1290,cr123_0). +Le(i1310,cr123_0). +Le(i1320,cr123_0). +Le(i1330,cr123_0). +Le(i1350,cr123_0). +Le(i1360,cr123_0). +Le(i1380,cr123_0). +Le(i1390,cr123_0). +Le(i1420,cr123_0). +Le(i1430,cr123_0). +Le(i1450,cr123_0). +Le(i1460,cr123_0). +Le(i1490,cr123_0). +Le(i1520,cr123_0). +Le(i1530,cr123_0). +Le(i1540,cr123_0). +Le(i1560,cr123_0). +Le(i1590,cr123_0). +Le(i1630,cr123_0). +Le(i1660,cr123_0). +Le(i1700,cr123_0). +Le(i1730,cr123_0). +Le(i1760,cr123_0). +Le(i1770,cr123_0). +Le(i1810,cr123_0). +Le(i1840,cr123_0). +Le(i1880,cr123_0). +Le(i1910,cr123_0). +Le(i1950,cr123_0). +Le(i1980,cr123_0). +Le(i2020,cr123_0). +Le(i2050,cr123_0). +Le(i2090,cr123_0). +Le(i2120,cr123_0). +Le(i2160,cr123_0). +Le(i2190,cr123_0). +Le(i2200,cr123_0). +Le(i2230,cr123_0). +Le(i2270,cr123_0). +Le(i2300,cr123_0). +Le(i2340,cr123_0). +Le(i2370,cr123_0). +Le(i2410,cr123_0). +Le(i2420,cr123_0). 
+Le(i2440,cr123_0). +Le(i2480,cr123_0). +Le(i2510,cr123_0). +Le(i2550,cr123_0). +Le(i2580,cr123_0). +Le(i2620,cr123_0). +Le(i2640,cr123_0). +Le(i2660,cr123_0). +Le(i2730,cr123_0). +Le(i2760,cr123_0). +Le(i2800,cr123_0). +Le(i2830,cr123_0). +Le(i2860,cr123_0). +Le(i2870,cr123_0). +Le(i2940,cr123_0). +Le(i2970,cr123_0). +Le(i3010,cr123_0). +Le(i3040,cr123_0). +Le(i3080,cr123_0). +Le(i3120,cr123_0). +Le(i3150,cr123_0). +Le(i3220,cr123_0). +Le(i3260,cr123_0). +Le(i3290,cr123_0). +Le(i3300,cr123_0). +Le(i3330,cr123_0). +Le(i3400,cr123_0). +Le(i3430,cr123_0). +Le(i3500,cr123_0). +Le(i3520,cr123_0). +Le(cr123_0,i3650). +Le(cr123_0,i3680). +Le(cr123_0,i3720). +Le(cr123_0,i3740). +Le(cr123_0,i3790). +Le(cr123_0,i3820). +Le(cr123_0,i3860). +Le(cr123_0,i3960). +Le(cr123_0,i4040). +Le(cr123_0,i4140). +Le(cr123_0,i4180). +Le(cr123_0,i4400). +Le(cr123_0,i4620). +Le(cr123_0,i4840). +Le(cr123_0,i5060). +Le(cr123_0,i5280). +Le(cr123_0,i5500). +Le(cr123_0,i5720). +Le(cr123_0,i5940). +Le(cr123_0,i6160). +Le(cr123_0,i6380). +Le(cr123_0,i6600). +Le(cr123_0,i6820). +Le(cr123_0,i7040). +Le(cr123_0,i7260). +Le(cr123_0,i7480). +Le(cr123_0,i7700). +Le(cr123_0,i7920). +Le(cr123_0,i8140). +Le(cr123_0,i8360). +Le(cr123_0,i8580). +Eq(i3610,i3610). +Le(i3610,cr124_0). +Le(cr124_0,i3650). +Le(i-30,cr124_0). +Le(i0,cr124_0). +Le(i13,cr124_0). +Le(i26,cr124_0). +Le(i39,cr124_0). +Le(i52,cr124_0). +Le(i60,cr124_0). +Le(i65,cr124_0). +Le(i70,cr124_0). +Le(i78,cr124_0). +Le(i90,cr124_0). +Le(i91,cr124_0). +Le(i104,cr124_0). +Le(i117,cr124_0). +Le(i130,cr124_0). +Le(i143,cr124_0). +Le(i156,cr124_0). +Le(i169,cr124_0). +Le(i182,cr124_0). +Le(i195,cr124_0). +Le(i208,cr124_0). +Le(i221,cr124_0). +Le(i234,cr124_0). +Le(i247,cr124_0). +Le(i260,cr124_0). +Le(i460,cr124_0). +Le(i530,cr124_0). +Le(i600,cr124_0). +Le(i660,cr124_0). +Le(i670,cr124_0). +Le(i710,cr124_0). +Le(i740,cr124_0). +Le(i810,cr124_0). +Le(i850,cr124_0). +Le(i880,cr124_0). +Le(i890,cr124_0). +Le(i920,cr124_0). +Le(i960,cr124_0). +Le(i990,cr124_0). +Le(i1030,cr124_0). +Le(i1060,cr124_0). +Le(i1100,cr124_0). +Le(i1130,cr124_0). +Le(i1170,cr124_0). +Le(i1200,cr124_0). +Le(i1240,cr124_0). +Le(i1260,cr124_0). +Le(i1270,cr124_0). +Le(i1290,cr124_0). +Le(i1310,cr124_0). +Le(i1320,cr124_0). +Le(i1330,cr124_0). +Le(i1350,cr124_0). +Le(i1360,cr124_0). +Le(i1380,cr124_0). +Le(i1390,cr124_0). +Le(i1420,cr124_0). +Le(i1430,cr124_0). +Le(i1450,cr124_0). +Le(i1460,cr124_0). +Le(i1490,cr124_0). +Le(i1520,cr124_0). +Le(i1530,cr124_0). +Le(i1540,cr124_0). +Le(i1560,cr124_0). +Le(i1590,cr124_0). +Le(i1630,cr124_0). +Le(i1660,cr124_0). +Le(i1700,cr124_0). +Le(i1730,cr124_0). +Le(i1760,cr124_0). +Le(i1770,cr124_0). +Le(i1810,cr124_0). +Le(i1840,cr124_0). +Le(i1880,cr124_0). +Le(i1910,cr124_0). +Le(i1950,cr124_0). +Le(i1980,cr124_0). +Le(i2020,cr124_0). +Le(i2050,cr124_0). +Le(i2090,cr124_0). +Le(i2120,cr124_0). +Le(i2160,cr124_0). +Le(i2190,cr124_0). +Le(i2200,cr124_0). +Le(i2230,cr124_0). +Le(i2270,cr124_0). +Le(i2300,cr124_0). +Le(i2340,cr124_0). +Le(i2370,cr124_0). +Le(i2410,cr124_0). +Le(i2420,cr124_0). +Le(i2440,cr124_0). +Le(i2480,cr124_0). +Le(i2510,cr124_0). +Le(i2550,cr124_0). +Le(i2580,cr124_0). +Le(i2620,cr124_0). +Le(i2640,cr124_0). +Le(i2660,cr124_0). +Le(i2730,cr124_0). +Le(i2760,cr124_0). +Le(i2800,cr124_0). +Le(i2830,cr124_0). +Le(i2860,cr124_0). +Le(i2870,cr124_0). +Le(i2940,cr124_0). +Le(i2970,cr124_0). +Le(i3010,cr124_0). +Le(i3040,cr124_0). +Le(i3080,cr124_0). +Le(i3120,cr124_0). +Le(i3150,cr124_0). +Le(i3220,cr124_0). +Le(i3260,cr124_0). +Le(i3290,cr124_0). 
+Le(i3300,cr124_0). +Le(i3330,cr124_0). +Le(i3400,cr124_0). +Le(i3430,cr124_0). +Le(i3500,cr124_0). +Le(i3520,cr124_0). +Le(i3580,cr124_0). +Le(cr124_0,i3680). +Le(cr124_0,i3720). +Le(cr124_0,i3740). +Le(cr124_0,i3790). +Le(cr124_0,i3820). +Le(cr124_0,i3860). +Le(cr124_0,i3960). +Le(cr124_0,i4040). +Le(cr124_0,i4140). +Le(cr124_0,i4180). +Le(cr124_0,i4400). +Le(cr124_0,i4620). +Le(cr124_0,i4840). +Le(cr124_0,i5060). +Le(cr124_0,i5280). +Le(cr124_0,i5500). +Le(cr124_0,i5720). +Le(cr124_0,i5940). +Le(cr124_0,i6160). +Le(cr124_0,i6380). +Le(cr124_0,i6600). +Le(cr124_0,i6820). +Le(cr124_0,i7040). +Le(cr124_0,i7260). +Le(cr124_0,i7480). +Le(cr124_0,i7700). +Le(cr124_0,i7920). +Le(cr124_0,i8140). +Le(cr124_0,i8360). +Le(cr124_0,i8580). +Eq(i3650,i3650). +Le(i3650,cr125_0). +Le(cr125_0,i3680). +Le(i-30,cr125_0). +Le(i0,cr125_0). +Le(i13,cr125_0). +Le(i26,cr125_0). +Le(i39,cr125_0). +Le(i52,cr125_0). +Le(i60,cr125_0). +Le(i65,cr125_0). +Le(i70,cr125_0). +Le(i78,cr125_0). +Le(i90,cr125_0). +Le(i91,cr125_0). +Le(i104,cr125_0). +Le(i117,cr125_0). +Le(i130,cr125_0). +Le(i143,cr125_0). +Le(i156,cr125_0). +Le(i169,cr125_0). +Le(i182,cr125_0). +Le(i195,cr125_0). +Le(i208,cr125_0). +Le(i221,cr125_0). +Le(i234,cr125_0). +Le(i247,cr125_0). +Le(i260,cr125_0). +Le(i460,cr125_0). +Le(i530,cr125_0). +Le(i600,cr125_0). +Le(i660,cr125_0). +Le(i670,cr125_0). +Le(i710,cr125_0). +Le(i740,cr125_0). +Le(i810,cr125_0). +Le(i850,cr125_0). +Le(i880,cr125_0). +Le(i890,cr125_0). +Le(i920,cr125_0). +Le(i960,cr125_0). +Le(i990,cr125_0). +Le(i1030,cr125_0). +Le(i1060,cr125_0). +Le(i1100,cr125_0). +Le(i1130,cr125_0). +Le(i1170,cr125_0). +Le(i1200,cr125_0). +Le(i1240,cr125_0). +Le(i1260,cr125_0). +Le(i1270,cr125_0). +Le(i1290,cr125_0). +Le(i1310,cr125_0). +Le(i1320,cr125_0). +Le(i1330,cr125_0). +Le(i1350,cr125_0). +Le(i1360,cr125_0). +Le(i1380,cr125_0). +Le(i1390,cr125_0). +Le(i1420,cr125_0). +Le(i1430,cr125_0). +Le(i1450,cr125_0). +Le(i1460,cr125_0). +Le(i1490,cr125_0). +Le(i1520,cr125_0). +Le(i1530,cr125_0). +Le(i1540,cr125_0). +Le(i1560,cr125_0). +Le(i1590,cr125_0). +Le(i1630,cr125_0). +Le(i1660,cr125_0). +Le(i1700,cr125_0). +Le(i1730,cr125_0). +Le(i1760,cr125_0). +Le(i1770,cr125_0). +Le(i1810,cr125_0). +Le(i1840,cr125_0). +Le(i1880,cr125_0). +Le(i1910,cr125_0). +Le(i1950,cr125_0). +Le(i1980,cr125_0). +Le(i2020,cr125_0). +Le(i2050,cr125_0). +Le(i2090,cr125_0). +Le(i2120,cr125_0). +Le(i2160,cr125_0). +Le(i2190,cr125_0). +Le(i2200,cr125_0). +Le(i2230,cr125_0). +Le(i2270,cr125_0). +Le(i2300,cr125_0). +Le(i2340,cr125_0). +Le(i2370,cr125_0). +Le(i2410,cr125_0). +Le(i2420,cr125_0). +Le(i2440,cr125_0). +Le(i2480,cr125_0). +Le(i2510,cr125_0). +Le(i2550,cr125_0). +Le(i2580,cr125_0). +Le(i2620,cr125_0). +Le(i2640,cr125_0). +Le(i2660,cr125_0). +Le(i2730,cr125_0). +Le(i2760,cr125_0). +Le(i2800,cr125_0). +Le(i2830,cr125_0). +Le(i2860,cr125_0). +Le(i2870,cr125_0). +Le(i2940,cr125_0). +Le(i2970,cr125_0). +Le(i3010,cr125_0). +Le(i3040,cr125_0). +Le(i3080,cr125_0). +Le(i3120,cr125_0). +Le(i3150,cr125_0). +Le(i3220,cr125_0). +Le(i3260,cr125_0). +Le(i3290,cr125_0). +Le(i3300,cr125_0). +Le(i3330,cr125_0). +Le(i3400,cr125_0). +Le(i3430,cr125_0). +Le(i3500,cr125_0). +Le(i3520,cr125_0). +Le(i3580,cr125_0). +Le(i3610,cr125_0). +Le(cr125_0,i3720). +Le(cr125_0,i3740). +Le(cr125_0,i3790). +Le(cr125_0,i3820). +Le(cr125_0,i3860). +Le(cr125_0,i3960). +Le(cr125_0,i4040). +Le(cr125_0,i4140). +Le(cr125_0,i4180). +Le(cr125_0,i4400). +Le(cr125_0,i4620). +Le(cr125_0,i4840). +Le(cr125_0,i5060). +Le(cr125_0,i5280). +Le(cr125_0,i5500). +Le(cr125_0,i5720). 
+Le(cr125_0,i5940). +Le(cr125_0,i6160). +Le(cr125_0,i6380). +Le(cr125_0,i6600). +Le(cr125_0,i6820). +Le(cr125_0,i7040). +Le(cr125_0,i7260). +Le(cr125_0,i7480). +Le(cr125_0,i7700). +Le(cr125_0,i7920). +Le(cr125_0,i8140). +Le(cr125_0,i8360). +Le(cr125_0,i8580). +Eq(i3680,i3680). +Le(i3680,cr126_0). +Le(cr126_0,i3720). +Le(i-30,cr126_0). +Le(i0,cr126_0). +Le(i13,cr126_0). +Le(i26,cr126_0). +Le(i39,cr126_0). +Le(i52,cr126_0). +Le(i60,cr126_0). +Le(i65,cr126_0). +Le(i70,cr126_0). +Le(i78,cr126_0). +Le(i90,cr126_0). +Le(i91,cr126_0). +Le(i104,cr126_0). +Le(i117,cr126_0). +Le(i130,cr126_0). +Le(i143,cr126_0). +Le(i156,cr126_0). +Le(i169,cr126_0). +Le(i182,cr126_0). +Le(i195,cr126_0). +Le(i208,cr126_0). +Le(i221,cr126_0). +Le(i234,cr126_0). +Le(i247,cr126_0). +Le(i260,cr126_0). +Le(i460,cr126_0). +Le(i530,cr126_0). +Le(i600,cr126_0). +Le(i660,cr126_0). +Le(i670,cr126_0). +Le(i710,cr126_0). +Le(i740,cr126_0). +Le(i810,cr126_0). +Le(i850,cr126_0). +Le(i880,cr126_0). +Le(i890,cr126_0). +Le(i920,cr126_0). +Le(i960,cr126_0). +Le(i990,cr126_0). +Le(i1030,cr126_0). +Le(i1060,cr126_0). +Le(i1100,cr126_0). +Le(i1130,cr126_0). +Le(i1170,cr126_0). +Le(i1200,cr126_0). +Le(i1240,cr126_0). +Le(i1260,cr126_0). +Le(i1270,cr126_0). +Le(i1290,cr126_0). +Le(i1310,cr126_0). +Le(i1320,cr126_0). +Le(i1330,cr126_0). +Le(i1350,cr126_0). +Le(i1360,cr126_0). +Le(i1380,cr126_0). +Le(i1390,cr126_0). +Le(i1420,cr126_0). +Le(i1430,cr126_0). +Le(i1450,cr126_0). +Le(i1460,cr126_0). +Le(i1490,cr126_0). +Le(i1520,cr126_0). +Le(i1530,cr126_0). +Le(i1540,cr126_0). +Le(i1560,cr126_0). +Le(i1590,cr126_0). +Le(i1630,cr126_0). +Le(i1660,cr126_0). +Le(i1700,cr126_0). +Le(i1730,cr126_0). +Le(i1760,cr126_0). +Le(i1770,cr126_0). +Le(i1810,cr126_0). +Le(i1840,cr126_0). +Le(i1880,cr126_0). +Le(i1910,cr126_0). +Le(i1950,cr126_0). +Le(i1980,cr126_0). +Le(i2020,cr126_0). +Le(i2050,cr126_0). +Le(i2090,cr126_0). +Le(i2120,cr126_0). +Le(i2160,cr126_0). +Le(i2190,cr126_0). +Le(i2200,cr126_0). +Le(i2230,cr126_0). +Le(i2270,cr126_0). +Le(i2300,cr126_0). +Le(i2340,cr126_0). +Le(i2370,cr126_0). +Le(i2410,cr126_0). +Le(i2420,cr126_0). +Le(i2440,cr126_0). +Le(i2480,cr126_0). +Le(i2510,cr126_0). +Le(i2550,cr126_0). +Le(i2580,cr126_0). +Le(i2620,cr126_0). +Le(i2640,cr126_0). +Le(i2660,cr126_0). +Le(i2730,cr126_0). +Le(i2760,cr126_0). +Le(i2800,cr126_0). +Le(i2830,cr126_0). +Le(i2860,cr126_0). +Le(i2870,cr126_0). +Le(i2940,cr126_0). +Le(i2970,cr126_0). +Le(i3010,cr126_0). +Le(i3040,cr126_0). +Le(i3080,cr126_0). +Le(i3120,cr126_0). +Le(i3150,cr126_0). +Le(i3220,cr126_0). +Le(i3260,cr126_0). +Le(i3290,cr126_0). +Le(i3300,cr126_0). +Le(i3330,cr126_0). +Le(i3400,cr126_0). +Le(i3430,cr126_0). +Le(i3500,cr126_0). +Le(i3520,cr126_0). +Le(i3580,cr126_0). +Le(i3610,cr126_0). +Le(i3650,cr126_0). +Le(cr126_0,i3740). +Le(cr126_0,i3790). +Le(cr126_0,i3820). +Le(cr126_0,i3860). +Le(cr126_0,i3960). +Le(cr126_0,i4040). +Le(cr126_0,i4140). +Le(cr126_0,i4180). +Le(cr126_0,i4400). +Le(cr126_0,i4620). +Le(cr126_0,i4840). +Le(cr126_0,i5060). +Le(cr126_0,i5280). +Le(cr126_0,i5500). +Le(cr126_0,i5720). +Le(cr126_0,i5940). +Le(cr126_0,i6160). +Le(cr126_0,i6380). +Le(cr126_0,i6600). +Le(cr126_0,i6820). +Le(cr126_0,i7040). +Le(cr126_0,i7260). +Le(cr126_0,i7480). +Le(cr126_0,i7700). +Le(cr126_0,i7920). +Le(cr126_0,i8140). +Le(cr126_0,i8360). +Le(cr126_0,i8580). +Eq(i3720,i3720). +Le(i3720,cr127_0). +Le(cr127_0,i3740). +Le(i-30,cr127_0). +Le(i0,cr127_0). +Le(i13,cr127_0). +Le(i26,cr127_0). +Le(i39,cr127_0). +Le(i52,cr127_0). +Le(i60,cr127_0). +Le(i65,cr127_0). +Le(i70,cr127_0). 
+Le(i78,cr127_0). +Le(i90,cr127_0). +Le(i91,cr127_0). +Le(i104,cr127_0). +Le(i117,cr127_0). +Le(i130,cr127_0). +Le(i143,cr127_0). +Le(i156,cr127_0). +Le(i169,cr127_0). +Le(i182,cr127_0). +Le(i195,cr127_0). +Le(i208,cr127_0). +Le(i221,cr127_0). +Le(i234,cr127_0). +Le(i247,cr127_0). +Le(i260,cr127_0). +Le(i460,cr127_0). +Le(i530,cr127_0). +Le(i600,cr127_0). +Le(i660,cr127_0). +Le(i670,cr127_0). +Le(i710,cr127_0). +Le(i740,cr127_0). +Le(i810,cr127_0). +Le(i850,cr127_0). +Le(i880,cr127_0). +Le(i890,cr127_0). +Le(i920,cr127_0). +Le(i960,cr127_0). +Le(i990,cr127_0). +Le(i1030,cr127_0). +Le(i1060,cr127_0). +Le(i1100,cr127_0). +Le(i1130,cr127_0). +Le(i1170,cr127_0). +Le(i1200,cr127_0). +Le(i1240,cr127_0). +Le(i1260,cr127_0). +Le(i1270,cr127_0). +Le(i1290,cr127_0). +Le(i1310,cr127_0). +Le(i1320,cr127_0). +Le(i1330,cr127_0). +Le(i1350,cr127_0). +Le(i1360,cr127_0). +Le(i1380,cr127_0). +Le(i1390,cr127_0). +Le(i1420,cr127_0). +Le(i1430,cr127_0). +Le(i1450,cr127_0). +Le(i1460,cr127_0). +Le(i1490,cr127_0). +Le(i1520,cr127_0). +Le(i1530,cr127_0). +Le(i1540,cr127_0). +Le(i1560,cr127_0). +Le(i1590,cr127_0). +Le(i1630,cr127_0). +Le(i1660,cr127_0). +Le(i1700,cr127_0). +Le(i1730,cr127_0). +Le(i1760,cr127_0). +Le(i1770,cr127_0). +Le(i1810,cr127_0). +Le(i1840,cr127_0). +Le(i1880,cr127_0). +Le(i1910,cr127_0). +Le(i1950,cr127_0). +Le(i1980,cr127_0). +Le(i2020,cr127_0). +Le(i2050,cr127_0). +Le(i2090,cr127_0). +Le(i2120,cr127_0). +Le(i2160,cr127_0). +Le(i2190,cr127_0). +Le(i2200,cr127_0). +Le(i2230,cr127_0). +Le(i2270,cr127_0). +Le(i2300,cr127_0). +Le(i2340,cr127_0). +Le(i2370,cr127_0). +Le(i2410,cr127_0). +Le(i2420,cr127_0). +Le(i2440,cr127_0). +Le(i2480,cr127_0). +Le(i2510,cr127_0). +Le(i2550,cr127_0). +Le(i2580,cr127_0). +Le(i2620,cr127_0). +Le(i2640,cr127_0). +Le(i2660,cr127_0). +Le(i2730,cr127_0). +Le(i2760,cr127_0). +Le(i2800,cr127_0). +Le(i2830,cr127_0). +Le(i2860,cr127_0). +Le(i2870,cr127_0). +Le(i2940,cr127_0). +Le(i2970,cr127_0). +Le(i3010,cr127_0). +Le(i3040,cr127_0). +Le(i3080,cr127_0). +Le(i3120,cr127_0). +Le(i3150,cr127_0). +Le(i3220,cr127_0). +Le(i3260,cr127_0). +Le(i3290,cr127_0). +Le(i3300,cr127_0). +Le(i3330,cr127_0). +Le(i3400,cr127_0). +Le(i3430,cr127_0). +Le(i3500,cr127_0). +Le(i3520,cr127_0). +Le(i3580,cr127_0). +Le(i3610,cr127_0). +Le(i3650,cr127_0). +Le(i3680,cr127_0). +Le(cr127_0,i3790). +Le(cr127_0,i3820). +Le(cr127_0,i3860). +Le(cr127_0,i3960). +Le(cr127_0,i4040). +Le(cr127_0,i4140). +Le(cr127_0,i4180). +Le(cr127_0,i4400). +Le(cr127_0,i4620). +Le(cr127_0,i4840). +Le(cr127_0,i5060). +Le(cr127_0,i5280). +Le(cr127_0,i5500). +Le(cr127_0,i5720). +Le(cr127_0,i5940). +Le(cr127_0,i6160). +Le(cr127_0,i6380). +Le(cr127_0,i6600). +Le(cr127_0,i6820). +Le(cr127_0,i7040). +Le(cr127_0,i7260). +Le(cr127_0,i7480). +Le(cr127_0,i7700). +Le(cr127_0,i7920). +Le(cr127_0,i8140). +Le(cr127_0,i8360). +Le(cr127_0,i8580). +Eq(i3740,i3740). +Le(i3740,cr128_0). +Le(cr128_0,i3790). +Le(i-30,cr128_0). +Le(i0,cr128_0). +Le(i13,cr128_0). +Le(i26,cr128_0). +Le(i39,cr128_0). +Le(i52,cr128_0). +Le(i60,cr128_0). +Le(i65,cr128_0). +Le(i70,cr128_0). +Le(i78,cr128_0). +Le(i90,cr128_0). +Le(i91,cr128_0). +Le(i104,cr128_0). +Le(i117,cr128_0). +Le(i130,cr128_0). +Le(i143,cr128_0). +Le(i156,cr128_0). +Le(i169,cr128_0). +Le(i182,cr128_0). +Le(i195,cr128_0). +Le(i208,cr128_0). +Le(i221,cr128_0). +Le(i234,cr128_0). +Le(i247,cr128_0). +Le(i260,cr128_0). +Le(i460,cr128_0). +Le(i530,cr128_0). +Le(i600,cr128_0). +Le(i660,cr128_0). +Le(i670,cr128_0). +Le(i710,cr128_0). +Le(i740,cr128_0). +Le(i810,cr128_0). +Le(i850,cr128_0). 
+Le(i880,cr128_0). +Le(i890,cr128_0). +Le(i920,cr128_0). +Le(i960,cr128_0). +Le(i990,cr128_0). +Le(i1030,cr128_0). +Le(i1060,cr128_0). +Le(i1100,cr128_0). +Le(i1130,cr128_0). +Le(i1170,cr128_0). +Le(i1200,cr128_0). +Le(i1240,cr128_0). +Le(i1260,cr128_0). +Le(i1270,cr128_0). +Le(i1290,cr128_0). +Le(i1310,cr128_0). +Le(i1320,cr128_0). +Le(i1330,cr128_0). +Le(i1350,cr128_0). +Le(i1360,cr128_0). +Le(i1380,cr128_0). +Le(i1390,cr128_0). +Le(i1420,cr128_0). +Le(i1430,cr128_0). +Le(i1450,cr128_0). +Le(i1460,cr128_0). +Le(i1490,cr128_0). +Le(i1520,cr128_0). +Le(i1530,cr128_0). +Le(i1540,cr128_0). +Le(i1560,cr128_0). +Le(i1590,cr128_0). +Le(i1630,cr128_0). +Le(i1660,cr128_0). +Le(i1700,cr128_0). +Le(i1730,cr128_0). +Le(i1760,cr128_0). +Le(i1770,cr128_0). +Le(i1810,cr128_0). +Le(i1840,cr128_0). +Le(i1880,cr128_0). +Le(i1910,cr128_0). +Le(i1950,cr128_0). +Le(i1980,cr128_0). +Le(i2020,cr128_0). +Le(i2050,cr128_0). +Le(i2090,cr128_0). +Le(i2120,cr128_0). +Le(i2160,cr128_0). +Le(i2190,cr128_0). +Le(i2200,cr128_0). +Le(i2230,cr128_0). +Le(i2270,cr128_0). +Le(i2300,cr128_0). +Le(i2340,cr128_0). +Le(i2370,cr128_0). +Le(i2410,cr128_0). +Le(i2420,cr128_0). +Le(i2440,cr128_0). +Le(i2480,cr128_0). +Le(i2510,cr128_0). +Le(i2550,cr128_0). +Le(i2580,cr128_0). +Le(i2620,cr128_0). +Le(i2640,cr128_0). +Le(i2660,cr128_0). +Le(i2730,cr128_0). +Le(i2760,cr128_0). +Le(i2800,cr128_0). +Le(i2830,cr128_0). +Le(i2860,cr128_0). +Le(i2870,cr128_0). +Le(i2940,cr128_0). +Le(i2970,cr128_0). +Le(i3010,cr128_0). +Le(i3040,cr128_0). +Le(i3080,cr128_0). +Le(i3120,cr128_0). +Le(i3150,cr128_0). +Le(i3220,cr128_0). +Le(i3260,cr128_0). +Le(i3290,cr128_0). +Le(i3300,cr128_0). +Le(i3330,cr128_0). +Le(i3400,cr128_0). +Le(i3430,cr128_0). +Le(i3500,cr128_0). +Le(i3520,cr128_0). +Le(i3580,cr128_0). +Le(i3610,cr128_0). +Le(i3650,cr128_0). +Le(i3680,cr128_0). +Le(i3720,cr128_0). +Le(cr128_0,i3820). +Le(cr128_0,i3860). +Le(cr128_0,i3960). +Le(cr128_0,i4040). +Le(cr128_0,i4140). +Le(cr128_0,i4180). +Le(cr128_0,i4400). +Le(cr128_0,i4620). +Le(cr128_0,i4840). +Le(cr128_0,i5060). +Le(cr128_0,i5280). +Le(cr128_0,i5500). +Le(cr128_0,i5720). +Le(cr128_0,i5940). +Le(cr128_0,i6160). +Le(cr128_0,i6380). +Le(cr128_0,i6600). +Le(cr128_0,i6820). +Le(cr128_0,i7040). +Le(cr128_0,i7260). +Le(cr128_0,i7480). +Le(cr128_0,i7700). +Le(cr128_0,i7920). +Le(cr128_0,i8140). +Le(cr128_0,i8360). +Le(cr128_0,i8580). +Eq(i3790,i3790). +Le(i3790,cr129_0). +Le(cr129_0,i3820). +Le(i-30,cr129_0). +Le(i0,cr129_0). +Le(i13,cr129_0). +Le(i26,cr129_0). +Le(i39,cr129_0). +Le(i52,cr129_0). +Le(i60,cr129_0). +Le(i65,cr129_0). +Le(i70,cr129_0). +Le(i78,cr129_0). +Le(i90,cr129_0). +Le(i91,cr129_0). +Le(i104,cr129_0). +Le(i117,cr129_0). +Le(i130,cr129_0). +Le(i143,cr129_0). +Le(i156,cr129_0). +Le(i169,cr129_0). +Le(i182,cr129_0). +Le(i195,cr129_0). +Le(i208,cr129_0). +Le(i221,cr129_0). +Le(i234,cr129_0). +Le(i247,cr129_0). +Le(i260,cr129_0). +Le(i460,cr129_0). +Le(i530,cr129_0). +Le(i600,cr129_0). +Le(i660,cr129_0). +Le(i670,cr129_0). +Le(i710,cr129_0). +Le(i740,cr129_0). +Le(i810,cr129_0). +Le(i850,cr129_0). +Le(i880,cr129_0). +Le(i890,cr129_0). +Le(i920,cr129_0). +Le(i960,cr129_0). +Le(i990,cr129_0). +Le(i1030,cr129_0). +Le(i1060,cr129_0). +Le(i1100,cr129_0). +Le(i1130,cr129_0). +Le(i1170,cr129_0). +Le(i1200,cr129_0). +Le(i1240,cr129_0). +Le(i1260,cr129_0). +Le(i1270,cr129_0). +Le(i1290,cr129_0). +Le(i1310,cr129_0). +Le(i1320,cr129_0). +Le(i1330,cr129_0). +Le(i1350,cr129_0). +Le(i1360,cr129_0). +Le(i1380,cr129_0). +Le(i1390,cr129_0). +Le(i1420,cr129_0). +Le(i1430,cr129_0). 
+Le(i1450,cr129_0). +Le(i1460,cr129_0). +Le(i1490,cr129_0). +Le(i1520,cr129_0). +Le(i1530,cr129_0). +Le(i1540,cr129_0). +Le(i1560,cr129_0). +Le(i1590,cr129_0). +Le(i1630,cr129_0). +Le(i1660,cr129_0). +Le(i1700,cr129_0). +Le(i1730,cr129_0). +Le(i1760,cr129_0). +Le(i1770,cr129_0). +Le(i1810,cr129_0). +Le(i1840,cr129_0). +Le(i1880,cr129_0). +Le(i1910,cr129_0). +Le(i1950,cr129_0). +Le(i1980,cr129_0). +Le(i2020,cr129_0). +Le(i2050,cr129_0). +Le(i2090,cr129_0). +Le(i2120,cr129_0). +Le(i2160,cr129_0). +Le(i2190,cr129_0). +Le(i2200,cr129_0). +Le(i2230,cr129_0). +Le(i2270,cr129_0). +Le(i2300,cr129_0). +Le(i2340,cr129_0). +Le(i2370,cr129_0). +Le(i2410,cr129_0). +Le(i2420,cr129_0). +Le(i2440,cr129_0). +Le(i2480,cr129_0). +Le(i2510,cr129_0). +Le(i2550,cr129_0). +Le(i2580,cr129_0). +Le(i2620,cr129_0). +Le(i2640,cr129_0). +Le(i2660,cr129_0). +Le(i2730,cr129_0). +Le(i2760,cr129_0). +Le(i2800,cr129_0). +Le(i2830,cr129_0). +Le(i2860,cr129_0). +Le(i2870,cr129_0). +Le(i2940,cr129_0). +Le(i2970,cr129_0). +Le(i3010,cr129_0). +Le(i3040,cr129_0). +Le(i3080,cr129_0). +Le(i3120,cr129_0). +Le(i3150,cr129_0). +Le(i3220,cr129_0). +Le(i3260,cr129_0). +Le(i3290,cr129_0). +Le(i3300,cr129_0). +Le(i3330,cr129_0). +Le(i3400,cr129_0). +Le(i3430,cr129_0). +Le(i3500,cr129_0). +Le(i3520,cr129_0). +Le(i3580,cr129_0). +Le(i3610,cr129_0). +Le(i3650,cr129_0). +Le(i3680,cr129_0). +Le(i3720,cr129_0). +Le(i3740,cr129_0). +Le(cr129_0,i3860). +Le(cr129_0,i3960). +Le(cr129_0,i4040). +Le(cr129_0,i4140). +Le(cr129_0,i4180). +Le(cr129_0,i4400). +Le(cr129_0,i4620). +Le(cr129_0,i4840). +Le(cr129_0,i5060). +Le(cr129_0,i5280). +Le(cr129_0,i5500). +Le(cr129_0,i5720). +Le(cr129_0,i5940). +Le(cr129_0,i6160). +Le(cr129_0,i6380). +Le(cr129_0,i6600). +Le(cr129_0,i6820). +Le(cr129_0,i7040). +Le(cr129_0,i7260). +Le(cr129_0,i7480). +Le(cr129_0,i7700). +Le(cr129_0,i7920). +Le(cr129_0,i8140). +Le(cr129_0,i8360). +Le(cr129_0,i8580). +Eq(i3820,i3820). +Le(i3820,cr130_0). +Le(cr130_0,i3860). +Le(i-30,cr130_0). +Le(i0,cr130_0). +Le(i13,cr130_0). +Le(i26,cr130_0). +Le(i39,cr130_0). +Le(i52,cr130_0). +Le(i60,cr130_0). +Le(i65,cr130_0). +Le(i70,cr130_0). +Le(i78,cr130_0). +Le(i90,cr130_0). +Le(i91,cr130_0). +Le(i104,cr130_0). +Le(i117,cr130_0). +Le(i130,cr130_0). +Le(i143,cr130_0). +Le(i156,cr130_0). +Le(i169,cr130_0). +Le(i182,cr130_0). +Le(i195,cr130_0). +Le(i208,cr130_0). +Le(i221,cr130_0). +Le(i234,cr130_0). +Le(i247,cr130_0). +Le(i260,cr130_0). +Le(i460,cr130_0). +Le(i530,cr130_0). +Le(i600,cr130_0). +Le(i660,cr130_0). +Le(i670,cr130_0). +Le(i710,cr130_0). +Le(i740,cr130_0). +Le(i810,cr130_0). +Le(i850,cr130_0). +Le(i880,cr130_0). +Le(i890,cr130_0). +Le(i920,cr130_0). +Le(i960,cr130_0). +Le(i990,cr130_0). +Le(i1030,cr130_0). +Le(i1060,cr130_0). +Le(i1100,cr130_0). +Le(i1130,cr130_0). +Le(i1170,cr130_0). +Le(i1200,cr130_0). +Le(i1240,cr130_0). +Le(i1260,cr130_0). +Le(i1270,cr130_0). +Le(i1290,cr130_0). +Le(i1310,cr130_0). +Le(i1320,cr130_0). +Le(i1330,cr130_0). +Le(i1350,cr130_0). +Le(i1360,cr130_0). +Le(i1380,cr130_0). +Le(i1390,cr130_0). +Le(i1420,cr130_0). +Le(i1430,cr130_0). +Le(i1450,cr130_0). +Le(i1460,cr130_0). +Le(i1490,cr130_0). +Le(i1520,cr130_0). +Le(i1530,cr130_0). +Le(i1540,cr130_0). +Le(i1560,cr130_0). +Le(i1590,cr130_0). +Le(i1630,cr130_0). +Le(i1660,cr130_0). +Le(i1700,cr130_0). +Le(i1730,cr130_0). +Le(i1760,cr130_0). +Le(i1770,cr130_0). +Le(i1810,cr130_0). +Le(i1840,cr130_0). +Le(i1880,cr130_0). +Le(i1910,cr130_0). +Le(i1950,cr130_0). +Le(i1980,cr130_0). +Le(i2020,cr130_0). +Le(i2050,cr130_0). +Le(i2090,cr130_0). +Le(i2120,cr130_0). 
+Le(i2160,cr130_0). +Le(i2190,cr130_0). +Le(i2200,cr130_0). +Le(i2230,cr130_0). +Le(i2270,cr130_0). +Le(i2300,cr130_0). +Le(i2340,cr130_0). +Le(i2370,cr130_0). +Le(i2410,cr130_0). +Le(i2420,cr130_0). +Le(i2440,cr130_0). +Le(i2480,cr130_0). +Le(i2510,cr130_0). +Le(i2550,cr130_0). +Le(i2580,cr130_0). +Le(i2620,cr130_0). +Le(i2640,cr130_0). +Le(i2660,cr130_0). +Le(i2730,cr130_0). +Le(i2760,cr130_0). +Le(i2800,cr130_0). +Le(i2830,cr130_0). +Le(i2860,cr130_0). +Le(i2870,cr130_0). +Le(i2940,cr130_0). +Le(i2970,cr130_0). +Le(i3010,cr130_0). +Le(i3040,cr130_0). +Le(i3080,cr130_0). +Le(i3120,cr130_0). +Le(i3150,cr130_0). +Le(i3220,cr130_0). +Le(i3260,cr130_0). +Le(i3290,cr130_0). +Le(i3300,cr130_0). +Le(i3330,cr130_0). +Le(i3400,cr130_0). +Le(i3430,cr130_0). +Le(i3500,cr130_0). +Le(i3520,cr130_0). +Le(i3580,cr130_0). +Le(i3610,cr130_0). +Le(i3650,cr130_0). +Le(i3680,cr130_0). +Le(i3720,cr130_0). +Le(i3740,cr130_0). +Le(i3790,cr130_0). +Le(cr130_0,i3960). +Le(cr130_0,i4040). +Le(cr130_0,i4140). +Le(cr130_0,i4180). +Le(cr130_0,i4400). +Le(cr130_0,i4620). +Le(cr130_0,i4840). +Le(cr130_0,i5060). +Le(cr130_0,i5280). +Le(cr130_0,i5500). +Le(cr130_0,i5720). +Le(cr130_0,i5940). +Le(cr130_0,i6160). +Le(cr130_0,i6380). +Le(cr130_0,i6600). +Le(cr130_0,i6820). +Le(cr130_0,i7040). +Le(cr130_0,i7260). +Le(cr130_0,i7480). +Le(cr130_0,i7700). +Le(cr130_0,i7920). +Le(cr130_0,i8140). +Le(cr130_0,i8360). +Le(cr130_0,i8580). +Eq(i3860,i3860). +Le(i3860,cr131_0). +Le(cr131_0,i3960). +Le(i-30,cr131_0). +Le(i0,cr131_0). +Le(i13,cr131_0). +Le(i26,cr131_0). +Le(i39,cr131_0). +Le(i52,cr131_0). +Le(i60,cr131_0). +Le(i65,cr131_0). +Le(i70,cr131_0). +Le(i78,cr131_0). +Le(i90,cr131_0). +Le(i91,cr131_0). +Le(i104,cr131_0). +Le(i117,cr131_0). +Le(i130,cr131_0). +Le(i143,cr131_0). +Le(i156,cr131_0). +Le(i169,cr131_0). +Le(i182,cr131_0). +Le(i195,cr131_0). +Le(i208,cr131_0). +Le(i221,cr131_0). +Le(i234,cr131_0). +Le(i247,cr131_0). +Le(i260,cr131_0). +Le(i460,cr131_0). +Le(i530,cr131_0). +Le(i600,cr131_0). +Le(i660,cr131_0). +Le(i670,cr131_0). +Le(i710,cr131_0). +Le(i740,cr131_0). +Le(i810,cr131_0). +Le(i850,cr131_0). +Le(i880,cr131_0). +Le(i890,cr131_0). +Le(i920,cr131_0). +Le(i960,cr131_0). +Le(i990,cr131_0). +Le(i1030,cr131_0). +Le(i1060,cr131_0). +Le(i1100,cr131_0). +Le(i1130,cr131_0). +Le(i1170,cr131_0). +Le(i1200,cr131_0). +Le(i1240,cr131_0). +Le(i1260,cr131_0). +Le(i1270,cr131_0). +Le(i1290,cr131_0). +Le(i1310,cr131_0). +Le(i1320,cr131_0). +Le(i1330,cr131_0). +Le(i1350,cr131_0). +Le(i1360,cr131_0). +Le(i1380,cr131_0). +Le(i1390,cr131_0). +Le(i1420,cr131_0). +Le(i1430,cr131_0). +Le(i1450,cr131_0). +Le(i1460,cr131_0). +Le(i1490,cr131_0). +Le(i1520,cr131_0). +Le(i1530,cr131_0). +Le(i1540,cr131_0). +Le(i1560,cr131_0). +Le(i1590,cr131_0). +Le(i1630,cr131_0). +Le(i1660,cr131_0). +Le(i1700,cr131_0). +Le(i1730,cr131_0). +Le(i1760,cr131_0). +Le(i1770,cr131_0). +Le(i1810,cr131_0). +Le(i1840,cr131_0). +Le(i1880,cr131_0). +Le(i1910,cr131_0). +Le(i1950,cr131_0). +Le(i1980,cr131_0). +Le(i2020,cr131_0). +Le(i2050,cr131_0). +Le(i2090,cr131_0). +Le(i2120,cr131_0). +Le(i2160,cr131_0). +Le(i2190,cr131_0). +Le(i2200,cr131_0). +Le(i2230,cr131_0). +Le(i2270,cr131_0). +Le(i2300,cr131_0). +Le(i2340,cr131_0). +Le(i2370,cr131_0). +Le(i2410,cr131_0). +Le(i2420,cr131_0). +Le(i2440,cr131_0). +Le(i2480,cr131_0). +Le(i2510,cr131_0). +Le(i2550,cr131_0). +Le(i2580,cr131_0). +Le(i2620,cr131_0). +Le(i2640,cr131_0). +Le(i2660,cr131_0). +Le(i2730,cr131_0). +Le(i2760,cr131_0). +Le(i2800,cr131_0). +Le(i2830,cr131_0). +Le(i2860,cr131_0). +Le(i2870,cr131_0). 
+Le(i2940,cr131_0). +Le(i2970,cr131_0). +Le(i3010,cr131_0). +Le(i3040,cr131_0). +Le(i3080,cr131_0). +Le(i3120,cr131_0). +Le(i3150,cr131_0). +Le(i3220,cr131_0). +Le(i3260,cr131_0). +Le(i3290,cr131_0). +Le(i3300,cr131_0). +Le(i3330,cr131_0). +Le(i3400,cr131_0). +Le(i3430,cr131_0). +Le(i3500,cr131_0). +Le(i3520,cr131_0). +Le(i3580,cr131_0). +Le(i3610,cr131_0). +Le(i3650,cr131_0). +Le(i3680,cr131_0). +Le(i3720,cr131_0). +Le(i3740,cr131_0). +Le(i3790,cr131_0). +Le(i3820,cr131_0). +Le(cr131_0,i4040). +Le(cr131_0,i4140). +Le(cr131_0,i4180). +Le(cr131_0,i4400). +Le(cr131_0,i4620). +Le(cr131_0,i4840). +Le(cr131_0,i5060). +Le(cr131_0,i5280). +Le(cr131_0,i5500). +Le(cr131_0,i5720). +Le(cr131_0,i5940). +Le(cr131_0,i6160). +Le(cr131_0,i6380). +Le(cr131_0,i6600). +Le(cr131_0,i6820). +Le(cr131_0,i7040). +Le(cr131_0,i7260). +Le(cr131_0,i7480). +Le(cr131_0,i7700). +Le(cr131_0,i7920). +Le(cr131_0,i8140). +Le(cr131_0,i8360). +Le(cr131_0,i8580). +Eq(i3960,i3960). +Le(i3960,cr132_0). +Le(cr132_0,i4040). +Le(i-30,cr132_0). +Le(i0,cr132_0). +Le(i13,cr132_0). +Le(i26,cr132_0). +Le(i39,cr132_0). +Le(i52,cr132_0). +Le(i60,cr132_0). +Le(i65,cr132_0). +Le(i70,cr132_0). +Le(i78,cr132_0). +Le(i90,cr132_0). +Le(i91,cr132_0). +Le(i104,cr132_0). +Le(i117,cr132_0). +Le(i130,cr132_0). +Le(i143,cr132_0). +Le(i156,cr132_0). +Le(i169,cr132_0). +Le(i182,cr132_0). +Le(i195,cr132_0). +Le(i208,cr132_0). +Le(i221,cr132_0). +Le(i234,cr132_0). +Le(i247,cr132_0). +Le(i260,cr132_0). +Le(i460,cr132_0). +Le(i530,cr132_0). +Le(i600,cr132_0). +Le(i660,cr132_0). +Le(i670,cr132_0). +Le(i710,cr132_0). +Le(i740,cr132_0). +Le(i810,cr132_0). +Le(i850,cr132_0). +Le(i880,cr132_0). +Le(i890,cr132_0). +Le(i920,cr132_0). +Le(i960,cr132_0). +Le(i990,cr132_0). +Le(i1030,cr132_0). +Le(i1060,cr132_0). +Le(i1100,cr132_0). +Le(i1130,cr132_0). +Le(i1170,cr132_0). +Le(i1200,cr132_0). +Le(i1240,cr132_0). +Le(i1260,cr132_0). +Le(i1270,cr132_0). +Le(i1290,cr132_0). +Le(i1310,cr132_0). +Le(i1320,cr132_0). +Le(i1330,cr132_0). +Le(i1350,cr132_0). +Le(i1360,cr132_0). +Le(i1380,cr132_0). +Le(i1390,cr132_0). +Le(i1420,cr132_0). +Le(i1430,cr132_0). +Le(i1450,cr132_0). +Le(i1460,cr132_0). +Le(i1490,cr132_0). +Le(i1520,cr132_0). +Le(i1530,cr132_0). +Le(i1540,cr132_0). +Le(i1560,cr132_0). +Le(i1590,cr132_0). +Le(i1630,cr132_0). +Le(i1660,cr132_0). +Le(i1700,cr132_0). +Le(i1730,cr132_0). +Le(i1760,cr132_0). +Le(i1770,cr132_0). +Le(i1810,cr132_0). +Le(i1840,cr132_0). +Le(i1880,cr132_0). +Le(i1910,cr132_0). +Le(i1950,cr132_0). +Le(i1980,cr132_0). +Le(i2020,cr132_0). +Le(i2050,cr132_0). +Le(i2090,cr132_0). +Le(i2120,cr132_0). +Le(i2160,cr132_0). +Le(i2190,cr132_0). +Le(i2200,cr132_0). +Le(i2230,cr132_0). +Le(i2270,cr132_0). +Le(i2300,cr132_0). +Le(i2340,cr132_0). +Le(i2370,cr132_0). +Le(i2410,cr132_0). +Le(i2420,cr132_0). +Le(i2440,cr132_0). +Le(i2480,cr132_0). +Le(i2510,cr132_0). +Le(i2550,cr132_0). +Le(i2580,cr132_0). +Le(i2620,cr132_0). +Le(i2640,cr132_0). +Le(i2660,cr132_0). +Le(i2730,cr132_0). +Le(i2760,cr132_0). +Le(i2800,cr132_0). +Le(i2830,cr132_0). +Le(i2860,cr132_0). +Le(i2870,cr132_0). +Le(i2940,cr132_0). +Le(i2970,cr132_0). +Le(i3010,cr132_0). +Le(i3040,cr132_0). +Le(i3080,cr132_0). +Le(i3120,cr132_0). +Le(i3150,cr132_0). +Le(i3220,cr132_0). +Le(i3260,cr132_0). +Le(i3290,cr132_0). +Le(i3300,cr132_0). +Le(i3330,cr132_0). +Le(i3400,cr132_0). +Le(i3430,cr132_0). +Le(i3500,cr132_0). +Le(i3520,cr132_0). +Le(i3580,cr132_0). +Le(i3610,cr132_0). +Le(i3650,cr132_0). +Le(i3680,cr132_0). +Le(i3720,cr132_0). +Le(i3740,cr132_0). +Le(i3790,cr132_0). +Le(i3820,cr132_0). 
+Le(i3860,cr132_0). +Le(cr132_0,i4140). +Le(cr132_0,i4180). +Le(cr132_0,i4400). +Le(cr132_0,i4620). +Le(cr132_0,i4840). +Le(cr132_0,i5060). +Le(cr132_0,i5280). +Le(cr132_0,i5500). +Le(cr132_0,i5720). +Le(cr132_0,i5940). +Le(cr132_0,i6160). +Le(cr132_0,i6380). +Le(cr132_0,i6600). +Le(cr132_0,i6820). +Le(cr132_0,i7040). +Le(cr132_0,i7260). +Le(cr132_0,i7480). +Le(cr132_0,i7700). +Le(cr132_0,i7920). +Le(cr132_0,i8140). +Le(cr132_0,i8360). +Le(cr132_0,i8580). +Eq(i4040,i4040). +Le(i4040,cr133_0). +Le(cr133_0,i4140). +Le(i-30,cr133_0). +Le(i0,cr133_0). +Le(i13,cr133_0). +Le(i26,cr133_0). +Le(i39,cr133_0). +Le(i52,cr133_0). +Le(i60,cr133_0). +Le(i65,cr133_0). +Le(i70,cr133_0). +Le(i78,cr133_0). +Le(i90,cr133_0). +Le(i91,cr133_0). +Le(i104,cr133_0). +Le(i117,cr133_0). +Le(i130,cr133_0). +Le(i143,cr133_0). +Le(i156,cr133_0). +Le(i169,cr133_0). +Le(i182,cr133_0). +Le(i195,cr133_0). +Le(i208,cr133_0). +Le(i221,cr133_0). +Le(i234,cr133_0). +Le(i247,cr133_0). +Le(i260,cr133_0). +Le(i460,cr133_0). +Le(i530,cr133_0). +Le(i600,cr133_0). +Le(i660,cr133_0). +Le(i670,cr133_0). +Le(i710,cr133_0). +Le(i740,cr133_0). +Le(i810,cr133_0). +Le(i850,cr133_0). +Le(i880,cr133_0). +Le(i890,cr133_0). +Le(i920,cr133_0). +Le(i960,cr133_0). +Le(i990,cr133_0). +Le(i1030,cr133_0). +Le(i1060,cr133_0). +Le(i1100,cr133_0). +Le(i1130,cr133_0). +Le(i1170,cr133_0). +Le(i1200,cr133_0). +Le(i1240,cr133_0). +Le(i1260,cr133_0). +Le(i1270,cr133_0). +Le(i1290,cr133_0). +Le(i1310,cr133_0). +Le(i1320,cr133_0). +Le(i1330,cr133_0). +Le(i1350,cr133_0). +Le(i1360,cr133_0). +Le(i1380,cr133_0). +Le(i1390,cr133_0). +Le(i1420,cr133_0). +Le(i1430,cr133_0). +Le(i1450,cr133_0). +Le(i1460,cr133_0). +Le(i1490,cr133_0). +Le(i1520,cr133_0). +Le(i1530,cr133_0). +Le(i1540,cr133_0). +Le(i1560,cr133_0). +Le(i1590,cr133_0). +Le(i1630,cr133_0). +Le(i1660,cr133_0). +Le(i1700,cr133_0). +Le(i1730,cr133_0). +Le(i1760,cr133_0). +Le(i1770,cr133_0). +Le(i1810,cr133_0). +Le(i1840,cr133_0). +Le(i1880,cr133_0). +Le(i1910,cr133_0). +Le(i1950,cr133_0). +Le(i1980,cr133_0). +Le(i2020,cr133_0). +Le(i2050,cr133_0). +Le(i2090,cr133_0). +Le(i2120,cr133_0). +Le(i2160,cr133_0). +Le(i2190,cr133_0). +Le(i2200,cr133_0). +Le(i2230,cr133_0). +Le(i2270,cr133_0). +Le(i2300,cr133_0). +Le(i2340,cr133_0). +Le(i2370,cr133_0). +Le(i2410,cr133_0). +Le(i2420,cr133_0). +Le(i2440,cr133_0). +Le(i2480,cr133_0). +Le(i2510,cr133_0). +Le(i2550,cr133_0). +Le(i2580,cr133_0). +Le(i2620,cr133_0). +Le(i2640,cr133_0). +Le(i2660,cr133_0). +Le(i2730,cr133_0). +Le(i2760,cr133_0). +Le(i2800,cr133_0). +Le(i2830,cr133_0). +Le(i2860,cr133_0). +Le(i2870,cr133_0). +Le(i2940,cr133_0). +Le(i2970,cr133_0). +Le(i3010,cr133_0). +Le(i3040,cr133_0). +Le(i3080,cr133_0). +Le(i3120,cr133_0). +Le(i3150,cr133_0). +Le(i3220,cr133_0). +Le(i3260,cr133_0). +Le(i3290,cr133_0). +Le(i3300,cr133_0). +Le(i3330,cr133_0). +Le(i3400,cr133_0). +Le(i3430,cr133_0). +Le(i3500,cr133_0). +Le(i3520,cr133_0). +Le(i3580,cr133_0). +Le(i3610,cr133_0). +Le(i3650,cr133_0). +Le(i3680,cr133_0). +Le(i3720,cr133_0). +Le(i3740,cr133_0). +Le(i3790,cr133_0). +Le(i3820,cr133_0). +Le(i3860,cr133_0). +Le(i3960,cr133_0). +Le(cr133_0,i4180). +Le(cr133_0,i4400). +Le(cr133_0,i4620). +Le(cr133_0,i4840). +Le(cr133_0,i5060). +Le(cr133_0,i5280). +Le(cr133_0,i5500). +Le(cr133_0,i5720). +Le(cr133_0,i5940). +Le(cr133_0,i6160). +Le(cr133_0,i6380). +Le(cr133_0,i6600). +Le(cr133_0,i6820). +Le(cr133_0,i7040). +Le(cr133_0,i7260). +Le(cr133_0,i7480). +Le(cr133_0,i7700). +Le(cr133_0,i7920). +Le(cr133_0,i8140). +Le(cr133_0,i8360). +Le(cr133_0,i8580). +Eq(i4140,i4140). 
+Le(i4140,cr134_0). +Le(cr134_0,i4180). +Le(i-30,cr134_0). +Le(i0,cr134_0). +Le(i13,cr134_0). +Le(i26,cr134_0). +Le(i39,cr134_0). +Le(i52,cr134_0). +Le(i60,cr134_0). +Le(i65,cr134_0). +Le(i70,cr134_0). +Le(i78,cr134_0). +Le(i90,cr134_0). +Le(i91,cr134_0). +Le(i104,cr134_0). +Le(i117,cr134_0). +Le(i130,cr134_0). +Le(i143,cr134_0). +Le(i156,cr134_0). +Le(i169,cr134_0). +Le(i182,cr134_0). +Le(i195,cr134_0). +Le(i208,cr134_0). +Le(i221,cr134_0). +Le(i234,cr134_0). +Le(i247,cr134_0). +Le(i260,cr134_0). +Le(i460,cr134_0). +Le(i530,cr134_0). +Le(i600,cr134_0). +Le(i660,cr134_0). +Le(i670,cr134_0). +Le(i710,cr134_0). +Le(i740,cr134_0). +Le(i810,cr134_0). +Le(i850,cr134_0). +Le(i880,cr134_0). +Le(i890,cr134_0). +Le(i920,cr134_0). +Le(i960,cr134_0). +Le(i990,cr134_0). +Le(i1030,cr134_0). +Le(i1060,cr134_0). +Le(i1100,cr134_0). +Le(i1130,cr134_0). +Le(i1170,cr134_0). +Le(i1200,cr134_0). +Le(i1240,cr134_0). +Le(i1260,cr134_0). +Le(i1270,cr134_0). +Le(i1290,cr134_0). +Le(i1310,cr134_0). +Le(i1320,cr134_0). +Le(i1330,cr134_0). +Le(i1350,cr134_0). +Le(i1360,cr134_0). +Le(i1380,cr134_0). +Le(i1390,cr134_0). +Le(i1420,cr134_0). +Le(i1430,cr134_0). +Le(i1450,cr134_0). +Le(i1460,cr134_0). +Le(i1490,cr134_0). +Le(i1520,cr134_0). +Le(i1530,cr134_0). +Le(i1540,cr134_0). +Le(i1560,cr134_0). +Le(i1590,cr134_0). +Le(i1630,cr134_0). +Le(i1660,cr134_0). +Le(i1700,cr134_0). +Le(i1730,cr134_0). +Le(i1760,cr134_0). +Le(i1770,cr134_0). +Le(i1810,cr134_0). +Le(i1840,cr134_0). +Le(i1880,cr134_0). +Le(i1910,cr134_0). +Le(i1950,cr134_0). +Le(i1980,cr134_0). +Le(i2020,cr134_0). +Le(i2050,cr134_0). +Le(i2090,cr134_0). +Le(i2120,cr134_0). +Le(i2160,cr134_0). +Le(i2190,cr134_0). +Le(i2200,cr134_0). +Le(i2230,cr134_0). +Le(i2270,cr134_0). +Le(i2300,cr134_0). +Le(i2340,cr134_0). +Le(i2370,cr134_0). +Le(i2410,cr134_0). +Le(i2420,cr134_0). +Le(i2440,cr134_0). +Le(i2480,cr134_0). +Le(i2510,cr134_0). +Le(i2550,cr134_0). +Le(i2580,cr134_0). +Le(i2620,cr134_0). +Le(i2640,cr134_0). +Le(i2660,cr134_0). +Le(i2730,cr134_0). +Le(i2760,cr134_0). +Le(i2800,cr134_0). +Le(i2830,cr134_0). +Le(i2860,cr134_0). +Le(i2870,cr134_0). +Le(i2940,cr134_0). +Le(i2970,cr134_0). +Le(i3010,cr134_0). +Le(i3040,cr134_0). +Le(i3080,cr134_0). +Le(i3120,cr134_0). +Le(i3150,cr134_0). +Le(i3220,cr134_0). +Le(i3260,cr134_0). +Le(i3290,cr134_0). +Le(i3300,cr134_0). +Le(i3330,cr134_0). +Le(i3400,cr134_0). +Le(i3430,cr134_0). +Le(i3500,cr134_0). +Le(i3520,cr134_0). +Le(i3580,cr134_0). +Le(i3610,cr134_0). +Le(i3650,cr134_0). +Le(i3680,cr134_0). +Le(i3720,cr134_0). +Le(i3740,cr134_0). +Le(i3790,cr134_0). +Le(i3820,cr134_0). +Le(i3860,cr134_0). +Le(i3960,cr134_0). +Le(i4040,cr134_0). +Le(cr134_0,i4400). +Le(cr134_0,i4620). +Le(cr134_0,i4840). +Le(cr134_0,i5060). +Le(cr134_0,i5280). +Le(cr134_0,i5500). +Le(cr134_0,i5720). +Le(cr134_0,i5940). +Le(cr134_0,i6160). +Le(cr134_0,i6380). +Le(cr134_0,i6600). +Le(cr134_0,i6820). +Le(cr134_0,i7040). +Le(cr134_0,i7260). +Le(cr134_0,i7480). +Le(cr134_0,i7700). +Le(cr134_0,i7920). +Le(cr134_0,i8140). +Le(cr134_0,i8360). +Le(cr134_0,i8580). +Eq(i4180,i4180). +Le(i4180,cr135_0). +Le(cr135_0,i4400). +Le(i-30,cr135_0). +Le(i0,cr135_0). +Le(i13,cr135_0). +Le(i26,cr135_0). +Le(i39,cr135_0). +Le(i52,cr135_0). +Le(i60,cr135_0). +Le(i65,cr135_0). +Le(i70,cr135_0). +Le(i78,cr135_0). +Le(i90,cr135_0). +Le(i91,cr135_0). +Le(i104,cr135_0). +Le(i117,cr135_0). +Le(i130,cr135_0). +Le(i143,cr135_0). +Le(i156,cr135_0). +Le(i169,cr135_0). +Le(i182,cr135_0). +Le(i195,cr135_0). +Le(i208,cr135_0). +Le(i221,cr135_0). +Le(i234,cr135_0). +Le(i247,cr135_0). 
+Le(i260,cr135_0). +Le(i460,cr135_0). +Le(i530,cr135_0). +Le(i600,cr135_0). +Le(i660,cr135_0). +Le(i670,cr135_0). +Le(i710,cr135_0). +Le(i740,cr135_0). +Le(i810,cr135_0). +Le(i850,cr135_0). +Le(i880,cr135_0). +Le(i890,cr135_0). +Le(i920,cr135_0). +Le(i960,cr135_0). +Le(i990,cr135_0). +Le(i1030,cr135_0). +Le(i1060,cr135_0). +Le(i1100,cr135_0). +Le(i1130,cr135_0). +Le(i1170,cr135_0). +Le(i1200,cr135_0). +Le(i1240,cr135_0). +Le(i1260,cr135_0). +Le(i1270,cr135_0). +Le(i1290,cr135_0). +Le(i1310,cr135_0). +Le(i1320,cr135_0). +Le(i1330,cr135_0). +Le(i1350,cr135_0). +Le(i1360,cr135_0). +Le(i1380,cr135_0). +Le(i1390,cr135_0). +Le(i1420,cr135_0). +Le(i1430,cr135_0). +Le(i1450,cr135_0). +Le(i1460,cr135_0). +Le(i1490,cr135_0). +Le(i1520,cr135_0). +Le(i1530,cr135_0). +Le(i1540,cr135_0). +Le(i1560,cr135_0). +Le(i1590,cr135_0). +Le(i1630,cr135_0). +Le(i1660,cr135_0). +Le(i1700,cr135_0). +Le(i1730,cr135_0). +Le(i1760,cr135_0). +Le(i1770,cr135_0). +Le(i1810,cr135_0). +Le(i1840,cr135_0). +Le(i1880,cr135_0). +Le(i1910,cr135_0). +Le(i1950,cr135_0). +Le(i1980,cr135_0). +Le(i2020,cr135_0). +Le(i2050,cr135_0). +Le(i2090,cr135_0). +Le(i2120,cr135_0). +Le(i2160,cr135_0). +Le(i2190,cr135_0). +Le(i2200,cr135_0). +Le(i2230,cr135_0). +Le(i2270,cr135_0). +Le(i2300,cr135_0). +Le(i2340,cr135_0). +Le(i2370,cr135_0). +Le(i2410,cr135_0). +Le(i2420,cr135_0). +Le(i2440,cr135_0). +Le(i2480,cr135_0). +Le(i2510,cr135_0). +Le(i2550,cr135_0). +Le(i2580,cr135_0). +Le(i2620,cr135_0). +Le(i2640,cr135_0). +Le(i2660,cr135_0). +Le(i2730,cr135_0). +Le(i2760,cr135_0). +Le(i2800,cr135_0). +Le(i2830,cr135_0). +Le(i2860,cr135_0). +Le(i2870,cr135_0). +Le(i2940,cr135_0). +Le(i2970,cr135_0). +Le(i3010,cr135_0). +Le(i3040,cr135_0). +Le(i3080,cr135_0). +Le(i3120,cr135_0). +Le(i3150,cr135_0). +Le(i3220,cr135_0). +Le(i3260,cr135_0). +Le(i3290,cr135_0). +Le(i3300,cr135_0). +Le(i3330,cr135_0). +Le(i3400,cr135_0). +Le(i3430,cr135_0). +Le(i3500,cr135_0). +Le(i3520,cr135_0). +Le(i3580,cr135_0). +Le(i3610,cr135_0). +Le(i3650,cr135_0). +Le(i3680,cr135_0). +Le(i3720,cr135_0). +Le(i3740,cr135_0). +Le(i3790,cr135_0). +Le(i3820,cr135_0). +Le(i3860,cr135_0). +Le(i3960,cr135_0). +Le(i4040,cr135_0). +Le(i4140,cr135_0). +Le(cr135_0,i4620). +Le(cr135_0,i4840). +Le(cr135_0,i5060). +Le(cr135_0,i5280). +Le(cr135_0,i5500). +Le(cr135_0,i5720). +Le(cr135_0,i5940). +Le(cr135_0,i6160). +Le(cr135_0,i6380). +Le(cr135_0,i6600). +Le(cr135_0,i6820). +Le(cr135_0,i7040). +Le(cr135_0,i7260). +Le(cr135_0,i7480). +Le(cr135_0,i7700). +Le(cr135_0,i7920). +Le(cr135_0,i8140). +Le(cr135_0,i8360). +Le(cr135_0,i8580). +Eq(i4400,i4400). +Le(i4400,cr136_0). +Le(cr136_0,i4620). +Le(i-30,cr136_0). +Le(i0,cr136_0). +Le(i13,cr136_0). +Le(i26,cr136_0). +Le(i39,cr136_0). +Le(i52,cr136_0). +Le(i60,cr136_0). +Le(i65,cr136_0). +Le(i70,cr136_0). +Le(i78,cr136_0). +Le(i90,cr136_0). +Le(i91,cr136_0). +Le(i104,cr136_0). +Le(i117,cr136_0). +Le(i130,cr136_0). +Le(i143,cr136_0). +Le(i156,cr136_0). +Le(i169,cr136_0). +Le(i182,cr136_0). +Le(i195,cr136_0). +Le(i208,cr136_0). +Le(i221,cr136_0). +Le(i234,cr136_0). +Le(i247,cr136_0). +Le(i260,cr136_0). +Le(i460,cr136_0). +Le(i530,cr136_0). +Le(i600,cr136_0). +Le(i660,cr136_0). +Le(i670,cr136_0). +Le(i710,cr136_0). +Le(i740,cr136_0). +Le(i810,cr136_0). +Le(i850,cr136_0). +Le(i880,cr136_0). +Le(i890,cr136_0). +Le(i920,cr136_0). +Le(i960,cr136_0). +Le(i990,cr136_0). +Le(i1030,cr136_0). +Le(i1060,cr136_0). +Le(i1100,cr136_0). +Le(i1130,cr136_0). +Le(i1170,cr136_0). +Le(i1200,cr136_0). +Le(i1240,cr136_0). +Le(i1260,cr136_0). +Le(i1270,cr136_0). +Le(i1290,cr136_0). 
+Le(i1310,cr136_0). +Le(i1320,cr136_0). +Le(i1330,cr136_0). +Le(i1350,cr136_0). +Le(i1360,cr136_0). +Le(i1380,cr136_0). +Le(i1390,cr136_0). +Le(i1420,cr136_0). +Le(i1430,cr136_0). +Le(i1450,cr136_0). +Le(i1460,cr136_0). +Le(i1490,cr136_0). +Le(i1520,cr136_0). +Le(i1530,cr136_0). +Le(i1540,cr136_0). +Le(i1560,cr136_0). +Le(i1590,cr136_0). +Le(i1630,cr136_0). +Le(i1660,cr136_0). +Le(i1700,cr136_0). +Le(i1730,cr136_0). +Le(i1760,cr136_0). +Le(i1770,cr136_0). +Le(i1810,cr136_0). +Le(i1840,cr136_0). +Le(i1880,cr136_0). +Le(i1910,cr136_0). +Le(i1950,cr136_0). +Le(i1980,cr136_0). +Le(i2020,cr136_0). +Le(i2050,cr136_0). +Le(i2090,cr136_0). +Le(i2120,cr136_0). +Le(i2160,cr136_0). +Le(i2190,cr136_0). +Le(i2200,cr136_0). +Le(i2230,cr136_0). +Le(i2270,cr136_0). +Le(i2300,cr136_0). +Le(i2340,cr136_0). +Le(i2370,cr136_0). +Le(i2410,cr136_0). +Le(i2420,cr136_0). +Le(i2440,cr136_0). +Le(i2480,cr136_0). +Le(i2510,cr136_0). +Le(i2550,cr136_0). +Le(i2580,cr136_0). +Le(i2620,cr136_0). +Le(i2640,cr136_0). +Le(i2660,cr136_0). +Le(i2730,cr136_0). +Le(i2760,cr136_0). +Le(i2800,cr136_0). +Le(i2830,cr136_0). +Le(i2860,cr136_0). +Le(i2870,cr136_0). +Le(i2940,cr136_0). +Le(i2970,cr136_0). +Le(i3010,cr136_0). +Le(i3040,cr136_0). +Le(i3080,cr136_0). +Le(i3120,cr136_0). +Le(i3150,cr136_0). +Le(i3220,cr136_0). +Le(i3260,cr136_0). +Le(i3290,cr136_0). +Le(i3300,cr136_0). +Le(i3330,cr136_0). +Le(i3400,cr136_0). +Le(i3430,cr136_0). +Le(i3500,cr136_0). +Le(i3520,cr136_0). +Le(i3580,cr136_0). +Le(i3610,cr136_0). +Le(i3650,cr136_0). +Le(i3680,cr136_0). +Le(i3720,cr136_0). +Le(i3740,cr136_0). +Le(i3790,cr136_0). +Le(i3820,cr136_0). +Le(i3860,cr136_0). +Le(i3960,cr136_0). +Le(i4040,cr136_0). +Le(i4140,cr136_0). +Le(i4180,cr136_0). +Le(cr136_0,i4840). +Le(cr136_0,i5060). +Le(cr136_0,i5280). +Le(cr136_0,i5500). +Le(cr136_0,i5720). +Le(cr136_0,i5940). +Le(cr136_0,i6160). +Le(cr136_0,i6380). +Le(cr136_0,i6600). +Le(cr136_0,i6820). +Le(cr136_0,i7040). +Le(cr136_0,i7260). +Le(cr136_0,i7480). +Le(cr136_0,i7700). +Le(cr136_0,i7920). +Le(cr136_0,i8140). +Le(cr136_0,i8360). +Le(cr136_0,i8580). +Eq(i4620,i4620). +Le(i4620,cr137_0). +Le(cr137_0,i4840). +Le(i-30,cr137_0). +Le(i0,cr137_0). +Le(i13,cr137_0). +Le(i26,cr137_0). +Le(i39,cr137_0). +Le(i52,cr137_0). +Le(i60,cr137_0). +Le(i65,cr137_0). +Le(i70,cr137_0). +Le(i78,cr137_0). +Le(i90,cr137_0). +Le(i91,cr137_0). +Le(i104,cr137_0). +Le(i117,cr137_0). +Le(i130,cr137_0). +Le(i143,cr137_0). +Le(i156,cr137_0). +Le(i169,cr137_0). +Le(i182,cr137_0). +Le(i195,cr137_0). +Le(i208,cr137_0). +Le(i221,cr137_0). +Le(i234,cr137_0). +Le(i247,cr137_0). +Le(i260,cr137_0). +Le(i460,cr137_0). +Le(i530,cr137_0). +Le(i600,cr137_0). +Le(i660,cr137_0). +Le(i670,cr137_0). +Le(i710,cr137_0). +Le(i740,cr137_0). +Le(i810,cr137_0). +Le(i850,cr137_0). +Le(i880,cr137_0). +Le(i890,cr137_0). +Le(i920,cr137_0). +Le(i960,cr137_0). +Le(i990,cr137_0). +Le(i1030,cr137_0). +Le(i1060,cr137_0). +Le(i1100,cr137_0). +Le(i1130,cr137_0). +Le(i1170,cr137_0). +Le(i1200,cr137_0). +Le(i1240,cr137_0). +Le(i1260,cr137_0). +Le(i1270,cr137_0). +Le(i1290,cr137_0). +Le(i1310,cr137_0). +Le(i1320,cr137_0). +Le(i1330,cr137_0). +Le(i1350,cr137_0). +Le(i1360,cr137_0). +Le(i1380,cr137_0). +Le(i1390,cr137_0). +Le(i1420,cr137_0). +Le(i1430,cr137_0). +Le(i1450,cr137_0). +Le(i1460,cr137_0). +Le(i1490,cr137_0). +Le(i1520,cr137_0). +Le(i1530,cr137_0). +Le(i1540,cr137_0). +Le(i1560,cr137_0). +Le(i1590,cr137_0). +Le(i1630,cr137_0). +Le(i1660,cr137_0). +Le(i1700,cr137_0). +Le(i1730,cr137_0). +Le(i1760,cr137_0). +Le(i1770,cr137_0). +Le(i1810,cr137_0). 
+Le(i1840,cr137_0). +Le(i1880,cr137_0). +Le(i1910,cr137_0). +Le(i1950,cr137_0). +Le(i1980,cr137_0). +Le(i2020,cr137_0). +Le(i2050,cr137_0). +Le(i2090,cr137_0). +Le(i2120,cr137_0). +Le(i2160,cr137_0). +Le(i2190,cr137_0). +Le(i2200,cr137_0). +Le(i2230,cr137_0). +Le(i2270,cr137_0). +Le(i2300,cr137_0). +Le(i2340,cr137_0). +Le(i2370,cr137_0). +Le(i2410,cr137_0). +Le(i2420,cr137_0). +Le(i2440,cr137_0). +Le(i2480,cr137_0). +Le(i2510,cr137_0). +Le(i2550,cr137_0). +Le(i2580,cr137_0). +Le(i2620,cr137_0). +Le(i2640,cr137_0). +Le(i2660,cr137_0). +Le(i2730,cr137_0). +Le(i2760,cr137_0). +Le(i2800,cr137_0). +Le(i2830,cr137_0). +Le(i2860,cr137_0). +Le(i2870,cr137_0). +Le(i2940,cr137_0). +Le(i2970,cr137_0). +Le(i3010,cr137_0). +Le(i3040,cr137_0). +Le(i3080,cr137_0). +Le(i3120,cr137_0). +Le(i3150,cr137_0). +Le(i3220,cr137_0). +Le(i3260,cr137_0). +Le(i3290,cr137_0). +Le(i3300,cr137_0). +Le(i3330,cr137_0). +Le(i3400,cr137_0). +Le(i3430,cr137_0). +Le(i3500,cr137_0). +Le(i3520,cr137_0). +Le(i3580,cr137_0). +Le(i3610,cr137_0). +Le(i3650,cr137_0). +Le(i3680,cr137_0). +Le(i3720,cr137_0). +Le(i3740,cr137_0). +Le(i3790,cr137_0). +Le(i3820,cr137_0). +Le(i3860,cr137_0). +Le(i3960,cr137_0). +Le(i4040,cr137_0). +Le(i4140,cr137_0). +Le(i4180,cr137_0). +Le(i4400,cr137_0). +Le(cr137_0,i5060). +Le(cr137_0,i5280). +Le(cr137_0,i5500). +Le(cr137_0,i5720). +Le(cr137_0,i5940). +Le(cr137_0,i6160). +Le(cr137_0,i6380). +Le(cr137_0,i6600). +Le(cr137_0,i6820). +Le(cr137_0,i7040). +Le(cr137_0,i7260). +Le(cr137_0,i7480). +Le(cr137_0,i7700). +Le(cr137_0,i7920). +Le(cr137_0,i8140). +Le(cr137_0,i8360). +Le(cr137_0,i8580). +Eq(i4840,i4840). +Le(i4840,cr138_0). +Le(cr138_0,i5060). +Le(i-30,cr138_0). +Le(i0,cr138_0). +Le(i13,cr138_0). +Le(i26,cr138_0). +Le(i39,cr138_0). +Le(i52,cr138_0). +Le(i60,cr138_0). +Le(i65,cr138_0). +Le(i70,cr138_0). +Le(i78,cr138_0). +Le(i90,cr138_0). +Le(i91,cr138_0). +Le(i104,cr138_0). +Le(i117,cr138_0). +Le(i130,cr138_0). +Le(i143,cr138_0). +Le(i156,cr138_0). +Le(i169,cr138_0). +Le(i182,cr138_0). +Le(i195,cr138_0). +Le(i208,cr138_0). +Le(i221,cr138_0). +Le(i234,cr138_0). +Le(i247,cr138_0). +Le(i260,cr138_0). +Le(i460,cr138_0). +Le(i530,cr138_0). +Le(i600,cr138_0). +Le(i660,cr138_0). +Le(i670,cr138_0). +Le(i710,cr138_0). +Le(i740,cr138_0). +Le(i810,cr138_0). +Le(i850,cr138_0). +Le(i880,cr138_0). +Le(i890,cr138_0). +Le(i920,cr138_0). +Le(i960,cr138_0). +Le(i990,cr138_0). +Le(i1030,cr138_0). +Le(i1060,cr138_0). +Le(i1100,cr138_0). +Le(i1130,cr138_0). +Le(i1170,cr138_0). +Le(i1200,cr138_0). +Le(i1240,cr138_0). +Le(i1260,cr138_0). +Le(i1270,cr138_0). +Le(i1290,cr138_0). +Le(i1310,cr138_0). +Le(i1320,cr138_0). +Le(i1330,cr138_0). +Le(i1350,cr138_0). +Le(i1360,cr138_0). +Le(i1380,cr138_0). +Le(i1390,cr138_0). +Le(i1420,cr138_0). +Le(i1430,cr138_0). +Le(i1450,cr138_0). +Le(i1460,cr138_0). +Le(i1490,cr138_0). +Le(i1520,cr138_0). +Le(i1530,cr138_0). +Le(i1540,cr138_0). +Le(i1560,cr138_0). +Le(i1590,cr138_0). +Le(i1630,cr138_0). +Le(i1660,cr138_0). +Le(i1700,cr138_0). +Le(i1730,cr138_0). +Le(i1760,cr138_0). +Le(i1770,cr138_0). +Le(i1810,cr138_0). +Le(i1840,cr138_0). +Le(i1880,cr138_0). +Le(i1910,cr138_0). +Le(i1950,cr138_0). +Le(i1980,cr138_0). +Le(i2020,cr138_0). +Le(i2050,cr138_0). +Le(i2090,cr138_0). +Le(i2120,cr138_0). +Le(i2160,cr138_0). +Le(i2190,cr138_0). +Le(i2200,cr138_0). +Le(i2230,cr138_0). +Le(i2270,cr138_0). +Le(i2300,cr138_0). +Le(i2340,cr138_0). +Le(i2370,cr138_0). +Le(i2410,cr138_0). +Le(i2420,cr138_0). +Le(i2440,cr138_0). +Le(i2480,cr138_0). +Le(i2510,cr138_0). +Le(i2550,cr138_0). +Le(i2580,cr138_0). 
+Le(i2620,cr138_0). +Le(i2640,cr138_0). +Le(i2660,cr138_0). +Le(i2730,cr138_0). +Le(i2760,cr138_0). +Le(i2800,cr138_0). +Le(i2830,cr138_0). +Le(i2860,cr138_0). +Le(i2870,cr138_0). +Le(i2940,cr138_0). +Le(i2970,cr138_0). +Le(i3010,cr138_0). +Le(i3040,cr138_0). +Le(i3080,cr138_0). +Le(i3120,cr138_0). +Le(i3150,cr138_0). +Le(i3220,cr138_0). +Le(i3260,cr138_0). +Le(i3290,cr138_0). +Le(i3300,cr138_0). +Le(i3330,cr138_0). +Le(i3400,cr138_0). +Le(i3430,cr138_0). +Le(i3500,cr138_0). +Le(i3520,cr138_0). +Le(i3580,cr138_0). +Le(i3610,cr138_0). +Le(i3650,cr138_0). +Le(i3680,cr138_0). +Le(i3720,cr138_0). +Le(i3740,cr138_0). +Le(i3790,cr138_0). +Le(i3820,cr138_0). +Le(i3860,cr138_0). +Le(i3960,cr138_0). +Le(i4040,cr138_0). +Le(i4140,cr138_0). +Le(i4180,cr138_0). +Le(i4400,cr138_0). +Le(i4620,cr138_0). +Le(cr138_0,i5280). +Le(cr138_0,i5500). +Le(cr138_0,i5720). +Le(cr138_0,i5940). +Le(cr138_0,i6160). +Le(cr138_0,i6380). +Le(cr138_0,i6600). +Le(cr138_0,i6820). +Le(cr138_0,i7040). +Le(cr138_0,i7260). +Le(cr138_0,i7480). +Le(cr138_0,i7700). +Le(cr138_0,i7920). +Le(cr138_0,i8140). +Le(cr138_0,i8360). +Le(cr138_0,i8580). +Eq(i5060,i5060). +Le(i5060,cr139_0). +Le(cr139_0,i5280). +Le(i-30,cr139_0). +Le(i0,cr139_0). +Le(i13,cr139_0). +Le(i26,cr139_0). +Le(i39,cr139_0). +Le(i52,cr139_0). +Le(i60,cr139_0). +Le(i65,cr139_0). +Le(i70,cr139_0). +Le(i78,cr139_0). +Le(i90,cr139_0). +Le(i91,cr139_0). +Le(i104,cr139_0). +Le(i117,cr139_0). +Le(i130,cr139_0). +Le(i143,cr139_0). +Le(i156,cr139_0). +Le(i169,cr139_0). +Le(i182,cr139_0). +Le(i195,cr139_0). +Le(i208,cr139_0). +Le(i221,cr139_0). +Le(i234,cr139_0). +Le(i247,cr139_0). +Le(i260,cr139_0). +Le(i460,cr139_0). +Le(i530,cr139_0). +Le(i600,cr139_0). +Le(i660,cr139_0). +Le(i670,cr139_0). +Le(i710,cr139_0). +Le(i740,cr139_0). +Le(i810,cr139_0). +Le(i850,cr139_0). +Le(i880,cr139_0). +Le(i890,cr139_0). +Le(i920,cr139_0). +Le(i960,cr139_0). +Le(i990,cr139_0). +Le(i1030,cr139_0). +Le(i1060,cr139_0). +Le(i1100,cr139_0). +Le(i1130,cr139_0). +Le(i1170,cr139_0). +Le(i1200,cr139_0). +Le(i1240,cr139_0). +Le(i1260,cr139_0). +Le(i1270,cr139_0). +Le(i1290,cr139_0). +Le(i1310,cr139_0). +Le(i1320,cr139_0). +Le(i1330,cr139_0). +Le(i1350,cr139_0). +Le(i1360,cr139_0). +Le(i1380,cr139_0). +Le(i1390,cr139_0). +Le(i1420,cr139_0). +Le(i1430,cr139_0). +Le(i1450,cr139_0). +Le(i1460,cr139_0). +Le(i1490,cr139_0). +Le(i1520,cr139_0). +Le(i1530,cr139_0). +Le(i1540,cr139_0). +Le(i1560,cr139_0). +Le(i1590,cr139_0). +Le(i1630,cr139_0). +Le(i1660,cr139_0). +Le(i1700,cr139_0). +Le(i1730,cr139_0). +Le(i1760,cr139_0). +Le(i1770,cr139_0). +Le(i1810,cr139_0). +Le(i1840,cr139_0). +Le(i1880,cr139_0). +Le(i1910,cr139_0). +Le(i1950,cr139_0). +Le(i1980,cr139_0). +Le(i2020,cr139_0). +Le(i2050,cr139_0). +Le(i2090,cr139_0). +Le(i2120,cr139_0). +Le(i2160,cr139_0). +Le(i2190,cr139_0). +Le(i2200,cr139_0). +Le(i2230,cr139_0). +Le(i2270,cr139_0). +Le(i2300,cr139_0). +Le(i2340,cr139_0). +Le(i2370,cr139_0). +Le(i2410,cr139_0). +Le(i2420,cr139_0). +Le(i2440,cr139_0). +Le(i2480,cr139_0). +Le(i2510,cr139_0). +Le(i2550,cr139_0). +Le(i2580,cr139_0). +Le(i2620,cr139_0). +Le(i2640,cr139_0). +Le(i2660,cr139_0). +Le(i2730,cr139_0). +Le(i2760,cr139_0). +Le(i2800,cr139_0). +Le(i2830,cr139_0). +Le(i2860,cr139_0). +Le(i2870,cr139_0). +Le(i2940,cr139_0). +Le(i2970,cr139_0). +Le(i3010,cr139_0). +Le(i3040,cr139_0). +Le(i3080,cr139_0). +Le(i3120,cr139_0). +Le(i3150,cr139_0). +Le(i3220,cr139_0). +Le(i3260,cr139_0). +Le(i3290,cr139_0). +Le(i3300,cr139_0). +Le(i3330,cr139_0). +Le(i3400,cr139_0). +Le(i3430,cr139_0). +Le(i3500,cr139_0). 
+Le(i3520,cr139_0). +Le(i3580,cr139_0). +Le(i3610,cr139_0). +Le(i3650,cr139_0). +Le(i3680,cr139_0). +Le(i3720,cr139_0). +Le(i3740,cr139_0). +Le(i3790,cr139_0). +Le(i3820,cr139_0). +Le(i3860,cr139_0). +Le(i3960,cr139_0). +Le(i4040,cr139_0). +Le(i4140,cr139_0). +Le(i4180,cr139_0). +Le(i4400,cr139_0). +Le(i4620,cr139_0). +Le(i4840,cr139_0). +Le(cr139_0,i5500). +Le(cr139_0,i5720). +Le(cr139_0,i5940). +Le(cr139_0,i6160). +Le(cr139_0,i6380). +Le(cr139_0,i6600). +Le(cr139_0,i6820). +Le(cr139_0,i7040). +Le(cr139_0,i7260). +Le(cr139_0,i7480). +Le(cr139_0,i7700). +Le(cr139_0,i7920). +Le(cr139_0,i8140). +Le(cr139_0,i8360). +Le(cr139_0,i8580). +Eq(i5280,i5280). +Le(i5280,cr140_0). +Le(cr140_0,i5500). +Le(i-30,cr140_0). +Le(i0,cr140_0). +Le(i13,cr140_0). +Le(i26,cr140_0). +Le(i39,cr140_0). +Le(i52,cr140_0). +Le(i60,cr140_0). +Le(i65,cr140_0). +Le(i70,cr140_0). +Le(i78,cr140_0). +Le(i90,cr140_0). +Le(i91,cr140_0). +Le(i104,cr140_0). +Le(i117,cr140_0). +Le(i130,cr140_0). +Le(i143,cr140_0). +Le(i156,cr140_0). +Le(i169,cr140_0). +Le(i182,cr140_0). +Le(i195,cr140_0). +Le(i208,cr140_0). +Le(i221,cr140_0). +Le(i234,cr140_0). +Le(i247,cr140_0). +Le(i260,cr140_0). +Le(i460,cr140_0). +Le(i530,cr140_0). +Le(i600,cr140_0). +Le(i660,cr140_0). +Le(i670,cr140_0). +Le(i710,cr140_0). +Le(i740,cr140_0). +Le(i810,cr140_0). +Le(i850,cr140_0). +Le(i880,cr140_0). +Le(i890,cr140_0). +Le(i920,cr140_0). +Le(i960,cr140_0). +Le(i990,cr140_0). +Le(i1030,cr140_0). +Le(i1060,cr140_0). +Le(i1100,cr140_0). +Le(i1130,cr140_0). +Le(i1170,cr140_0). +Le(i1200,cr140_0). +Le(i1240,cr140_0). +Le(i1260,cr140_0). +Le(i1270,cr140_0). +Le(i1290,cr140_0). +Le(i1310,cr140_0). +Le(i1320,cr140_0). +Le(i1330,cr140_0). +Le(i1350,cr140_0). +Le(i1360,cr140_0). +Le(i1380,cr140_0). +Le(i1390,cr140_0). +Le(i1420,cr140_0). +Le(i1430,cr140_0). +Le(i1450,cr140_0). +Le(i1460,cr140_0). +Le(i1490,cr140_0). +Le(i1520,cr140_0). +Le(i1530,cr140_0). +Le(i1540,cr140_0). +Le(i1560,cr140_0). +Le(i1590,cr140_0). +Le(i1630,cr140_0). +Le(i1660,cr140_0). +Le(i1700,cr140_0). +Le(i1730,cr140_0). +Le(i1760,cr140_0). +Le(i1770,cr140_0). +Le(i1810,cr140_0). +Le(i1840,cr140_0). +Le(i1880,cr140_0). +Le(i1910,cr140_0). +Le(i1950,cr140_0). +Le(i1980,cr140_0). +Le(i2020,cr140_0). +Le(i2050,cr140_0). +Le(i2090,cr140_0). +Le(i2120,cr140_0). +Le(i2160,cr140_0). +Le(i2190,cr140_0). +Le(i2200,cr140_0). +Le(i2230,cr140_0). +Le(i2270,cr140_0). +Le(i2300,cr140_0). +Le(i2340,cr140_0). +Le(i2370,cr140_0). +Le(i2410,cr140_0). +Le(i2420,cr140_0). +Le(i2440,cr140_0). +Le(i2480,cr140_0). +Le(i2510,cr140_0). +Le(i2550,cr140_0). +Le(i2580,cr140_0). +Le(i2620,cr140_0). +Le(i2640,cr140_0). +Le(i2660,cr140_0). +Le(i2730,cr140_0). +Le(i2760,cr140_0). +Le(i2800,cr140_0). +Le(i2830,cr140_0). +Le(i2860,cr140_0). +Le(i2870,cr140_0). +Le(i2940,cr140_0). +Le(i2970,cr140_0). +Le(i3010,cr140_0). +Le(i3040,cr140_0). +Le(i3080,cr140_0). +Le(i3120,cr140_0). +Le(i3150,cr140_0). +Le(i3220,cr140_0). +Le(i3260,cr140_0). +Le(i3290,cr140_0). +Le(i3300,cr140_0). +Le(i3330,cr140_0). +Le(i3400,cr140_0). +Le(i3430,cr140_0). +Le(i3500,cr140_0). +Le(i3520,cr140_0). +Le(i3580,cr140_0). +Le(i3610,cr140_0). +Le(i3650,cr140_0). +Le(i3680,cr140_0). +Le(i3720,cr140_0). +Le(i3740,cr140_0). +Le(i3790,cr140_0). +Le(i3820,cr140_0). +Le(i3860,cr140_0). +Le(i3960,cr140_0). +Le(i4040,cr140_0). +Le(i4140,cr140_0). +Le(i4180,cr140_0). +Le(i4400,cr140_0). +Le(i4620,cr140_0). +Le(i4840,cr140_0). +Le(i5060,cr140_0). +Le(cr140_0,i5720). +Le(cr140_0,i5940). +Le(cr140_0,i6160). +Le(cr140_0,i6380). +Le(cr140_0,i6600). +Le(cr140_0,i6820). 
+Le(cr140_0,i7040). +Le(cr140_0,i7260). +Le(cr140_0,i7480). +Le(cr140_0,i7700). +Le(cr140_0,i7920). +Le(cr140_0,i8140). +Le(cr140_0,i8360). +Le(cr140_0,i8580). +Eq(i5500,i5500). +Le(i5500,cr141_0). +Le(cr141_0,i5720). +Le(i-30,cr141_0). +Le(i0,cr141_0). +Le(i13,cr141_0). +Le(i26,cr141_0). +Le(i39,cr141_0). +Le(i52,cr141_0). +Le(i60,cr141_0). +Le(i65,cr141_0). +Le(i70,cr141_0). +Le(i78,cr141_0). +Le(i90,cr141_0). +Le(i91,cr141_0). +Le(i104,cr141_0). +Le(i117,cr141_0). +Le(i130,cr141_0). +Le(i143,cr141_0). +Le(i156,cr141_0). +Le(i169,cr141_0). +Le(i182,cr141_0). +Le(i195,cr141_0). +Le(i208,cr141_0). +Le(i221,cr141_0). +Le(i234,cr141_0). +Le(i247,cr141_0). +Le(i260,cr141_0). +Le(i460,cr141_0). +Le(i530,cr141_0). +Le(i600,cr141_0). +Le(i660,cr141_0). +Le(i670,cr141_0). +Le(i710,cr141_0). +Le(i740,cr141_0). +Le(i810,cr141_0). +Le(i850,cr141_0). +Le(i880,cr141_0). +Le(i890,cr141_0). +Le(i920,cr141_0). +Le(i960,cr141_0). +Le(i990,cr141_0). +Le(i1030,cr141_0). +Le(i1060,cr141_0). +Le(i1100,cr141_0). +Le(i1130,cr141_0). +Le(i1170,cr141_0). +Le(i1200,cr141_0). +Le(i1240,cr141_0). +Le(i1260,cr141_0). +Le(i1270,cr141_0). +Le(i1290,cr141_0). +Le(i1310,cr141_0). +Le(i1320,cr141_0). +Le(i1330,cr141_0). +Le(i1350,cr141_0). +Le(i1360,cr141_0). +Le(i1380,cr141_0). +Le(i1390,cr141_0). +Le(i1420,cr141_0). +Le(i1430,cr141_0). +Le(i1450,cr141_0). +Le(i1460,cr141_0). +Le(i1490,cr141_0). +Le(i1520,cr141_0). +Le(i1530,cr141_0). +Le(i1540,cr141_0). +Le(i1560,cr141_0). +Le(i1590,cr141_0). +Le(i1630,cr141_0). +Le(i1660,cr141_0). +Le(i1700,cr141_0). +Le(i1730,cr141_0). +Le(i1760,cr141_0). +Le(i1770,cr141_0). +Le(i1810,cr141_0). +Le(i1840,cr141_0). +Le(i1880,cr141_0). +Le(i1910,cr141_0). +Le(i1950,cr141_0). +Le(i1980,cr141_0). +Le(i2020,cr141_0). +Le(i2050,cr141_0). +Le(i2090,cr141_0). +Le(i2120,cr141_0). +Le(i2160,cr141_0). +Le(i2190,cr141_0). +Le(i2200,cr141_0). +Le(i2230,cr141_0). +Le(i2270,cr141_0). +Le(i2300,cr141_0). +Le(i2340,cr141_0). +Le(i2370,cr141_0). +Le(i2410,cr141_0). +Le(i2420,cr141_0). +Le(i2440,cr141_0). +Le(i2480,cr141_0). +Le(i2510,cr141_0). +Le(i2550,cr141_0). +Le(i2580,cr141_0). +Le(i2620,cr141_0). +Le(i2640,cr141_0). +Le(i2660,cr141_0). +Le(i2730,cr141_0). +Le(i2760,cr141_0). +Le(i2800,cr141_0). +Le(i2830,cr141_0). +Le(i2860,cr141_0). +Le(i2870,cr141_0). +Le(i2940,cr141_0). +Le(i2970,cr141_0). +Le(i3010,cr141_0). +Le(i3040,cr141_0). +Le(i3080,cr141_0). +Le(i3120,cr141_0). +Le(i3150,cr141_0). +Le(i3220,cr141_0). +Le(i3260,cr141_0). +Le(i3290,cr141_0). +Le(i3300,cr141_0). +Le(i3330,cr141_0). +Le(i3400,cr141_0). +Le(i3430,cr141_0). +Le(i3500,cr141_0). +Le(i3520,cr141_0). +Le(i3580,cr141_0). +Le(i3610,cr141_0). +Le(i3650,cr141_0). +Le(i3680,cr141_0). +Le(i3720,cr141_0). +Le(i3740,cr141_0). +Le(i3790,cr141_0). +Le(i3820,cr141_0). +Le(i3860,cr141_0). +Le(i3960,cr141_0). +Le(i4040,cr141_0). +Le(i4140,cr141_0). +Le(i4180,cr141_0). +Le(i4400,cr141_0). +Le(i4620,cr141_0). +Le(i4840,cr141_0). +Le(i5060,cr141_0). +Le(i5280,cr141_0). +Le(cr141_0,i5940). +Le(cr141_0,i6160). +Le(cr141_0,i6380). +Le(cr141_0,i6600). +Le(cr141_0,i6820). +Le(cr141_0,i7040). +Le(cr141_0,i7260). +Le(cr141_0,i7480). +Le(cr141_0,i7700). +Le(cr141_0,i7920). +Le(cr141_0,i8140). +Le(cr141_0,i8360). +Le(cr141_0,i8580). +Eq(i5720,i5720). +Le(i5720,cr142_0). +Le(cr142_0,i5940). +Le(i-30,cr142_0). +Le(i0,cr142_0). +Le(i13,cr142_0). +Le(i26,cr142_0). +Le(i39,cr142_0). +Le(i52,cr142_0). +Le(i60,cr142_0). +Le(i65,cr142_0). +Le(i70,cr142_0). +Le(i78,cr142_0). +Le(i90,cr142_0). +Le(i91,cr142_0). +Le(i104,cr142_0). +Le(i117,cr142_0). 
+Le(i130,cr142_0). +Le(i143,cr142_0). +Le(i156,cr142_0). +Le(i169,cr142_0). +Le(i182,cr142_0). +Le(i195,cr142_0). +Le(i208,cr142_0). +Le(i221,cr142_0). +Le(i234,cr142_0). +Le(i247,cr142_0). +Le(i260,cr142_0). +Le(i460,cr142_0). +Le(i530,cr142_0). +Le(i600,cr142_0). +Le(i660,cr142_0). +Le(i670,cr142_0). +Le(i710,cr142_0). +Le(i740,cr142_0). +Le(i810,cr142_0). +Le(i850,cr142_0). +Le(i880,cr142_0). +Le(i890,cr142_0). +Le(i920,cr142_0). +Le(i960,cr142_0). +Le(i990,cr142_0). +Le(i1030,cr142_0). +Le(i1060,cr142_0). +Le(i1100,cr142_0). +Le(i1130,cr142_0). +Le(i1170,cr142_0). +Le(i1200,cr142_0). +Le(i1240,cr142_0). +Le(i1260,cr142_0). +Le(i1270,cr142_0). +Le(i1290,cr142_0). +Le(i1310,cr142_0). +Le(i1320,cr142_0). +Le(i1330,cr142_0). +Le(i1350,cr142_0). +Le(i1360,cr142_0). +Le(i1380,cr142_0). +Le(i1390,cr142_0). +Le(i1420,cr142_0). +Le(i1430,cr142_0). +Le(i1450,cr142_0). +Le(i1460,cr142_0). +Le(i1490,cr142_0). +Le(i1520,cr142_0). +Le(i1530,cr142_0). +Le(i1540,cr142_0). +Le(i1560,cr142_0). +Le(i1590,cr142_0). +Le(i1630,cr142_0). +Le(i1660,cr142_0). +Le(i1700,cr142_0). +Le(i1730,cr142_0). +Le(i1760,cr142_0). +Le(i1770,cr142_0). +Le(i1810,cr142_0). +Le(i1840,cr142_0). +Le(i1880,cr142_0). +Le(i1910,cr142_0). +Le(i1950,cr142_0). +Le(i1980,cr142_0). +Le(i2020,cr142_0). +Le(i2050,cr142_0). +Le(i2090,cr142_0). +Le(i2120,cr142_0). +Le(i2160,cr142_0). +Le(i2190,cr142_0). +Le(i2200,cr142_0). +Le(i2230,cr142_0). +Le(i2270,cr142_0). +Le(i2300,cr142_0). +Le(i2340,cr142_0). +Le(i2370,cr142_0). +Le(i2410,cr142_0). +Le(i2420,cr142_0). +Le(i2440,cr142_0). +Le(i2480,cr142_0). +Le(i2510,cr142_0). +Le(i2550,cr142_0). +Le(i2580,cr142_0). +Le(i2620,cr142_0). +Le(i2640,cr142_0). +Le(i2660,cr142_0). +Le(i2730,cr142_0). +Le(i2760,cr142_0). +Le(i2800,cr142_0). +Le(i2830,cr142_0). +Le(i2860,cr142_0). +Le(i2870,cr142_0). +Le(i2940,cr142_0). +Le(i2970,cr142_0). +Le(i3010,cr142_0). +Le(i3040,cr142_0). +Le(i3080,cr142_0). +Le(i3120,cr142_0). +Le(i3150,cr142_0). +Le(i3220,cr142_0). +Le(i3260,cr142_0). +Le(i3290,cr142_0). +Le(i3300,cr142_0). +Le(i3330,cr142_0). +Le(i3400,cr142_0). +Le(i3430,cr142_0). +Le(i3500,cr142_0). +Le(i3520,cr142_0). +Le(i3580,cr142_0). +Le(i3610,cr142_0). +Le(i3650,cr142_0). +Le(i3680,cr142_0). +Le(i3720,cr142_0). +Le(i3740,cr142_0). +Le(i3790,cr142_0). +Le(i3820,cr142_0). +Le(i3860,cr142_0). +Le(i3960,cr142_0). +Le(i4040,cr142_0). +Le(i4140,cr142_0). +Le(i4180,cr142_0). +Le(i4400,cr142_0). +Le(i4620,cr142_0). +Le(i4840,cr142_0). +Le(i5060,cr142_0). +Le(i5280,cr142_0). +Le(i5500,cr142_0). +Le(cr142_0,i6160). +Le(cr142_0,i6380). +Le(cr142_0,i6600). +Le(cr142_0,i6820). +Le(cr142_0,i7040). +Le(cr142_0,i7260). +Le(cr142_0,i7480). +Le(cr142_0,i7700). +Le(cr142_0,i7920). +Le(cr142_0,i8140). +Le(cr142_0,i8360). +Le(cr142_0,i8580). +Eq(i5940,i5940). +Le(i5940,cr143_0). +Le(cr143_0,i6160). +Le(i-30,cr143_0). +Le(i0,cr143_0). +Le(i13,cr143_0). +Le(i26,cr143_0). +Le(i39,cr143_0). +Le(i52,cr143_0). +Le(i60,cr143_0). +Le(i65,cr143_0). +Le(i70,cr143_0). +Le(i78,cr143_0). +Le(i90,cr143_0). +Le(i91,cr143_0). +Le(i104,cr143_0). +Le(i117,cr143_0). +Le(i130,cr143_0). +Le(i143,cr143_0). +Le(i156,cr143_0). +Le(i169,cr143_0). +Le(i182,cr143_0). +Le(i195,cr143_0). +Le(i208,cr143_0). +Le(i221,cr143_0). +Le(i234,cr143_0). +Le(i247,cr143_0). +Le(i260,cr143_0). +Le(i460,cr143_0). +Le(i530,cr143_0). +Le(i600,cr143_0). +Le(i660,cr143_0). +Le(i670,cr143_0). +Le(i710,cr143_0). +Le(i740,cr143_0). +Le(i810,cr143_0). +Le(i850,cr143_0). +Le(i880,cr143_0). +Le(i890,cr143_0). +Le(i920,cr143_0). +Le(i960,cr143_0). +Le(i990,cr143_0). 
+Le(i1030,cr143_0). +Le(i1060,cr143_0). +Le(i1100,cr143_0). +Le(i1130,cr143_0). +Le(i1170,cr143_0). +Le(i1200,cr143_0). +Le(i1240,cr143_0). +Le(i1260,cr143_0). +Le(i1270,cr143_0). +Le(i1290,cr143_0). +Le(i1310,cr143_0). +Le(i1320,cr143_0). +Le(i1330,cr143_0). +Le(i1350,cr143_0). +Le(i1360,cr143_0). +Le(i1380,cr143_0). +Le(i1390,cr143_0). +Le(i1420,cr143_0). +Le(i1430,cr143_0). +Le(i1450,cr143_0). +Le(i1460,cr143_0). +Le(i1490,cr143_0). +Le(i1520,cr143_0). +Le(i1530,cr143_0). +Le(i1540,cr143_0). +Le(i1560,cr143_0). +Le(i1590,cr143_0). +Le(i1630,cr143_0). +Le(i1660,cr143_0). +Le(i1700,cr143_0). +Le(i1730,cr143_0). +Le(i1760,cr143_0). +Le(i1770,cr143_0). +Le(i1810,cr143_0). +Le(i1840,cr143_0). +Le(i1880,cr143_0). +Le(i1910,cr143_0). +Le(i1950,cr143_0). +Le(i1980,cr143_0). +Le(i2020,cr143_0). +Le(i2050,cr143_0). +Le(i2090,cr143_0). +Le(i2120,cr143_0). +Le(i2160,cr143_0). +Le(i2190,cr143_0). +Le(i2200,cr143_0). +Le(i2230,cr143_0). +Le(i2270,cr143_0). +Le(i2300,cr143_0). +Le(i2340,cr143_0). +Le(i2370,cr143_0). +Le(i2410,cr143_0). +Le(i2420,cr143_0). +Le(i2440,cr143_0). +Le(i2480,cr143_0). +Le(i2510,cr143_0). +Le(i2550,cr143_0). +Le(i2580,cr143_0). +Le(i2620,cr143_0). +Le(i2640,cr143_0). +Le(i2660,cr143_0). +Le(i2730,cr143_0). +Le(i2760,cr143_0). +Le(i2800,cr143_0). +Le(i2830,cr143_0). +Le(i2860,cr143_0). +Le(i2870,cr143_0). +Le(i2940,cr143_0). +Le(i2970,cr143_0). +Le(i3010,cr143_0). +Le(i3040,cr143_0). +Le(i3080,cr143_0). +Le(i3120,cr143_0). +Le(i3150,cr143_0). +Le(i3220,cr143_0). +Le(i3260,cr143_0). +Le(i3290,cr143_0). +Le(i3300,cr143_0). +Le(i3330,cr143_0). +Le(i3400,cr143_0). +Le(i3430,cr143_0). +Le(i3500,cr143_0). +Le(i3520,cr143_0). +Le(i3580,cr143_0). +Le(i3610,cr143_0). +Le(i3650,cr143_0). +Le(i3680,cr143_0). +Le(i3720,cr143_0). +Le(i3740,cr143_0). +Le(i3790,cr143_0). +Le(i3820,cr143_0). +Le(i3860,cr143_0). +Le(i3960,cr143_0). +Le(i4040,cr143_0). +Le(i4140,cr143_0). +Le(i4180,cr143_0). +Le(i4400,cr143_0). +Le(i4620,cr143_0). +Le(i4840,cr143_0). +Le(i5060,cr143_0). +Le(i5280,cr143_0). +Le(i5500,cr143_0). +Le(i5720,cr143_0). +Le(cr143_0,i6380). +Le(cr143_0,i6600). +Le(cr143_0,i6820). +Le(cr143_0,i7040). +Le(cr143_0,i7260). +Le(cr143_0,i7480). +Le(cr143_0,i7700). +Le(cr143_0,i7920). +Le(cr143_0,i8140). +Le(cr143_0,i8360). +Le(cr143_0,i8580). +Eq(i6160,i6160). +Le(i6160,cr144_0). +Le(cr144_0,i6380). +Le(i-30,cr144_0). +Le(i0,cr144_0). +Le(i13,cr144_0). +Le(i26,cr144_0). +Le(i39,cr144_0). +Le(i52,cr144_0). +Le(i60,cr144_0). +Le(i65,cr144_0). +Le(i70,cr144_0). +Le(i78,cr144_0). +Le(i90,cr144_0). +Le(i91,cr144_0). +Le(i104,cr144_0). +Le(i117,cr144_0). +Le(i130,cr144_0). +Le(i143,cr144_0). +Le(i156,cr144_0). +Le(i169,cr144_0). +Le(i182,cr144_0). +Le(i195,cr144_0). +Le(i208,cr144_0). +Le(i221,cr144_0). +Le(i234,cr144_0). +Le(i247,cr144_0). +Le(i260,cr144_0). +Le(i460,cr144_0). +Le(i530,cr144_0). +Le(i600,cr144_0). +Le(i660,cr144_0). +Le(i670,cr144_0). +Le(i710,cr144_0). +Le(i740,cr144_0). +Le(i810,cr144_0). +Le(i850,cr144_0). +Le(i880,cr144_0). +Le(i890,cr144_0). +Le(i920,cr144_0). +Le(i960,cr144_0). +Le(i990,cr144_0). +Le(i1030,cr144_0). +Le(i1060,cr144_0). +Le(i1100,cr144_0). +Le(i1130,cr144_0). +Le(i1170,cr144_0). +Le(i1200,cr144_0). +Le(i1240,cr144_0). +Le(i1260,cr144_0). +Le(i1270,cr144_0). +Le(i1290,cr144_0). +Le(i1310,cr144_0). +Le(i1320,cr144_0). +Le(i1330,cr144_0). +Le(i1350,cr144_0). +Le(i1360,cr144_0). +Le(i1380,cr144_0). +Le(i1390,cr144_0). +Le(i1420,cr144_0). +Le(i1430,cr144_0). +Le(i1450,cr144_0). +Le(i1460,cr144_0). +Le(i1490,cr144_0). +Le(i1520,cr144_0). +Le(i1530,cr144_0). 
+Le(i1540,cr144_0). +Le(i1560,cr144_0). +Le(i1590,cr144_0). +Le(i1630,cr144_0). +Le(i1660,cr144_0). +Le(i1700,cr144_0). +Le(i1730,cr144_0). +Le(i1760,cr144_0). +Le(i1770,cr144_0). +Le(i1810,cr144_0). +Le(i1840,cr144_0). +Le(i1880,cr144_0). +Le(i1910,cr144_0). +Le(i1950,cr144_0). +Le(i1980,cr144_0). +Le(i2020,cr144_0). +Le(i2050,cr144_0). +Le(i2090,cr144_0). +Le(i2120,cr144_0). +Le(i2160,cr144_0). +Le(i2190,cr144_0). +Le(i2200,cr144_0). +Le(i2230,cr144_0). +Le(i2270,cr144_0). +Le(i2300,cr144_0). +Le(i2340,cr144_0). +Le(i2370,cr144_0). +Le(i2410,cr144_0). +Le(i2420,cr144_0). +Le(i2440,cr144_0). +Le(i2480,cr144_0). +Le(i2510,cr144_0). +Le(i2550,cr144_0). +Le(i2580,cr144_0). +Le(i2620,cr144_0). +Le(i2640,cr144_0). +Le(i2660,cr144_0). +Le(i2730,cr144_0). +Le(i2760,cr144_0). +Le(i2800,cr144_0). +Le(i2830,cr144_0). +Le(i2860,cr144_0). +Le(i2870,cr144_0). +Le(i2940,cr144_0). +Le(i2970,cr144_0). +Le(i3010,cr144_0). +Le(i3040,cr144_0). +Le(i3080,cr144_0). +Le(i3120,cr144_0). +Le(i3150,cr144_0). +Le(i3220,cr144_0). +Le(i3260,cr144_0). +Le(i3290,cr144_0). +Le(i3300,cr144_0). +Le(i3330,cr144_0). +Le(i3400,cr144_0). +Le(i3430,cr144_0). +Le(i3500,cr144_0). +Le(i3520,cr144_0). +Le(i3580,cr144_0). +Le(i3610,cr144_0). +Le(i3650,cr144_0). +Le(i3680,cr144_0). +Le(i3720,cr144_0). +Le(i3740,cr144_0). +Le(i3790,cr144_0). +Le(i3820,cr144_0). +Le(i3860,cr144_0). +Le(i3960,cr144_0). +Le(i4040,cr144_0). +Le(i4140,cr144_0). +Le(i4180,cr144_0). +Le(i4400,cr144_0). +Le(i4620,cr144_0). +Le(i4840,cr144_0). +Le(i5060,cr144_0). +Le(i5280,cr144_0). +Le(i5500,cr144_0). +Le(i5720,cr144_0). +Le(i5940,cr144_0). +Le(cr144_0,i6600). +Le(cr144_0,i6820). +Le(cr144_0,i7040). +Le(cr144_0,i7260). +Le(cr144_0,i7480). +Le(cr144_0,i7700). +Le(cr144_0,i7920). +Le(cr144_0,i8140). +Le(cr144_0,i8360). +Le(cr144_0,i8580). +Eq(i6380,i6380). +Le(i6380,cr145_0). +Le(cr145_0,i6600). +Le(i-30,cr145_0). +Le(i0,cr145_0). +Le(i13,cr145_0). +Le(i26,cr145_0). +Le(i39,cr145_0). +Le(i52,cr145_0). +Le(i60,cr145_0). +Le(i65,cr145_0). +Le(i70,cr145_0). +Le(i78,cr145_0). +Le(i90,cr145_0). +Le(i91,cr145_0). +Le(i104,cr145_0). +Le(i117,cr145_0). +Le(i130,cr145_0). +Le(i143,cr145_0). +Le(i156,cr145_0). +Le(i169,cr145_0). +Le(i182,cr145_0). +Le(i195,cr145_0). +Le(i208,cr145_0). +Le(i221,cr145_0). +Le(i234,cr145_0). +Le(i247,cr145_0). +Le(i260,cr145_0). +Le(i460,cr145_0). +Le(i530,cr145_0). +Le(i600,cr145_0). +Le(i660,cr145_0). +Le(i670,cr145_0). +Le(i710,cr145_0). +Le(i740,cr145_0). +Le(i810,cr145_0). +Le(i850,cr145_0). +Le(i880,cr145_0). +Le(i890,cr145_0). +Le(i920,cr145_0). +Le(i960,cr145_0). +Le(i990,cr145_0). +Le(i1030,cr145_0). +Le(i1060,cr145_0). +Le(i1100,cr145_0). +Le(i1130,cr145_0). +Le(i1170,cr145_0). +Le(i1200,cr145_0). +Le(i1240,cr145_0). +Le(i1260,cr145_0). +Le(i1270,cr145_0). +Le(i1290,cr145_0). +Le(i1310,cr145_0). +Le(i1320,cr145_0). +Le(i1330,cr145_0). +Le(i1350,cr145_0). +Le(i1360,cr145_0). +Le(i1380,cr145_0). +Le(i1390,cr145_0). +Le(i1420,cr145_0). +Le(i1430,cr145_0). +Le(i1450,cr145_0). +Le(i1460,cr145_0). +Le(i1490,cr145_0). +Le(i1520,cr145_0). +Le(i1530,cr145_0). +Le(i1540,cr145_0). +Le(i1560,cr145_0). +Le(i1590,cr145_0). +Le(i1630,cr145_0). +Le(i1660,cr145_0). +Le(i1700,cr145_0). +Le(i1730,cr145_0). +Le(i1760,cr145_0). +Le(i1770,cr145_0). +Le(i1810,cr145_0). +Le(i1840,cr145_0). +Le(i1880,cr145_0). +Le(i1910,cr145_0). +Le(i1950,cr145_0). +Le(i1980,cr145_0). +Le(i2020,cr145_0). +Le(i2050,cr145_0). +Le(i2090,cr145_0). +Le(i2120,cr145_0). +Le(i2160,cr145_0). +Le(i2190,cr145_0). +Le(i2200,cr145_0). +Le(i2230,cr145_0). +Le(i2270,cr145_0). 
+Le(i2300,cr145_0). +Le(i2340,cr145_0). +Le(i2370,cr145_0). +Le(i2410,cr145_0). +Le(i2420,cr145_0). +Le(i2440,cr145_0). +Le(i2480,cr145_0). +Le(i2510,cr145_0). +Le(i2550,cr145_0). +Le(i2580,cr145_0). +Le(i2620,cr145_0). +Le(i2640,cr145_0). +Le(i2660,cr145_0). +Le(i2730,cr145_0). +Le(i2760,cr145_0). +Le(i2800,cr145_0). +Le(i2830,cr145_0). +Le(i2860,cr145_0). +Le(i2870,cr145_0). +Le(i2940,cr145_0). +Le(i2970,cr145_0). +Le(i3010,cr145_0). +Le(i3040,cr145_0). +Le(i3080,cr145_0). +Le(i3120,cr145_0). +Le(i3150,cr145_0). +Le(i3220,cr145_0). +Le(i3260,cr145_0). +Le(i3290,cr145_0). +Le(i3300,cr145_0). +Le(i3330,cr145_0). +Le(i3400,cr145_0). +Le(i3430,cr145_0). +Le(i3500,cr145_0). +Le(i3520,cr145_0). +Le(i3580,cr145_0). +Le(i3610,cr145_0). +Le(i3650,cr145_0). +Le(i3680,cr145_0). +Le(i3720,cr145_0). +Le(i3740,cr145_0). +Le(i3790,cr145_0). +Le(i3820,cr145_0). +Le(i3860,cr145_0). +Le(i3960,cr145_0). +Le(i4040,cr145_0). +Le(i4140,cr145_0). +Le(i4180,cr145_0). +Le(i4400,cr145_0). +Le(i4620,cr145_0). +Le(i4840,cr145_0). +Le(i5060,cr145_0). +Le(i5280,cr145_0). +Le(i5500,cr145_0). +Le(i5720,cr145_0). +Le(i5940,cr145_0). +Le(i6160,cr145_0). +Le(cr145_0,i6820). +Le(cr145_0,i7040). +Le(cr145_0,i7260). +Le(cr145_0,i7480). +Le(cr145_0,i7700). +Le(cr145_0,i7920). +Le(cr145_0,i8140). +Le(cr145_0,i8360). +Le(cr145_0,i8580). +Eq(i6600,i6600). +Le(i6600,cr146_0). +Le(cr146_0,i6820). +Le(i-30,cr146_0). +Le(i0,cr146_0). +Le(i13,cr146_0). +Le(i26,cr146_0). +Le(i39,cr146_0). +Le(i52,cr146_0). +Le(i60,cr146_0). +Le(i65,cr146_0). +Le(i70,cr146_0). +Le(i78,cr146_0). +Le(i90,cr146_0). +Le(i91,cr146_0). +Le(i104,cr146_0). +Le(i117,cr146_0). +Le(i130,cr146_0). +Le(i143,cr146_0). +Le(i156,cr146_0). +Le(i169,cr146_0). +Le(i182,cr146_0). +Le(i195,cr146_0). +Le(i208,cr146_0). +Le(i221,cr146_0). +Le(i234,cr146_0). +Le(i247,cr146_0). +Le(i260,cr146_0). +Le(i460,cr146_0). +Le(i530,cr146_0). +Le(i600,cr146_0). +Le(i660,cr146_0). +Le(i670,cr146_0). +Le(i710,cr146_0). +Le(i740,cr146_0). +Le(i810,cr146_0). +Le(i850,cr146_0). +Le(i880,cr146_0). +Le(i890,cr146_0). +Le(i920,cr146_0). +Le(i960,cr146_0). +Le(i990,cr146_0). +Le(i1030,cr146_0). +Le(i1060,cr146_0). +Le(i1100,cr146_0). +Le(i1130,cr146_0). +Le(i1170,cr146_0). +Le(i1200,cr146_0). +Le(i1240,cr146_0). +Le(i1260,cr146_0). +Le(i1270,cr146_0). +Le(i1290,cr146_0). +Le(i1310,cr146_0). +Le(i1320,cr146_0). +Le(i1330,cr146_0). +Le(i1350,cr146_0). +Le(i1360,cr146_0). +Le(i1380,cr146_0). +Le(i1390,cr146_0). +Le(i1420,cr146_0). +Le(i1430,cr146_0). +Le(i1450,cr146_0). +Le(i1460,cr146_0). +Le(i1490,cr146_0). +Le(i1520,cr146_0). +Le(i1530,cr146_0). +Le(i1540,cr146_0). +Le(i1560,cr146_0). +Le(i1590,cr146_0). +Le(i1630,cr146_0). +Le(i1660,cr146_0). +Le(i1700,cr146_0). +Le(i1730,cr146_0). +Le(i1760,cr146_0). +Le(i1770,cr146_0). +Le(i1810,cr146_0). +Le(i1840,cr146_0). +Le(i1880,cr146_0). +Le(i1910,cr146_0). +Le(i1950,cr146_0). +Le(i1980,cr146_0). +Le(i2020,cr146_0). +Le(i2050,cr146_0). +Le(i2090,cr146_0). +Le(i2120,cr146_0). +Le(i2160,cr146_0). +Le(i2190,cr146_0). +Le(i2200,cr146_0). +Le(i2230,cr146_0). +Le(i2270,cr146_0). +Le(i2300,cr146_0). +Le(i2340,cr146_0). +Le(i2370,cr146_0). +Le(i2410,cr146_0). +Le(i2420,cr146_0). +Le(i2440,cr146_0). +Le(i2480,cr146_0). +Le(i2510,cr146_0). +Le(i2550,cr146_0). +Le(i2580,cr146_0). +Le(i2620,cr146_0). +Le(i2640,cr146_0). +Le(i2660,cr146_0). +Le(i2730,cr146_0). +Le(i2760,cr146_0). +Le(i2800,cr146_0). +Le(i2830,cr146_0). +Le(i2860,cr146_0). +Le(i2870,cr146_0). +Le(i2940,cr146_0). +Le(i2970,cr146_0). +Le(i3010,cr146_0). +Le(i3040,cr146_0). +Le(i3080,cr146_0). 
+Le(i3120,cr146_0). +Le(i3150,cr146_0). +Le(i3220,cr146_0). +Le(i3260,cr146_0). +Le(i3290,cr146_0). +Le(i3300,cr146_0). +Le(i3330,cr146_0). +Le(i3400,cr146_0). +Le(i3430,cr146_0). +Le(i3500,cr146_0). +Le(i3520,cr146_0). +Le(i3580,cr146_0). +Le(i3610,cr146_0). +Le(i3650,cr146_0). +Le(i3680,cr146_0). +Le(i3720,cr146_0). +Le(i3740,cr146_0). +Le(i3790,cr146_0). +Le(i3820,cr146_0). +Le(i3860,cr146_0). +Le(i3960,cr146_0). +Le(i4040,cr146_0). +Le(i4140,cr146_0). +Le(i4180,cr146_0). +Le(i4400,cr146_0). +Le(i4620,cr146_0). +Le(i4840,cr146_0). +Le(i5060,cr146_0). +Le(i5280,cr146_0). +Le(i5500,cr146_0). +Le(i5720,cr146_0). +Le(i5940,cr146_0). +Le(i6160,cr146_0). +Le(i6380,cr146_0). +Le(cr146_0,i7040). +Le(cr146_0,i7260). +Le(cr146_0,i7480). +Le(cr146_0,i7700). +Le(cr146_0,i7920). +Le(cr146_0,i8140). +Le(cr146_0,i8360). +Le(cr146_0,i8580). +Eq(i6820,i6820). +Le(i6820,cr147_0). +Le(cr147_0,i7040). +Le(i-30,cr147_0). +Le(i0,cr147_0). +Le(i13,cr147_0). +Le(i26,cr147_0). +Le(i39,cr147_0). +Le(i52,cr147_0). +Le(i60,cr147_0). +Le(i65,cr147_0). +Le(i70,cr147_0). +Le(i78,cr147_0). +Le(i90,cr147_0). +Le(i91,cr147_0). +Le(i104,cr147_0). +Le(i117,cr147_0). +Le(i130,cr147_0). +Le(i143,cr147_0). +Le(i156,cr147_0). +Le(i169,cr147_0). +Le(i182,cr147_0). +Le(i195,cr147_0). +Le(i208,cr147_0). +Le(i221,cr147_0). +Le(i234,cr147_0). +Le(i247,cr147_0). +Le(i260,cr147_0). +Le(i460,cr147_0). +Le(i530,cr147_0). +Le(i600,cr147_0). +Le(i660,cr147_0). +Le(i670,cr147_0). +Le(i710,cr147_0). +Le(i740,cr147_0). +Le(i810,cr147_0). +Le(i850,cr147_0). +Le(i880,cr147_0). +Le(i890,cr147_0). +Le(i920,cr147_0). +Le(i960,cr147_0). +Le(i990,cr147_0). +Le(i1030,cr147_0). +Le(i1060,cr147_0). +Le(i1100,cr147_0). +Le(i1130,cr147_0). +Le(i1170,cr147_0). +Le(i1200,cr147_0). +Le(i1240,cr147_0). +Le(i1260,cr147_0). +Le(i1270,cr147_0). +Le(i1290,cr147_0). +Le(i1310,cr147_0). +Le(i1320,cr147_0). +Le(i1330,cr147_0). +Le(i1350,cr147_0). +Le(i1360,cr147_0). +Le(i1380,cr147_0). +Le(i1390,cr147_0). +Le(i1420,cr147_0). +Le(i1430,cr147_0). +Le(i1450,cr147_0). +Le(i1460,cr147_0). +Le(i1490,cr147_0). +Le(i1520,cr147_0). +Le(i1530,cr147_0). +Le(i1540,cr147_0). +Le(i1560,cr147_0). +Le(i1590,cr147_0). +Le(i1630,cr147_0). +Le(i1660,cr147_0). +Le(i1700,cr147_0). +Le(i1730,cr147_0). +Le(i1760,cr147_0). +Le(i1770,cr147_0). +Le(i1810,cr147_0). +Le(i1840,cr147_0). +Le(i1880,cr147_0). +Le(i1910,cr147_0). +Le(i1950,cr147_0). +Le(i1980,cr147_0). +Le(i2020,cr147_0). +Le(i2050,cr147_0). +Le(i2090,cr147_0). +Le(i2120,cr147_0). +Le(i2160,cr147_0). +Le(i2190,cr147_0). +Le(i2200,cr147_0). +Le(i2230,cr147_0). +Le(i2270,cr147_0). +Le(i2300,cr147_0). +Le(i2340,cr147_0). +Le(i2370,cr147_0). +Le(i2410,cr147_0). +Le(i2420,cr147_0). +Le(i2440,cr147_0). +Le(i2480,cr147_0). +Le(i2510,cr147_0). +Le(i2550,cr147_0). +Le(i2580,cr147_0). +Le(i2620,cr147_0). +Le(i2640,cr147_0). +Le(i2660,cr147_0). +Le(i2730,cr147_0). +Le(i2760,cr147_0). +Le(i2800,cr147_0). +Le(i2830,cr147_0). +Le(i2860,cr147_0). +Le(i2870,cr147_0). +Le(i2940,cr147_0). +Le(i2970,cr147_0). +Le(i3010,cr147_0). +Le(i3040,cr147_0). +Le(i3080,cr147_0). +Le(i3120,cr147_0). +Le(i3150,cr147_0). +Le(i3220,cr147_0). +Le(i3260,cr147_0). +Le(i3290,cr147_0). +Le(i3300,cr147_0). +Le(i3330,cr147_0). +Le(i3400,cr147_0). +Le(i3430,cr147_0). +Le(i3500,cr147_0). +Le(i3520,cr147_0). +Le(i3580,cr147_0). +Le(i3610,cr147_0). +Le(i3650,cr147_0). +Le(i3680,cr147_0). +Le(i3720,cr147_0). +Le(i3740,cr147_0). +Le(i3790,cr147_0). +Le(i3820,cr147_0). +Le(i3860,cr147_0). +Le(i3960,cr147_0). +Le(i4040,cr147_0). +Le(i4140,cr147_0). +Le(i4180,cr147_0). 
+Le(i4400,cr147_0). +Le(i4620,cr147_0). +Le(i4840,cr147_0). +Le(i5060,cr147_0). +Le(i5280,cr147_0). +Le(i5500,cr147_0). +Le(i5720,cr147_0). +Le(i5940,cr147_0). +Le(i6160,cr147_0). +Le(i6380,cr147_0). +Le(i6600,cr147_0). +Le(cr147_0,i7260). +Le(cr147_0,i7480). +Le(cr147_0,i7700). +Le(cr147_0,i7920). +Le(cr147_0,i8140). +Le(cr147_0,i8360). +Le(cr147_0,i8580). +Eq(i7040,i7040). +Le(i7040,cr148_0). +Le(cr148_0,i7260). +Le(i-30,cr148_0). +Le(i0,cr148_0). +Le(i13,cr148_0). +Le(i26,cr148_0). +Le(i39,cr148_0). +Le(i52,cr148_0). +Le(i60,cr148_0). +Le(i65,cr148_0). +Le(i70,cr148_0). +Le(i78,cr148_0). +Le(i90,cr148_0). +Le(i91,cr148_0). +Le(i104,cr148_0). +Le(i117,cr148_0). +Le(i130,cr148_0). +Le(i143,cr148_0). +Le(i156,cr148_0). +Le(i169,cr148_0). +Le(i182,cr148_0). +Le(i195,cr148_0). +Le(i208,cr148_0). +Le(i221,cr148_0). +Le(i234,cr148_0). +Le(i247,cr148_0). +Le(i260,cr148_0). +Le(i460,cr148_0). +Le(i530,cr148_0). +Le(i600,cr148_0). +Le(i660,cr148_0). +Le(i670,cr148_0). +Le(i710,cr148_0). +Le(i740,cr148_0). +Le(i810,cr148_0). +Le(i850,cr148_0). +Le(i880,cr148_0). +Le(i890,cr148_0). +Le(i920,cr148_0). +Le(i960,cr148_0). +Le(i990,cr148_0). +Le(i1030,cr148_0). +Le(i1060,cr148_0). +Le(i1100,cr148_0). +Le(i1130,cr148_0). +Le(i1170,cr148_0). +Le(i1200,cr148_0). +Le(i1240,cr148_0). +Le(i1260,cr148_0). +Le(i1270,cr148_0). +Le(i1290,cr148_0). +Le(i1310,cr148_0). +Le(i1320,cr148_0). +Le(i1330,cr148_0). +Le(i1350,cr148_0). +Le(i1360,cr148_0). +Le(i1380,cr148_0). +Le(i1390,cr148_0). +Le(i1420,cr148_0). +Le(i1430,cr148_0). +Le(i1450,cr148_0). +Le(i1460,cr148_0). +Le(i1490,cr148_0). +Le(i1520,cr148_0). +Le(i1530,cr148_0). +Le(i1540,cr148_0). +Le(i1560,cr148_0). +Le(i1590,cr148_0). +Le(i1630,cr148_0). +Le(i1660,cr148_0). +Le(i1700,cr148_0). +Le(i1730,cr148_0). +Le(i1760,cr148_0). +Le(i1770,cr148_0). +Le(i1810,cr148_0). +Le(i1840,cr148_0). +Le(i1880,cr148_0). +Le(i1910,cr148_0). +Le(i1950,cr148_0). +Le(i1980,cr148_0). +Le(i2020,cr148_0). +Le(i2050,cr148_0). +Le(i2090,cr148_0). +Le(i2120,cr148_0). +Le(i2160,cr148_0). +Le(i2190,cr148_0). +Le(i2200,cr148_0). +Le(i2230,cr148_0). +Le(i2270,cr148_0). +Le(i2300,cr148_0). +Le(i2340,cr148_0). +Le(i2370,cr148_0). +Le(i2410,cr148_0). +Le(i2420,cr148_0). +Le(i2440,cr148_0). +Le(i2480,cr148_0). +Le(i2510,cr148_0). +Le(i2550,cr148_0). +Le(i2580,cr148_0). +Le(i2620,cr148_0). +Le(i2640,cr148_0). +Le(i2660,cr148_0). +Le(i2730,cr148_0). +Le(i2760,cr148_0). +Le(i2800,cr148_0). +Le(i2830,cr148_0). +Le(i2860,cr148_0). +Le(i2870,cr148_0). +Le(i2940,cr148_0). +Le(i2970,cr148_0). +Le(i3010,cr148_0). +Le(i3040,cr148_0). +Le(i3080,cr148_0). +Le(i3120,cr148_0). +Le(i3150,cr148_0). +Le(i3220,cr148_0). +Le(i3260,cr148_0). +Le(i3290,cr148_0). +Le(i3300,cr148_0). +Le(i3330,cr148_0). +Le(i3400,cr148_0). +Le(i3430,cr148_0). +Le(i3500,cr148_0). +Le(i3520,cr148_0). +Le(i3580,cr148_0). +Le(i3610,cr148_0). +Le(i3650,cr148_0). +Le(i3680,cr148_0). +Le(i3720,cr148_0). +Le(i3740,cr148_0). +Le(i3790,cr148_0). +Le(i3820,cr148_0). +Le(i3860,cr148_0). +Le(i3960,cr148_0). +Le(i4040,cr148_0). +Le(i4140,cr148_0). +Le(i4180,cr148_0). +Le(i4400,cr148_0). +Le(i4620,cr148_0). +Le(i4840,cr148_0). +Le(i5060,cr148_0). +Le(i5280,cr148_0). +Le(i5500,cr148_0). +Le(i5720,cr148_0). +Le(i5940,cr148_0). +Le(i6160,cr148_0). +Le(i6380,cr148_0). +Le(i6600,cr148_0). +Le(i6820,cr148_0). +Le(cr148_0,i7480). +Le(cr148_0,i7700). +Le(cr148_0,i7920). +Le(cr148_0,i8140). +Le(cr148_0,i8360). +Le(cr148_0,i8580). +Eq(i7260,i7260). +Le(i7260,cr149_0). +Le(cr149_0,i7480). +Le(i-30,cr149_0). +Le(i0,cr149_0). +Le(i13,cr149_0). 
+Le(i26,cr149_0). +Le(i39,cr149_0). +Le(i52,cr149_0). +Le(i60,cr149_0). +Le(i65,cr149_0). +Le(i70,cr149_0). +Le(i78,cr149_0). +Le(i90,cr149_0). +Le(i91,cr149_0). +Le(i104,cr149_0). +Le(i117,cr149_0). +Le(i130,cr149_0). +Le(i143,cr149_0). +Le(i156,cr149_0). +Le(i169,cr149_0). +Le(i182,cr149_0). +Le(i195,cr149_0). +Le(i208,cr149_0). +Le(i221,cr149_0). +Le(i234,cr149_0). +Le(i247,cr149_0). +Le(i260,cr149_0). +Le(i460,cr149_0). +Le(i530,cr149_0). +Le(i600,cr149_0). +Le(i660,cr149_0). +Le(i670,cr149_0). +Le(i710,cr149_0). +Le(i740,cr149_0). +Le(i810,cr149_0). +Le(i850,cr149_0). +Le(i880,cr149_0). +Le(i890,cr149_0). +Le(i920,cr149_0). +Le(i960,cr149_0). +Le(i990,cr149_0). +Le(i1030,cr149_0). +Le(i1060,cr149_0). +Le(i1100,cr149_0). +Le(i1130,cr149_0). +Le(i1170,cr149_0). +Le(i1200,cr149_0). +Le(i1240,cr149_0). +Le(i1260,cr149_0). +Le(i1270,cr149_0). +Le(i1290,cr149_0). +Le(i1310,cr149_0). +Le(i1320,cr149_0). +Le(i1330,cr149_0). +Le(i1350,cr149_0). +Le(i1360,cr149_0). +Le(i1380,cr149_0). +Le(i1390,cr149_0). +Le(i1420,cr149_0). +Le(i1430,cr149_0). +Le(i1450,cr149_0). +Le(i1460,cr149_0). +Le(i1490,cr149_0). +Le(i1520,cr149_0). +Le(i1530,cr149_0). +Le(i1540,cr149_0). +Le(i1560,cr149_0). +Le(i1590,cr149_0). +Le(i1630,cr149_0). +Le(i1660,cr149_0). +Le(i1700,cr149_0). +Le(i1730,cr149_0). +Le(i1760,cr149_0). +Le(i1770,cr149_0). +Le(i1810,cr149_0). +Le(i1840,cr149_0). +Le(i1880,cr149_0). +Le(i1910,cr149_0). +Le(i1950,cr149_0). +Le(i1980,cr149_0). +Le(i2020,cr149_0). +Le(i2050,cr149_0). +Le(i2090,cr149_0). +Le(i2120,cr149_0). +Le(i2160,cr149_0). +Le(i2190,cr149_0). +Le(i2200,cr149_0). +Le(i2230,cr149_0). +Le(i2270,cr149_0). +Le(i2300,cr149_0). +Le(i2340,cr149_0). +Le(i2370,cr149_0). +Le(i2410,cr149_0). +Le(i2420,cr149_0). +Le(i2440,cr149_0). +Le(i2480,cr149_0). +Le(i2510,cr149_0). +Le(i2550,cr149_0). +Le(i2580,cr149_0). +Le(i2620,cr149_0). +Le(i2640,cr149_0). +Le(i2660,cr149_0). +Le(i2730,cr149_0). +Le(i2760,cr149_0). +Le(i2800,cr149_0). +Le(i2830,cr149_0). +Le(i2860,cr149_0). +Le(i2870,cr149_0). +Le(i2940,cr149_0). +Le(i2970,cr149_0). +Le(i3010,cr149_0). +Le(i3040,cr149_0). +Le(i3080,cr149_0). +Le(i3120,cr149_0). +Le(i3150,cr149_0). +Le(i3220,cr149_0). +Le(i3260,cr149_0). +Le(i3290,cr149_0). +Le(i3300,cr149_0). +Le(i3330,cr149_0). +Le(i3400,cr149_0). +Le(i3430,cr149_0). +Le(i3500,cr149_0). +Le(i3520,cr149_0). +Le(i3580,cr149_0). +Le(i3610,cr149_0). +Le(i3650,cr149_0). +Le(i3680,cr149_0). +Le(i3720,cr149_0). +Le(i3740,cr149_0). +Le(i3790,cr149_0). +Le(i3820,cr149_0). +Le(i3860,cr149_0). +Le(i3960,cr149_0). +Le(i4040,cr149_0). +Le(i4140,cr149_0). +Le(i4180,cr149_0). +Le(i4400,cr149_0). +Le(i4620,cr149_0). +Le(i4840,cr149_0). +Le(i5060,cr149_0). +Le(i5280,cr149_0). +Le(i5500,cr149_0). +Le(i5720,cr149_0). +Le(i5940,cr149_0). +Le(i6160,cr149_0). +Le(i6380,cr149_0). +Le(i6600,cr149_0). +Le(i6820,cr149_0). +Le(i7040,cr149_0). +Le(cr149_0,i7700). +Le(cr149_0,i7920). +Le(cr149_0,i8140). +Le(cr149_0,i8360). +Le(cr149_0,i8580). +Eq(i7480,i7480). +Le(i7480,cr150_0). +Le(cr150_0,i7700). +Le(i-30,cr150_0). +Le(i0,cr150_0). +Le(i13,cr150_0). +Le(i26,cr150_0). +Le(i39,cr150_0). +Le(i52,cr150_0). +Le(i60,cr150_0). +Le(i65,cr150_0). +Le(i70,cr150_0). +Le(i78,cr150_0). +Le(i90,cr150_0). +Le(i91,cr150_0). +Le(i104,cr150_0). +Le(i117,cr150_0). +Le(i130,cr150_0). +Le(i143,cr150_0). +Le(i156,cr150_0). +Le(i169,cr150_0). +Le(i182,cr150_0). +Le(i195,cr150_0). +Le(i208,cr150_0). +Le(i221,cr150_0). +Le(i234,cr150_0). +Le(i247,cr150_0). +Le(i260,cr150_0). +Le(i460,cr150_0). +Le(i530,cr150_0). +Le(i600,cr150_0). +Le(i660,cr150_0). 
+Le(i670,cr150_0). +Le(i710,cr150_0). +Le(i740,cr150_0). +Le(i810,cr150_0). +Le(i850,cr150_0). +Le(i880,cr150_0). +Le(i890,cr150_0). +Le(i920,cr150_0). +Le(i960,cr150_0). +Le(i990,cr150_0). +Le(i1030,cr150_0). +Le(i1060,cr150_0). +Le(i1100,cr150_0). +Le(i1130,cr150_0). +Le(i1170,cr150_0). +Le(i1200,cr150_0). +Le(i1240,cr150_0). +Le(i1260,cr150_0). +Le(i1270,cr150_0). +Le(i1290,cr150_0). +Le(i1310,cr150_0). +Le(i1320,cr150_0). +Le(i1330,cr150_0). +Le(i1350,cr150_0). +Le(i1360,cr150_0). +Le(i1380,cr150_0). +Le(i1390,cr150_0). +Le(i1420,cr150_0). +Le(i1430,cr150_0). +Le(i1450,cr150_0). +Le(i1460,cr150_0). +Le(i1490,cr150_0). +Le(i1520,cr150_0). +Le(i1530,cr150_0). +Le(i1540,cr150_0). +Le(i1560,cr150_0). +Le(i1590,cr150_0). +Le(i1630,cr150_0). +Le(i1660,cr150_0). +Le(i1700,cr150_0). +Le(i1730,cr150_0). +Le(i1760,cr150_0). +Le(i1770,cr150_0). +Le(i1810,cr150_0). +Le(i1840,cr150_0). +Le(i1880,cr150_0). +Le(i1910,cr150_0). +Le(i1950,cr150_0). +Le(i1980,cr150_0). +Le(i2020,cr150_0). +Le(i2050,cr150_0). +Le(i2090,cr150_0). +Le(i2120,cr150_0). +Le(i2160,cr150_0). +Le(i2190,cr150_0). +Le(i2200,cr150_0). +Le(i2230,cr150_0). +Le(i2270,cr150_0). +Le(i2300,cr150_0). +Le(i2340,cr150_0). +Le(i2370,cr150_0). +Le(i2410,cr150_0). +Le(i2420,cr150_0). +Le(i2440,cr150_0). +Le(i2480,cr150_0). +Le(i2510,cr150_0). +Le(i2550,cr150_0). +Le(i2580,cr150_0). +Le(i2620,cr150_0). +Le(i2640,cr150_0). +Le(i2660,cr150_0). +Le(i2730,cr150_0). +Le(i2760,cr150_0). +Le(i2800,cr150_0). +Le(i2830,cr150_0). +Le(i2860,cr150_0). +Le(i2870,cr150_0). +Le(i2940,cr150_0). +Le(i2970,cr150_0). +Le(i3010,cr150_0). +Le(i3040,cr150_0). +Le(i3080,cr150_0). +Le(i3120,cr150_0). +Le(i3150,cr150_0). +Le(i3220,cr150_0). +Le(i3260,cr150_0). +Le(i3290,cr150_0). +Le(i3300,cr150_0). +Le(i3330,cr150_0). +Le(i3400,cr150_0). +Le(i3430,cr150_0). +Le(i3500,cr150_0). +Le(i3520,cr150_0). +Le(i3580,cr150_0). +Le(i3610,cr150_0). +Le(i3650,cr150_0). +Le(i3680,cr150_0). +Le(i3720,cr150_0). +Le(i3740,cr150_0). +Le(i3790,cr150_0). +Le(i3820,cr150_0). +Le(i3860,cr150_0). +Le(i3960,cr150_0). +Le(i4040,cr150_0). +Le(i4140,cr150_0). +Le(i4180,cr150_0). +Le(i4400,cr150_0). +Le(i4620,cr150_0). +Le(i4840,cr150_0). +Le(i5060,cr150_0). +Le(i5280,cr150_0). +Le(i5500,cr150_0). +Le(i5720,cr150_0). +Le(i5940,cr150_0). +Le(i6160,cr150_0). +Le(i6380,cr150_0). +Le(i6600,cr150_0). +Le(i6820,cr150_0). +Le(i7040,cr150_0). +Le(i7260,cr150_0). +Le(cr150_0,i7920). +Le(cr150_0,i8140). +Le(cr150_0,i8360). +Le(cr150_0,i8580). +Eq(i7700,i7700). +Le(i7700,cr151_0). +Le(cr151_0,i7920). +Le(i-30,cr151_0). +Le(i0,cr151_0). +Le(i13,cr151_0). +Le(i26,cr151_0). +Le(i39,cr151_0). +Le(i52,cr151_0). +Le(i60,cr151_0). +Le(i65,cr151_0). +Le(i70,cr151_0). +Le(i78,cr151_0). +Le(i90,cr151_0). +Le(i91,cr151_0). +Le(i104,cr151_0). +Le(i117,cr151_0). +Le(i130,cr151_0). +Le(i143,cr151_0). +Le(i156,cr151_0). +Le(i169,cr151_0). +Le(i182,cr151_0). +Le(i195,cr151_0). +Le(i208,cr151_0). +Le(i221,cr151_0). +Le(i234,cr151_0). +Le(i247,cr151_0). +Le(i260,cr151_0). +Le(i460,cr151_0). +Le(i530,cr151_0). +Le(i600,cr151_0). +Le(i660,cr151_0). +Le(i670,cr151_0). +Le(i710,cr151_0). +Le(i740,cr151_0). +Le(i810,cr151_0). +Le(i850,cr151_0). +Le(i880,cr151_0). +Le(i890,cr151_0). +Le(i920,cr151_0). +Le(i960,cr151_0). +Le(i990,cr151_0). +Le(i1030,cr151_0). +Le(i1060,cr151_0). +Le(i1100,cr151_0). +Le(i1130,cr151_0). +Le(i1170,cr151_0). +Le(i1200,cr151_0). +Le(i1240,cr151_0). +Le(i1260,cr151_0). +Le(i1270,cr151_0). +Le(i1290,cr151_0). +Le(i1310,cr151_0). +Le(i1320,cr151_0). +Le(i1330,cr151_0). +Le(i1350,cr151_0). 
+Le(i1360,cr151_0). +Le(i1380,cr151_0). +Le(i1390,cr151_0). +Le(i1420,cr151_0). +Le(i1430,cr151_0). +Le(i1450,cr151_0). +Le(i1460,cr151_0). +Le(i1490,cr151_0). +Le(i1520,cr151_0). +Le(i1530,cr151_0). +Le(i1540,cr151_0). +Le(i1560,cr151_0). +Le(i1590,cr151_0). +Le(i1630,cr151_0). +Le(i1660,cr151_0). +Le(i1700,cr151_0). +Le(i1730,cr151_0). +Le(i1760,cr151_0). +Le(i1770,cr151_0). +Le(i1810,cr151_0). +Le(i1840,cr151_0). +Le(i1880,cr151_0). +Le(i1910,cr151_0). +Le(i1950,cr151_0). +Le(i1980,cr151_0). +Le(i2020,cr151_0). +Le(i2050,cr151_0). +Le(i2090,cr151_0). +Le(i2120,cr151_0). +Le(i2160,cr151_0). +Le(i2190,cr151_0). +Le(i2200,cr151_0). +Le(i2230,cr151_0). +Le(i2270,cr151_0). +Le(i2300,cr151_0). +Le(i2340,cr151_0). +Le(i2370,cr151_0). +Le(i2410,cr151_0). +Le(i2420,cr151_0). +Le(i2440,cr151_0). +Le(i2480,cr151_0). +Le(i2510,cr151_0). +Le(i2550,cr151_0). +Le(i2580,cr151_0). +Le(i2620,cr151_0). +Le(i2640,cr151_0). +Le(i2660,cr151_0). +Le(i2730,cr151_0). +Le(i2760,cr151_0). +Le(i2800,cr151_0). +Le(i2830,cr151_0). +Le(i2860,cr151_0). +Le(i2870,cr151_0). +Le(i2940,cr151_0). +Le(i2970,cr151_0). +Le(i3010,cr151_0). +Le(i3040,cr151_0). +Le(i3080,cr151_0). +Le(i3120,cr151_0). +Le(i3150,cr151_0). +Le(i3220,cr151_0). +Le(i3260,cr151_0). +Le(i3290,cr151_0). +Le(i3300,cr151_0). +Le(i3330,cr151_0). +Le(i3400,cr151_0). +Le(i3430,cr151_0). +Le(i3500,cr151_0). +Le(i3520,cr151_0). +Le(i3580,cr151_0). +Le(i3610,cr151_0). +Le(i3650,cr151_0). +Le(i3680,cr151_0). +Le(i3720,cr151_0). +Le(i3740,cr151_0). +Le(i3790,cr151_0). +Le(i3820,cr151_0). +Le(i3860,cr151_0). +Le(i3960,cr151_0). +Le(i4040,cr151_0). +Le(i4140,cr151_0). +Le(i4180,cr151_0). +Le(i4400,cr151_0). +Le(i4620,cr151_0). +Le(i4840,cr151_0). +Le(i5060,cr151_0). +Le(i5280,cr151_0). +Le(i5500,cr151_0). +Le(i5720,cr151_0). +Le(i5940,cr151_0). +Le(i6160,cr151_0). +Le(i6380,cr151_0). +Le(i6600,cr151_0). +Le(i6820,cr151_0). +Le(i7040,cr151_0). +Le(i7260,cr151_0). +Le(i7480,cr151_0). +Le(cr151_0,i8140). +Le(cr151_0,i8360). +Le(cr151_0,i8580). +Eq(i7920,i7920). +Le(i7920,cr152_0). +Le(cr152_0,i8140). +Le(i-30,cr152_0). +Le(i0,cr152_0). +Le(i13,cr152_0). +Le(i26,cr152_0). +Le(i39,cr152_0). +Le(i52,cr152_0). +Le(i60,cr152_0). +Le(i65,cr152_0). +Le(i70,cr152_0). +Le(i78,cr152_0). +Le(i90,cr152_0). +Le(i91,cr152_0). +Le(i104,cr152_0). +Le(i117,cr152_0). +Le(i130,cr152_0). +Le(i143,cr152_0). +Le(i156,cr152_0). +Le(i169,cr152_0). +Le(i182,cr152_0). +Le(i195,cr152_0). +Le(i208,cr152_0). +Le(i221,cr152_0). +Le(i234,cr152_0). +Le(i247,cr152_0). +Le(i260,cr152_0). +Le(i460,cr152_0). +Le(i530,cr152_0). +Le(i600,cr152_0). +Le(i660,cr152_0). +Le(i670,cr152_0). +Le(i710,cr152_0). +Le(i740,cr152_0). +Le(i810,cr152_0). +Le(i850,cr152_0). +Le(i880,cr152_0). +Le(i890,cr152_0). +Le(i920,cr152_0). +Le(i960,cr152_0). +Le(i990,cr152_0). +Le(i1030,cr152_0). +Le(i1060,cr152_0). +Le(i1100,cr152_0). +Le(i1130,cr152_0). +Le(i1170,cr152_0). +Le(i1200,cr152_0). +Le(i1240,cr152_0). +Le(i1260,cr152_0). +Le(i1270,cr152_0). +Le(i1290,cr152_0). +Le(i1310,cr152_0). +Le(i1320,cr152_0). +Le(i1330,cr152_0). +Le(i1350,cr152_0). +Le(i1360,cr152_0). +Le(i1380,cr152_0). +Le(i1390,cr152_0). +Le(i1420,cr152_0). +Le(i1430,cr152_0). +Le(i1450,cr152_0). +Le(i1460,cr152_0). +Le(i1490,cr152_0). +Le(i1520,cr152_0). +Le(i1530,cr152_0). +Le(i1540,cr152_0). +Le(i1560,cr152_0). +Le(i1590,cr152_0). +Le(i1630,cr152_0). +Le(i1660,cr152_0). +Le(i1700,cr152_0). +Le(i1730,cr152_0). +Le(i1760,cr152_0). +Le(i1770,cr152_0). +Le(i1810,cr152_0). +Le(i1840,cr152_0). +Le(i1880,cr152_0). +Le(i1910,cr152_0). +Le(i1950,cr152_0). 
+Le(i1980,cr152_0). +Le(i2020,cr152_0). +Le(i2050,cr152_0). +Le(i2090,cr152_0). +Le(i2120,cr152_0). +Le(i2160,cr152_0). +Le(i2190,cr152_0). +Le(i2200,cr152_0). +Le(i2230,cr152_0). +Le(i2270,cr152_0). +Le(i2300,cr152_0). +Le(i2340,cr152_0). +Le(i2370,cr152_0). +Le(i2410,cr152_0). +Le(i2420,cr152_0). +Le(i2440,cr152_0). +Le(i2480,cr152_0). +Le(i2510,cr152_0). +Le(i2550,cr152_0). +Le(i2580,cr152_0). +Le(i2620,cr152_0). +Le(i2640,cr152_0). +Le(i2660,cr152_0). +Le(i2730,cr152_0). +Le(i2760,cr152_0). +Le(i2800,cr152_0). +Le(i2830,cr152_0). +Le(i2860,cr152_0). +Le(i2870,cr152_0). +Le(i2940,cr152_0). +Le(i2970,cr152_0). +Le(i3010,cr152_0). +Le(i3040,cr152_0). +Le(i3080,cr152_0). +Le(i3120,cr152_0). +Le(i3150,cr152_0). +Le(i3220,cr152_0). +Le(i3260,cr152_0). +Le(i3290,cr152_0). +Le(i3300,cr152_0). +Le(i3330,cr152_0). +Le(i3400,cr152_0). +Le(i3430,cr152_0). +Le(i3500,cr152_0). +Le(i3520,cr152_0). +Le(i3580,cr152_0). +Le(i3610,cr152_0). +Le(i3650,cr152_0). +Le(i3680,cr152_0). +Le(i3720,cr152_0). +Le(i3740,cr152_0). +Le(i3790,cr152_0). +Le(i3820,cr152_0). +Le(i3860,cr152_0). +Le(i3960,cr152_0). +Le(i4040,cr152_0). +Le(i4140,cr152_0). +Le(i4180,cr152_0). +Le(i4400,cr152_0). +Le(i4620,cr152_0). +Le(i4840,cr152_0). +Le(i5060,cr152_0). +Le(i5280,cr152_0). +Le(i5500,cr152_0). +Le(i5720,cr152_0). +Le(i5940,cr152_0). +Le(i6160,cr152_0). +Le(i6380,cr152_0). +Le(i6600,cr152_0). +Le(i6820,cr152_0). +Le(i7040,cr152_0). +Le(i7260,cr152_0). +Le(i7480,cr152_0). +Le(i7700,cr152_0). +Le(cr152_0,i8360). +Le(cr152_0,i8580). +Eq(i8140,i8140). +Le(i8140,cr153_0). +Le(cr153_0,i8360). +Le(i-30,cr153_0). +Le(i0,cr153_0). +Le(i13,cr153_0). +Le(i26,cr153_0). +Le(i39,cr153_0). +Le(i52,cr153_0). +Le(i60,cr153_0). +Le(i65,cr153_0). +Le(i70,cr153_0). +Le(i78,cr153_0). +Le(i90,cr153_0). +Le(i91,cr153_0). +Le(i104,cr153_0). +Le(i117,cr153_0). +Le(i130,cr153_0). +Le(i143,cr153_0). +Le(i156,cr153_0). +Le(i169,cr153_0). +Le(i182,cr153_0). +Le(i195,cr153_0). +Le(i208,cr153_0). +Le(i221,cr153_0). +Le(i234,cr153_0). +Le(i247,cr153_0). +Le(i260,cr153_0). +Le(i460,cr153_0). +Le(i530,cr153_0). +Le(i600,cr153_0). +Le(i660,cr153_0). +Le(i670,cr153_0). +Le(i710,cr153_0). +Le(i740,cr153_0). +Le(i810,cr153_0). +Le(i850,cr153_0). +Le(i880,cr153_0). +Le(i890,cr153_0). +Le(i920,cr153_0). +Le(i960,cr153_0). +Le(i990,cr153_0). +Le(i1030,cr153_0). +Le(i1060,cr153_0). +Le(i1100,cr153_0). +Le(i1130,cr153_0). +Le(i1170,cr153_0). +Le(i1200,cr153_0). +Le(i1240,cr153_0). +Le(i1260,cr153_0). +Le(i1270,cr153_0). +Le(i1290,cr153_0). +Le(i1310,cr153_0). +Le(i1320,cr153_0). +Le(i1330,cr153_0). +Le(i1350,cr153_0). +Le(i1360,cr153_0). +Le(i1380,cr153_0). +Le(i1390,cr153_0). +Le(i1420,cr153_0). +Le(i1430,cr153_0). +Le(i1450,cr153_0). +Le(i1460,cr153_0). +Le(i1490,cr153_0). +Le(i1520,cr153_0). +Le(i1530,cr153_0). +Le(i1540,cr153_0). +Le(i1560,cr153_0). +Le(i1590,cr153_0). +Le(i1630,cr153_0). +Le(i1660,cr153_0). +Le(i1700,cr153_0). +Le(i1730,cr153_0). +Le(i1760,cr153_0). +Le(i1770,cr153_0). +Le(i1810,cr153_0). +Le(i1840,cr153_0). +Le(i1880,cr153_0). +Le(i1910,cr153_0). +Le(i1950,cr153_0). +Le(i1980,cr153_0). +Le(i2020,cr153_0). +Le(i2050,cr153_0). +Le(i2090,cr153_0). +Le(i2120,cr153_0). +Le(i2160,cr153_0). +Le(i2190,cr153_0). +Le(i2200,cr153_0). +Le(i2230,cr153_0). +Le(i2270,cr153_0). +Le(i2300,cr153_0). +Le(i2340,cr153_0). +Le(i2370,cr153_0). +Le(i2410,cr153_0). +Le(i2420,cr153_0). +Le(i2440,cr153_0). +Le(i2480,cr153_0). +Le(i2510,cr153_0). +Le(i2550,cr153_0). +Le(i2580,cr153_0). +Le(i2620,cr153_0). +Le(i2640,cr153_0). +Le(i2660,cr153_0). +Le(i2730,cr153_0). 
+Le(i2760,cr153_0). +Le(i2800,cr153_0). +Le(i2830,cr153_0). +Le(i2860,cr153_0). +Le(i2870,cr153_0). +Le(i2940,cr153_0). +Le(i2970,cr153_0). +Le(i3010,cr153_0). +Le(i3040,cr153_0). +Le(i3080,cr153_0). +Le(i3120,cr153_0). +Le(i3150,cr153_0). +Le(i3220,cr153_0). +Le(i3260,cr153_0). +Le(i3290,cr153_0). +Le(i3300,cr153_0). +Le(i3330,cr153_0). +Le(i3400,cr153_0). +Le(i3430,cr153_0). +Le(i3500,cr153_0). +Le(i3520,cr153_0). +Le(i3580,cr153_0). +Le(i3610,cr153_0). +Le(i3650,cr153_0). +Le(i3680,cr153_0). +Le(i3720,cr153_0). +Le(i3740,cr153_0). +Le(i3790,cr153_0). +Le(i3820,cr153_0). +Le(i3860,cr153_0). +Le(i3960,cr153_0). +Le(i4040,cr153_0). +Le(i4140,cr153_0). +Le(i4180,cr153_0). +Le(i4400,cr153_0). +Le(i4620,cr153_0). +Le(i4840,cr153_0). +Le(i5060,cr153_0). +Le(i5280,cr153_0). +Le(i5500,cr153_0). +Le(i5720,cr153_0). +Le(i5940,cr153_0). +Le(i6160,cr153_0). +Le(i6380,cr153_0). +Le(i6600,cr153_0). +Le(i6820,cr153_0). +Le(i7040,cr153_0). +Le(i7260,cr153_0). +Le(i7480,cr153_0). +Le(i7700,cr153_0). +Le(i7920,cr153_0). +Le(cr153_0,i8580). +Eq(i8360,i8360). +Le(i8360,cr154_0). +Le(cr154_0,i8580). +Le(i-30,cr154_0). +Le(i0,cr154_0). +Le(i13,cr154_0). +Le(i26,cr154_0). +Le(i39,cr154_0). +Le(i52,cr154_0). +Le(i60,cr154_0). +Le(i65,cr154_0). +Le(i70,cr154_0). +Le(i78,cr154_0). +Le(i90,cr154_0). +Le(i91,cr154_0). +Le(i104,cr154_0). +Le(i117,cr154_0). +Le(i130,cr154_0). +Le(i143,cr154_0). +Le(i156,cr154_0). +Le(i169,cr154_0). +Le(i182,cr154_0). +Le(i195,cr154_0). +Le(i208,cr154_0). +Le(i221,cr154_0). +Le(i234,cr154_0). +Le(i247,cr154_0). +Le(i260,cr154_0). +Le(i460,cr154_0). +Le(i530,cr154_0). +Le(i600,cr154_0). +Le(i660,cr154_0). +Le(i670,cr154_0). +Le(i710,cr154_0). +Le(i740,cr154_0). +Le(i810,cr154_0). +Le(i850,cr154_0). +Le(i880,cr154_0). +Le(i890,cr154_0). +Le(i920,cr154_0). +Le(i960,cr154_0). +Le(i990,cr154_0). +Le(i1030,cr154_0). +Le(i1060,cr154_0). +Le(i1100,cr154_0). +Le(i1130,cr154_0). +Le(i1170,cr154_0). +Le(i1200,cr154_0). +Le(i1240,cr154_0). +Le(i1260,cr154_0). +Le(i1270,cr154_0). +Le(i1290,cr154_0). +Le(i1310,cr154_0). +Le(i1320,cr154_0). +Le(i1330,cr154_0). +Le(i1350,cr154_0). +Le(i1360,cr154_0). +Le(i1380,cr154_0). +Le(i1390,cr154_0). +Le(i1420,cr154_0). +Le(i1430,cr154_0). +Le(i1450,cr154_0). +Le(i1460,cr154_0). +Le(i1490,cr154_0). +Le(i1520,cr154_0). +Le(i1530,cr154_0). +Le(i1540,cr154_0). +Le(i1560,cr154_0). +Le(i1590,cr154_0). +Le(i1630,cr154_0). +Le(i1660,cr154_0). +Le(i1700,cr154_0). +Le(i1730,cr154_0). +Le(i1760,cr154_0). +Le(i1770,cr154_0). +Le(i1810,cr154_0). +Le(i1840,cr154_0). +Le(i1880,cr154_0). +Le(i1910,cr154_0). +Le(i1950,cr154_0). +Le(i1980,cr154_0). +Le(i2020,cr154_0). +Le(i2050,cr154_0). +Le(i2090,cr154_0). +Le(i2120,cr154_0). +Le(i2160,cr154_0). +Le(i2190,cr154_0). +Le(i2200,cr154_0). +Le(i2230,cr154_0). +Le(i2270,cr154_0). +Le(i2300,cr154_0). +Le(i2340,cr154_0). +Le(i2370,cr154_0). +Le(i2410,cr154_0). +Le(i2420,cr154_0). +Le(i2440,cr154_0). +Le(i2480,cr154_0). +Le(i2510,cr154_0). +Le(i2550,cr154_0). +Le(i2580,cr154_0). +Le(i2620,cr154_0). +Le(i2640,cr154_0). +Le(i2660,cr154_0). +Le(i2730,cr154_0). +Le(i2760,cr154_0). +Le(i2800,cr154_0). +Le(i2830,cr154_0). +Le(i2860,cr154_0). +Le(i2870,cr154_0). +Le(i2940,cr154_0). +Le(i2970,cr154_0). +Le(i3010,cr154_0). +Le(i3040,cr154_0). +Le(i3080,cr154_0). +Le(i3120,cr154_0). +Le(i3150,cr154_0). +Le(i3220,cr154_0). +Le(i3260,cr154_0). +Le(i3290,cr154_0). +Le(i3300,cr154_0). +Le(i3330,cr154_0). +Le(i3400,cr154_0). +Le(i3430,cr154_0). +Le(i3500,cr154_0). +Le(i3520,cr154_0). +Le(i3580,cr154_0). +Le(i3610,cr154_0). +Le(i3650,cr154_0). 
+Le(i3680,cr154_0). +Le(i3720,cr154_0). +Le(i3740,cr154_0). +Le(i3790,cr154_0). +Le(i3820,cr154_0). +Le(i3860,cr154_0). +Le(i3960,cr154_0). +Le(i4040,cr154_0). +Le(i4140,cr154_0). +Le(i4180,cr154_0). +Le(i4400,cr154_0). +Le(i4620,cr154_0). +Le(i4840,cr154_0). +Le(i5060,cr154_0). +Le(i5280,cr154_0). +Le(i5500,cr154_0). +Le(i5720,cr154_0). +Le(i5940,cr154_0). +Le(i6160,cr154_0). +Le(i6380,cr154_0). +Le(i6600,cr154_0). +Le(i6820,cr154_0). +Le(i7040,cr154_0). +Le(i7260,cr154_0). +Le(i7480,cr154_0). +Le(i7700,cr154_0). +Le(i7920,cr154_0). +Le(i8140,cr154_0). +Eq(i8580,i8580). +Le(i8580,cr155_0). +Le(i-30,cr155_0). +Le(i0,cr155_0). +Le(i13,cr155_0). +Le(i26,cr155_0). +Le(i39,cr155_0). +Le(i52,cr155_0). +Le(i60,cr155_0). +Le(i65,cr155_0). +Le(i70,cr155_0). +Le(i78,cr155_0). +Le(i90,cr155_0). +Le(i91,cr155_0). +Le(i104,cr155_0). +Le(i117,cr155_0). +Le(i130,cr155_0). +Le(i143,cr155_0). +Le(i156,cr155_0). +Le(i169,cr155_0). +Le(i182,cr155_0). +Le(i195,cr155_0). +Le(i208,cr155_0). +Le(i221,cr155_0). +Le(i234,cr155_0). +Le(i247,cr155_0). +Le(i260,cr155_0). +Le(i460,cr155_0). +Le(i530,cr155_0). +Le(i600,cr155_0). +Le(i660,cr155_0). +Le(i670,cr155_0). +Le(i710,cr155_0). +Le(i740,cr155_0). +Le(i810,cr155_0). +Le(i850,cr155_0). +Le(i880,cr155_0). +Le(i890,cr155_0). +Le(i920,cr155_0). +Le(i960,cr155_0). +Le(i990,cr155_0). +Le(i1030,cr155_0). +Le(i1060,cr155_0). +Le(i1100,cr155_0). +Le(i1130,cr155_0). +Le(i1170,cr155_0). +Le(i1200,cr155_0). +Le(i1240,cr155_0). +Le(i1260,cr155_0). +Le(i1270,cr155_0). +Le(i1290,cr155_0). +Le(i1310,cr155_0). +Le(i1320,cr155_0). +Le(i1330,cr155_0). +Le(i1350,cr155_0). +Le(i1360,cr155_0). +Le(i1380,cr155_0). +Le(i1390,cr155_0). +Le(i1420,cr155_0). +Le(i1430,cr155_0). +Le(i1450,cr155_0). +Le(i1460,cr155_0). +Le(i1490,cr155_0). +Le(i1520,cr155_0). +Le(i1530,cr155_0). +Le(i1540,cr155_0). +Le(i1560,cr155_0). +Le(i1590,cr155_0). +Le(i1630,cr155_0). +Le(i1660,cr155_0). +Le(i1700,cr155_0). +Le(i1730,cr155_0). +Le(i1760,cr155_0). +Le(i1770,cr155_0). +Le(i1810,cr155_0). +Le(i1840,cr155_0). +Le(i1880,cr155_0). +Le(i1910,cr155_0). +Le(i1950,cr155_0). +Le(i1980,cr155_0). +Le(i2020,cr155_0). +Le(i2050,cr155_0). +Le(i2090,cr155_0). +Le(i2120,cr155_0). +Le(i2160,cr155_0). +Le(i2190,cr155_0). +Le(i2200,cr155_0). +Le(i2230,cr155_0). +Le(i2270,cr155_0). +Le(i2300,cr155_0). +Le(i2340,cr155_0). +Le(i2370,cr155_0). +Le(i2410,cr155_0). +Le(i2420,cr155_0). +Le(i2440,cr155_0). +Le(i2480,cr155_0). +Le(i2510,cr155_0). +Le(i2550,cr155_0). +Le(i2580,cr155_0). +Le(i2620,cr155_0). +Le(i2640,cr155_0). +Le(i2660,cr155_0). +Le(i2730,cr155_0). +Le(i2760,cr155_0). +Le(i2800,cr155_0). +Le(i2830,cr155_0). +Le(i2860,cr155_0). +Le(i2870,cr155_0). +Le(i2940,cr155_0). +Le(i2970,cr155_0). +Le(i3010,cr155_0). +Le(i3040,cr155_0). +Le(i3080,cr155_0). +Le(i3120,cr155_0). +Le(i3150,cr155_0). +Le(i3220,cr155_0). +Le(i3260,cr155_0). +Le(i3290,cr155_0). +Le(i3300,cr155_0). +Le(i3330,cr155_0). +Le(i3400,cr155_0). +Le(i3430,cr155_0). +Le(i3500,cr155_0). +Le(i3520,cr155_0). +Le(i3580,cr155_0). +Le(i3610,cr155_0). +Le(i3650,cr155_0). +Le(i3680,cr155_0). +Le(i3720,cr155_0). +Le(i3740,cr155_0). +Le(i3790,cr155_0). +Le(i3820,cr155_0). +Le(i3860,cr155_0). +Le(i3960,cr155_0). +Le(i4040,cr155_0). +Le(i4140,cr155_0). +Le(i4180,cr155_0). +Le(i4400,cr155_0). +Le(i4620,cr155_0). +Le(i4840,cr155_0). +Le(i5060,cr155_0). +Le(i5280,cr155_0). +Le(i5500,cr155_0). +Le(i5720,cr155_0). +Le(i5940,cr155_0). +Le(i6160,cr155_0). +Le(i6380,cr155_0). +Le(i6600,cr155_0). +Le(i6820,cr155_0). +Le(i7040,cr155_0). +Le(i7260,cr155_0). +Le(i7480,cr155_0). 
+Le(i7700,cr155_0). +Le(i7920,cr155_0). +Le(i8140,cr155_0). +Le(i8360,cr155_0). +Leq(?x0,?x1) :- Le(?x0,?x1). +Leq(?x0,?x1) :- Eq(?x0,?x1). +Goal(cg) :- ResultDeg(i0),ResultDeg(cr2_0),ResultDeg(i13),ResultDeg(cr3_0),ResultDeg(i26),ResultDeg(cr4_0),ResultDeg(i39),ResultDeg(cr5_0),ResultDeg(i52),ResultDeg(cr6_0),ResultDeg(i60),ResultDeg(cr7_0),ResultDeg(i65),ResultDeg(cr8_0),ResultDeg(i70),ResultDeg(cr9_0),ResultDeg(i78),ResultDeg(cr10_0),ResultDeg(i90),ResultDeg(cr11_0),ResultDeg(i91),ResultDeg(cr12_0),ResultDeg(i104),ResultDeg(cr13_0),ResultDeg(i117),ResultDeg(cr14_0),ResultDeg(i130),ResultDeg(cr15_0),ResultDeg(i143),ResultDeg(cr16_0),ResultDeg(i156),ResultDeg(cr17_0),ResultDeg(i169),ResultDeg(cr18_0),ResultDeg(i182),ResultDeg(cr19_0),ResultDeg(i195),ResultDeg(cr20_0),ResultDeg(i208),ResultDeg(cr21_0),ResultDeg(i221),ResultDeg(cr22_0),ResultDeg(i234),ResultDeg(cr23_0),ResultDeg(i247),ResultDeg(cr24_0),ResultDeg(i260),ResultDeg(cr25_0),ResultDeg(i460),ResultDeg(cr26_0),ResultDeg(i530),ResultDeg(cr27_0),ResultDeg(i600),ResultDeg(cr28_0),ResultDeg(i660),ResultDeg(cr29_0),ResultDeg(i670),ResultDeg(cr30_0),ResultDeg(i710),ResultDeg(cr31_0),ResultDeg(i740),ResultDeg(cr32_0),ResultDeg(i810),ResultDeg(cr33_0),ResultDeg(i850),ResultDeg(cr34_0),ResultDeg(i880),ResultDeg(cr35_0),ResultDeg(i890),ResultDeg(cr36_0),ResultDeg(i920),ResultDeg(cr37_0),ResultDeg(i960),ResultDeg(cr38_0),ResultDeg(i990),ResultDeg(cr39_0),ResultDeg(i1030),ResultDeg(cr40_0),ResultDeg(i1060),ResultDeg(cr41_0),ResultDeg(i1100),ResultDeg(cr42_0),ResultDeg(i1130),ResultDeg(cr43_0),ResultDeg(i1170),ResultDeg(cr44_0),ResultDeg(i1200),ResultDeg(cr45_0),ResultDeg(i1240),ResultDeg(cr46_0),ResultDeg(i1260),ResultDeg(cr47_0),ResultDeg(i1270),ResultDeg(cr48_0),ResultDeg(i1290),ResultDeg(cr49_0),ResultDeg(i1310),ResultDeg(cr50_0),ResultDeg(i1320),ResultDeg(cr51_0),ResultDeg(i1330),ResultDeg(cr52_0),ResultDeg(i1350),ResultDeg(cr53_0),ResultDeg(i1360),ResultDeg(cr54_0),ResultDeg(i1380),ResultDeg(cr55_0),ResultDeg(i1390),ResultDeg(cr56_0),ResultDeg(i1420),ResultDeg(cr57_0),ResultDeg(i1430),ResultDeg(cr58_0),ResultDeg(i1450),ResultDeg(cr59_0),ResultDeg(i1460),ResultDeg(cr60_0),ResultDeg(i1490),ResultDeg(cr61_0),ResultDeg(i1520),ResultDeg(cr62_0),ResultDeg(i1530),ResultDeg(cr63_0),ResultDeg(i1540),ResultDeg(cr64_0),ResultDeg(i1560),ResultDeg(cr65_0),ResultDeg(i1590),ResultDeg(cr66_0),ResultDeg(i1630),ResultDeg(cr67_0),ResultDeg(i1660),ResultDeg(cr68_0),ResultDeg(i1700),ResultDeg(cr69_0),ResultDeg(i1730),ResultDeg(cr70_0),ResultDeg(i1760),ResultDeg(cr71_0),ResultDeg(i1770),ResultDeg(cr72_0),ResultDeg(i1810),ResultDeg(cr73_0),ResultDeg(i1840),ResultDeg(cr74_0),ResultDeg(i1880),ResultDeg(cr75_0),ResultDeg(i1910),ResultDeg(cr76_0),ResultDeg(i1950),ResultDeg(cr77_0),ResultDeg(i1980),ResultDeg(cr78_0),ResultDeg(i2020),ResultDeg(cr79_0),ResultDeg(i2050),ResultDeg(cr80_0),ResultDeg(i2090),ResultDeg(cr81_0),ResultDeg(i2120),ResultDeg(cr82_0),ResultDeg(i2160),ResultDeg(cr83_0),ResultDeg(i2190),ResultDeg(cr84_0),ResultDeg(i2200),ResultDeg(cr85_0),ResultDeg(i2230),ResultDeg(cr86_0),ResultDeg(i2270),ResultDeg(cr87_0),ResultDeg(i2300),ResultDeg(cr88_0),ResultDeg(i2340),ResultDeg(cr89_0),ResultDeg(i2370),ResultDeg(cr90_0),ResultDeg(i2410),ResultDeg(cr91_0),ResultDeg(i2420),ResultDeg(cr92_0),ResultDeg(i2440),ResultDeg(cr93_0),ResultDeg(i2480),ResultDeg(cr94_0),ResultDeg(i2510),ResultDeg(cr95_0),ResultDeg(i2550),ResultDeg(cr96_0),ResultDeg(i2580),ResultDeg(cr97_0),ResultDeg(i2620),ResultDeg(cr98_0),ResultDeg(i2640),ResultDeg(cr99_0),ResultDeg(i2660),ResultDeg(cr1
00_0),ResultDeg(i2730),ResultDeg(cr101_0),ResultDeg(i2760),ResultDeg(cr102_0),ResultDeg(i2800),ResultDeg(cr103_0),ResultDeg(i2830),ResultDeg(cr104_0),ResultDeg(i2860),ResultDeg(cr105_0),ResultDeg(i2870),ResultDeg(cr106_0),ResultDeg(i2940),ResultDeg(cr107_0),ResultDeg(i2970),ResultDeg(cr108_0),ResultDeg(i3010),ResultDeg(cr109_0),ResultDeg(i3040),ResultDeg(cr110_0),ResultDeg(i3080),ResultDeg(cr111_0),ResultDeg(i3120),ResultDeg(cr112_0),ResultDeg(i3150),ResultDeg(cr113_0),ResultDeg(i3220),ResultDeg(cr114_0),ResultDeg(i3260),ResultDeg(cr115_0),ResultDeg(i3290),ResultDeg(cr116_0),ResultDeg(i3300),ResultDeg(cr117_0),ResultDeg(i3330),ResultDeg(cr118_0),ResultDeg(i3400),ResultDeg(cr119_0),ResultDeg(i3430),ResultDeg(cr120_0),ResultDeg(i3500),ResultDeg(cr121_0),ResultDeg(i3520),ResultDeg(cr122_0),ResultDeg(i3580),ResultDeg(cr123_0),ResultDeg(i3610),ResultDeg(cr124_0),ResultDeg(i3650),ResultDeg(cr125_0),ResultDeg(i3680),ResultDeg(cr126_0),ResultDeg(i3720),ResultDeg(cr127_0),ResultDeg(i3740),ResultDeg(cr128_0),ResultDeg(i3790),ResultDeg(cr129_0),ResultDeg(i3820),ResultDeg(cr130_0),ResultDeg(i3860),ResultDeg(cr131_0),ResultDeg(i3960),ResultDeg(cr132_0),ResultDeg(i4040),ResultDeg(cr133_0),ResultDeg(i4140),ResultDeg(cr134_0),ResultDeg(i4180),ResultDeg(cr135_0),ResultDeg(i4400),ResultDeg(cr136_0),ResultDeg(i4620),ResultDeg(cr137_0),ResultDeg(i4840),ResultDeg(cr138_0),ResultDeg(i5060),ResultDeg(cr139_0),ResultDeg(i5280),ResultDeg(cr140_0),ResultDeg(i5500),ResultDeg(cr141_0),ResultDeg(i5720),ResultDeg(cr142_0),ResultDeg(i5940),ResultDeg(cr143_0),ResultDeg(i6160),ResultDeg(cr144_0),ResultDeg(i6380),ResultDeg(cr145_0),ResultDeg(i6600),ResultDeg(cr146_0),ResultDeg(i6820),ResultDeg(cr147_0),ResultDeg(i7040),ResultDeg(cr148_0),ResultDeg(i7260),ResultDeg(cr149_0),ResultDeg(i7480),ResultDeg(cr150_0),ResultDeg(i7700),ResultDeg(cr151_0),ResultDeg(i7920),ResultDeg(cr152_0),ResultDeg(i8140),ResultDeg(cr153_0),ResultDeg(i8360),ResultDeg(cr154_0),ResultDeg(i8580). diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/72-expected.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/72-expected.rls new file mode 100644 index 000000000..5cdcf25fe --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/72-expected.rls @@ -0,0 +1,8 @@ +Le(i1500, i2020) . +ResultDeg(i0, i0, i0, i0, i0, i0, i2020) . + +Le(i1500, i2050) . +ResultDeg(i0, i660, i0, i0, i660, i0, i2050) . + +Goal(i2020) . +Goal(i2050) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/72.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/72.rls new file mode 100644 index 000000000..deee76c7d --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/72.rls @@ -0,0 +1,7 @@ +Le(i1500, i2020) . +ResultDeg(i0, i0, i0, i0, i0, i0, i2020) . + +Le(i1500, i2050) . +ResultDeg(i0, i660, i0, i0, i660, i0, i2050) . + +Goal(?x0) :- Le(i1500,?x0),ResultDeg(?x1,?x2,?x3,?x4,?x5,?x3,?x0). diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73-nt.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73-nt.rls new file mode 100644 index 000000000..3cc000096 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73-nt.rls @@ -0,0 +1 @@ + join(?x):- triple(?y, ?prop1, ?x), triple(?y, ?prop2, ?x) . 
\ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73.nt b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73.nt new file mode 100644 index 000000000..92a47e4b1 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73.nt @@ -0,0 +1,2 @@ + "foo"^^ . + "foo" . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73.rls new file mode 100644 index 000000000..9f2bbf3dc --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/73.rls @@ -0,0 +1 @@ + join(?x):- long(?x), short(?x) . \ No newline at end of file diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/long.csv b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/long.csv new file mode 100644 index 000000000..70422d5fb --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/long.csv @@ -0,0 +1 @@ +"""foo""^^" diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/short.csv b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/short.csv new file mode 100644 index 000000000..6d3c1bc67 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/73/short.csv @@ -0,0 +1 @@ +"""foo""" diff --git a/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/98.rls b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/98.rls new file mode 100644 index 000000000..2ed5eee27 --- /dev/null +++ b/rulewerk-integrationtests/src/test/resources/vlogissues/vlog/98.rls @@ -0,0 +1,22 @@ +tooth(?X) :- molar_tooth(?X) . +part_of(?X, !T), tooth(!T) :- crown(?X) . +part_of(?X, !T), crown(!T) :- cingulum(?X) . + +crown(?X), part_of(?X, !T), molar_tooth(!T) :- molar_crown(?X) . +molar_crown(?X) :- crown(?X), part_of(?X, ?T), molar_tooth(?T) . + +cingulum(?X), part_of(?X, !T), molar_tooth(!T) :- molar_cingulum(?X) . +molar_cingulum(?X) :- cingulum(?X), part_of(?X, ?T), molar_tooth(?T) . + +sameAs(?X, ?Y) :- cingulum(?C), tooth(?X), part_of(?C, ?X), tooth(?Y), part_of(?C, ?Y) . +part_of(?A, ?Z) :- part_of(?A, ?Y), sameAs(?Y, ?Z) . +sameAs(?Y, ?X) :- sameAs(?X, ?Y) . + +part_of(?X, ?Z) :- part_of(?X, ?Y), part_of(?Y, ?Z) . + +part_of_tooth(?X) :- tooth(?C), part_of(?X, ?C) . +part_of_molar_crown(?X) :- molar_crown(?C), part_of(?X, ?C) . +part_of_crown(?X) :- crown(?C), part_of(?X, ?C) . + +molar_cingulum(mc1) . +cingulum(mc2) . \ No newline at end of file diff --git a/rulewerk-owlapi/LICENSE.txt b/rulewerk-owlapi/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-owlapi/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vlog4j-owlapi/pom.xml b/rulewerk-owlapi/pom.xml
similarity index 76%
rename from vlog4j-owlapi/pom.xml
rename to rulewerk-owlapi/pom.xml
index 318cf8c96..5701a169a 100644
--- a/vlog4j-owlapi/pom.xml
+++ b/rulewerk-owlapi/pom.xml
@@ -1,36 +1,36 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-
-	<parent>
-		<groupId>org.semanticweb.vlog4j</groupId>
-		<artifactId>vlog4j-parent</artifactId>
-		<version>0.0.1</version>
-	</parent>
-
-	<artifactId>vlog4j-owlapi</artifactId>
-	<packaging>jar</packaging>
-
-	<name>VLog4j OWL API Support</name>
-	<description>Bindings and utilities for working with OWL ontologies using the OWL API</description>
-
-	<dependencies>
-		<dependency>
-			<groupId>net.sourceforge.owlapi</groupId>
-			<artifactId>owlapi-apibinding</artifactId>
-			<version>${owlapi.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>net.sourceforge.owlapi</groupId>
-			<artifactId>owlapi-api</artifactId>
-			<version>${owlapi.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>${project.groupId}</groupId>
-			<artifactId>vlog4j-core</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-	</dependencies>
-
-</project>
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>org.semanticweb.rulewerk</groupId>
+		<artifactId>rulewerk-parent</artifactId>
+		<version>0.10.0-SNAPSHOT</version>
+	</parent>
+
+	<artifactId>rulewerk-owlapi</artifactId>
+	<packaging>jar</packaging>
+
+	<name>Rulewerk OWL API Support</name>
+	<description>Bindings and utilities for working with OWL ontologies using the OWL API</description>
+
+	<dependencies>
+		<dependency>
+			<groupId>net.sourceforge.owlapi</groupId>
+			<artifactId>owlapi-apibinding</artifactId>
+			<version>${owlapi.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>net.sourceforge.owlapi</groupId>
+			<artifactId>owlapi-api</artifactId>
+			<version>${owlapi.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>${project.groupId}</groupId>
+			<artifactId>rulewerk-core</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+	</dependencies>
+
+</project>
diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java
similarity index 59%
rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java
rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java
index a95902f04..3ca6b58de 100644
--- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/AbstractClassToRuleConverter.java
+++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/AbstractClassToRuleConverter.java
@@ -1,17 +1,17 @@
-package org.semanticweb.vlog4j.owlapi;
+package org.semanticweb.rulewerk.owlapi;
 
 /*-
  * #%L
- * VLog4j OWL API Support
+ * Rulewerk OWL API Support
  * %%
- * Copyright (C) 2018 VLog4j Developers
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
  * %%
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -23,30 +23,29 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLClassExpressionVisitor; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.AtomImpl; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Abstract base class for converters that create rules from OWL class * expressions. - * + * * @author Markus Krötzsch */ public abstract class AbstractClassToRuleConverter implements OWLClassExpressionVisitor { /** - * Helper class to represent a list of atoms, interpreted as a conjunction of - * (positive) atoms. An empty conjunction is "true" (the neutral element of + * Helper class to represent a list of literals, interpreted as a conjunction of + * (positive) literals. An empty conjunction is "true" (the neutral element of * conjunction). If the conjunction would become false due to some unsatisfiable * atom, this is recorded in {@link SimpleConjunction#unsatisfiable}. In this * case, the conjuncts should be ignored. A third relevant option for the head @@ -60,7 +59,7 @@ public abstract class AbstractClassToRuleConverter implements OWLClassExpression */ static class SimpleConjunction { - private List conjuncts; + private List conjuncts; private boolean unsatisfiable; /** @@ -73,19 +72,19 @@ public void init() { } } - public void add(Atom atom) { + public void add(final PositiveLiteral atom) { if (this.unsatisfiable) { return; } - init(); + this.init(); this.conjuncts.add(atom); } - public void add(List atoms) { + public void add(final List atoms) { if (this.unsatisfiable) { return; } - init(); + this.init(); this.conjuncts.addAll(atoms); } @@ -95,9 +94,9 @@ public void makeFalse() { /** * Returns true if this conjunction is true, i.e., if it is an empty conjunction - * (assuming that tautological atoms are never added). A true conjunction can - * become refutable when more atoms are added. - * + * (assuming that tautological literals are never added). A true conjunction can + * become refutable when more literals are added. + * * @return */ public boolean isTrue() { @@ -106,9 +105,9 @@ public boolean isTrue() { /** * Returns true if this conjunction is strongly false, i.e., if it contains an - * unsatisfiable atom. In this case, the actual atoms stored are not relevant. A - * false conjunction can not become true again. - * + * unsatisfiable atom. In this case, the actual literals stored are not + * relevant. A false conjunction can not become true again. + * * @return */ public boolean isFalse() { @@ -118,7 +117,7 @@ public boolean isFalse() { /** * Returns true if this object represents a conjunction at all (even an empty * one). 
-	 * 
+	 *
	 * @return
	 */
	public boolean exists() {
@@ -128,33 +127,33 @@
	/**
	 * Returns true if this object represents a conjunction that contains at least
	 * one atom. For this it should be neither empty, nor false, nor true.
-	 * 
+	 *
	 * @return
	 */
	public boolean hasPositiveAtoms() {
-		return !this.unsatisfiable && this.conjuncts != null && !this.conjuncts.isEmpty();
+		return !this.unsatisfiable && (this.conjuncts != null) && !this.conjuncts.isEmpty();
	}

-	public List<Atom> getConjuncts() {
+	public List<PositiveLiteral> getConjuncts() {
		return this.conjuncts;
	}

	/**
	 * Returns true if the conjunction is false or empty.
-	 * 
+	 *
	 * @return
	 */
	public boolean isFalseOrEmpty() {
-		return this.conjuncts == null || this.unsatisfiable;
+		return (this.conjuncts == null) || this.unsatisfiable;
	}

	/**
	 * Returns true if the conjunction is true or empty.
-	 * 
+	 *
	 * @return
	 */
	public boolean isTrueOrEmpty() {
-		return this.conjuncts == null || (this.conjuncts.isEmpty() && !this.unsatisfiable);
+		return (this.conjuncts == null) || (this.conjuncts.isEmpty() && !this.unsatisfiable);
	}
}

@@ -163,7 +162,7 @@ public boolean isTrueOrEmpty() {
	SimpleConjunction head;

	/**
-	 * Current frontier variable used as the main variable for creating atoms.
+	 * Current frontier variable used as the main variable for creating literals.
	 */
	final Term mainTerm;

@@ -172,8 +171,8 @@ public boolean isTrueOrEmpty() {
	 */
	final OwlAxiomToRulesConverter parent;

-	public AbstractClassToRuleConverter(Term mainTerm, SimpleConjunction body, SimpleConjunction head,
-			OwlAxiomToRulesConverter parent) {
+	public AbstractClassToRuleConverter(final Term mainTerm, final SimpleConjunction body, final SimpleConjunction head,
+			final OwlAxiomToRulesConverter parent) {
		this.mainTerm = mainTerm;
		this.body = body;
		this.head = head;
@@ -181,31 +180,31 @@ public AbstractClassToRuleConverter(Term mainTerm, SimpleConjunction body, Simpl
	}

	/**
-	 * Returns true if the current rule is a tautology, i.e., has an unsatisfiable
-	 * body or a tautological head.
-	 * 
-	 * @return
+	 * Checks whether the current rule is a tautology.
+	 *
+	 * @return true if the current rule is a tautology, i.e., has an unsatisfiable
+	 *         body or a tautological head.
	 */
	public boolean isTautology() {
		return this.body.isFalse() || this.head.isTrue();
	}

	/**
-	 * Returns true if the current rule represents a falsity, i.e., has a
-	 * tautological (or non-existent) body and an unsatisfiable (or no-existent)
-	 * head.
-	 * 
-	 * @return
+	 * Checks whether the current rule is a falsity.
+	 *
+	 * @return true if the current rule represents a falsity, i.e., has a
+	 *         tautological (or non-existent) body and an unsatisfiable (or
+	 *         non-existent) head.
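+	 * For example, converting SubClassOf(owl:Thing, owl:Nothing) produces a
+	 * tautological body and an unsatisfiable head, i.e., a falsity.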
*/ public boolean isFalsity() { return this.body.isTrueOrEmpty() && this.head.isFalseOrEmpty(); } - void handleDisjunction(OWLClassExpression disjunct, Term term) { + void handleDisjunction(final OWLClassExpression disjunct, final Term term) { if (this.isTautology()) { return; } - AbstractClassToRuleConverter converter = makeChildConverter(term); + final AbstractClassToRuleConverter converter = this.makeChildConverter(term); disjunct.accept(converter); if (converter.isTautology()) { this.body.makeFalse(); @@ -226,12 +225,12 @@ void handleDisjunction(OWLClassExpression disjunct, Term term) { } } - void handleDisjunction(Collection disjuncts) { + void handleDisjunction(final Collection disjuncts) { OwlFeatureNotSupportedException owlFeatureNotSupportedException = null; - for (OWLClassExpression disjunct : disjuncts) { + for (final OWLClassExpression disjunct : disjuncts) { try { - handleDisjunction(disjunct, this.mainTerm); - } catch (OwlFeatureNotSupportedException e) { + this.handleDisjunction(disjunct, this.mainTerm); + } catch (final OwlFeatureNotSupportedException e) { owlFeatureNotSupportedException = e; } if (this.isTautology()) { @@ -244,12 +243,13 @@ void handleDisjunction(Collection disjuncts) { } } - void handleConjunction(Collection conjuncts, Term term) { - List converters = new ArrayList<>(); + void handleConjunction(final Collection conjuncts, final Term term) { + final List converters = new ArrayList<>(); OwlFeatureNotSupportedException owlFeatureNotSupportedException = null; boolean hasPositiveConjuncts = false; - for (OWLClassExpression conjunct : conjuncts) { - AbstractClassToRuleConverter converter = makeChildConverter(term); + + for (final OWLClassExpression conjunct : conjuncts) { + final AbstractClassToRuleConverter converter = this.makeChildConverter(term); try { conjunct.accept(converter); if (converter.isTautology()) { @@ -261,7 +261,7 @@ void handleConjunction(Collection conjuncts, Term term) { } hasPositiveConjuncts = hasPositiveConjuncts || converter.head.hasPositiveAtoms(); converters.add(converter); - } catch (OwlFeatureNotSupportedException e) { + } catch (final OwlFeatureNotSupportedException e) { owlFeatureNotSupportedException = e; } } @@ -275,25 +275,25 @@ void handleConjunction(Collection conjuncts, Term term) { return; } - Atom auxAtom = null; + PositiveLiteral auxAtom = null; if (hasPositiveConjuncts || this.head.hasPositiveAtoms()) { // make positive (head) auxiliary atom - for (AbstractClassToRuleConverter converter : converters) { - auxAtom = handlePositiveConjunct(converter, conjuncts, term, auxAtom); + for (final AbstractClassToRuleConverter converter : converters) { + auxAtom = this.handlePositiveConjunct(converter, conjuncts, term, auxAtom); } } else { // make negative (body) auxiliary atom - auxAtom = new AtomImpl(OwlToRulesConversionHelper.getAuxiliaryClassPredicate(conjuncts), - Arrays.asList(term)); + auxAtom = new PositiveLiteralImpl( + OwlToRulesConversionHelper.getConjunctionAuxiliaryClassPredicate(conjuncts), Arrays.asList(term)); this.body.add(auxAtom); - Conjunction auxHead = new ConjunctionImpl(Arrays.asList(auxAtom)); - for (AbstractClassToRuleConverter converter : converters) { + final List auxHead = Collections.singletonList(auxAtom); + for (final AbstractClassToRuleConverter converter : converters) { assert (converter.body.exists()); // else: falsity (empty body true, empty head false) - this.parent.rules.add(new RuleImpl(auxHead, new ConjunctionImpl(converter.body.getConjuncts()))); + this.parent.addAuxiliaryRule(auxHead, 
converter.body.getConjuncts(), term); } } } - private Atom handlePositiveConjunct(AbstractClassToRuleConverter converter, - Collection auxiliaryExpressions, Term term, Atom auxiliaryAtom) { + private PositiveLiteral handlePositiveConjunct(final AbstractClassToRuleConverter converter, + final Collection auxiliaryExpressions, final Term term, PositiveLiteral auxiliaryAtom) { assert (!converter.isFalsity()); assert (!converter.isTautology()); if (converter.body.isTrueOrEmpty()) { @@ -301,66 +301,59 @@ private Atom handlePositiveConjunct(AbstractClassToRuleConverter converter, this.head.add(converter.head.getConjuncts()); } else { assert (converter.body.exists()); // checked in if-branch - List newBody = new ArrayList<>(converter.body.getConjuncts().size() + 1); + final List newBody = new ArrayList<>(converter.body.getConjuncts().size() + 1); if (auxiliaryAtom == null) { - auxiliaryAtom = new AtomImpl( - OwlToRulesConversionHelper.getAuxiliaryClassPredicate(auxiliaryExpressions), + auxiliaryAtom = new PositiveLiteralImpl( + OwlToRulesConversionHelper.getConjunctionAuxiliaryClassPredicate(auxiliaryExpressions), Arrays.asList(term)); this.head.add(auxiliaryAtom); } newBody.add(auxiliaryAtom); newBody.addAll(converter.body.getConjuncts()); - List newHead; + List newHead; if (converter.head.hasPositiveAtoms()) { newHead = converter.head.getConjuncts(); } else { newHead = Arrays.asList(OwlToRulesConversionHelper.getBottom(term)); } - this.parent.rules.add(new RuleImpl(new ConjunctionImpl(newHead), new ConjunctionImpl(newBody))); + this.parent.addAuxiliaryRule(newHead, newBody, term); } return auxiliaryAtom; } /** * Handles a OWLObjectAllValues expression. - * - * @param property - * the OWL property of the expression - * @param filler - * the filler class of the expression + * + * @param property the OWL property of the expression + * @param filler the filler class of the expression */ - void handleObjectAllValues(OWLObjectPropertyExpression property, OWLClassExpression filler) { - Variable variable = this.parent.getFreshVariable(); - OwlToRulesConversionHelper.addConjunctForPropertyExpression(property, this.mainTerm, variable, - this.body); + void handleObjectAllValues(final OWLObjectPropertyExpression property, final OWLClassExpression filler) { + final Variable variable = this.parent.termFactory.getFreshUniversalVariable(); + OwlToRulesConversionHelper.addConjunctForPropertyExpression(property, this.mainTerm, variable, this.body); if (!this.body.isFalse()) { - handleDisjunction(filler, variable); + this.handleDisjunction(filler, variable); } } /** * Handles a OWLObjectSomeValues expression. 
- * - * @param property - * the OWL property of the expression - * @param filler - * the filler class of the expression + * + * @param property the OWL property of the expression + * @param filler the filler class of the expression */ - void handleObjectSomeValues(OWLObjectPropertyExpression property, OWLClassExpression filler) { - Variable variable = this.parent.getFreshVariable(); - OwlToRulesConversionHelper.addConjunctForPropertyExpression(property, this.mainTerm, variable, - this.head); + void handleObjectSomeValues(final OWLObjectPropertyExpression property, final OWLClassExpression filler) { + final Variable variable = this.parent.termFactory.getFreshExistentialVariable(); + OwlToRulesConversionHelper.addConjunctForPropertyExpression(property, this.mainTerm, variable, this.head); if (!this.head.isFalse()) { - handleConjunction(Arrays.asList(filler), variable); + this.handleConjunction(Arrays.asList(filler), variable); } } /** * Creates a new converter object of the same polarity, using the given frontier * variable. - * - * @param mainTerm - * a variable to use + * + * @param mainTerm a variable to use */ public abstract AbstractClassToRuleConverter makeChildConverter(Term mainTerm); diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java similarity index 59% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java index 3170b6559..d27c5a9d3 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleBodyConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleBodyConverter.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ */ import java.util.Arrays; +import java.util.stream.Collectors; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassExpressionVisitor; @@ -41,92 +42,91 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.AtomImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Helper class for transforming OWL class expressions that occur as subclasses - * into suitable body atoms for rules. Auxiliary rules might be created to + * into suitable body literals for rules. Auxiliary rules might be created to * capture the semantics of some constructs. 
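+ * For example, the subclass (body) expression ObjectSomeValuesFrom(:p, :B)
+ * contributes the body literals p(X, Y) and B(Y), with Y a fresh universal
+ * variable of the current axiom.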
- * + * * @author Markus Krötzsch * */ public class ClassToRuleBodyConverter extends AbstractClassToRuleConverter implements OWLClassExpressionVisitor { - public ClassToRuleBodyConverter(Term mainTerm, SimpleConjunction body, SimpleConjunction head, - OwlAxiomToRulesConverter parent) { + public ClassToRuleBodyConverter(final Term mainTerm, final SimpleConjunction body, final SimpleConjunction head, + final OwlAxiomToRulesConverter parent) { super(mainTerm, body, head, parent); } - public ClassToRuleBodyConverter(Term mainTerm, OwlAxiomToRulesConverter parent) { + public ClassToRuleBodyConverter(final Term mainTerm, final OwlAxiomToRulesConverter parent) { this(mainTerm, new SimpleConjunction(), new SimpleConjunction(), parent); } @Override - public AbstractClassToRuleConverter makeChildConverter(Term mainTerm) { + public AbstractClassToRuleConverter makeChildConverter(final Term mainTerm) { return new ClassToRuleBodyConverter(mainTerm, this.parent); } @Override - public void visit(OWLClass ce) { + public void visit(final OWLClass ce) { if (ce.isOWLNothing()) { this.body.makeFalse(); } else if (ce.isOWLThing()) { this.body.init(); } else { - Predicate predicate = OwlToRulesConversionHelper.getClassPredicate(ce); - this.body.add(new AtomImpl(predicate, Arrays.asList(this.mainTerm))); + final Predicate predicate = OwlToRulesConversionHelper.getClassPredicate(ce); + this.body.add(new PositiveLiteralImpl(predicate, Arrays.asList(this.mainTerm))); } } @Override - public void visit(OWLObjectIntersectionOf ce) { - handleDisjunction(ce.getOperands()); + public void visit(final OWLObjectIntersectionOf ce) { + this.handleDisjunction(ce.operands().collect(Collectors.toList())); } @Override - public void visit(OWLObjectUnionOf ce) { - handleConjunction(ce.getOperands(), this.mainTerm); + public void visit(final OWLObjectUnionOf ce) { + this.handleConjunction(ce.operands().collect(Collectors.toList()), this.mainTerm); } @Override - public void visit(OWLObjectComplementOf ce) { - ClassToRuleHeadConverter converter = new ClassToRuleHeadConverter(this.mainTerm, this.body, this.head, + public void visit(final OWLObjectComplementOf ce) { + final ClassToRuleHeadConverter converter = new ClassToRuleHeadConverter(this.mainTerm, this.body, this.head, this.parent); ce.getOperand().accept(converter); } @Override - public void visit(OWLObjectSomeValuesFrom ce) { - handleObjectAllValues(ce.getProperty(), ce.getFiller()); + public void visit(final OWLObjectSomeValuesFrom ce) { + this.handleObjectAllValues(ce.getProperty(), ce.getFiller()); } @Override - public void visit(OWLObjectAllValuesFrom ce) { - Variable variable = this.parent.getFreshVariable(); + public void visit(final OWLObjectAllValuesFrom ce) { + final Variable variable = this.parent.termFactory.getFreshExistentialVariable(); OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, variable, this.head); if (!this.head.isFalse()) { - handleConjunction(Arrays.asList(ce.getFiller()), variable); + this.handleConjunction(Arrays.asList(ce.getFiller()), variable); } } @Override - public void visit(OWLObjectHasValue ce) { - Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller()); - OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, term, - this.body); + public void visit(final OWLObjectHasValue ce) { + final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller(), this.parent.termFactory); + 
OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, term, this.body); } @Override - public void visit(OWLObjectMinCardinality ce) { + public void visit(final OWLObjectMinCardinality ce) { if (ce.getCardinality() == 0) { this.body.init(); // tautological } else if (ce.getCardinality() == 1) { - handleObjectAllValues(ce.getProperty(), ce.getFiller()); + this.handleObjectAllValues(ce.getProperty(), ce.getFiller()); } else { throw new OwlFeatureNotSupportedException( "Min cardinality restrictions with values greater than 1 in subclass positions are not supported in rules."); @@ -134,56 +134,57 @@ public void visit(OWLObjectMinCardinality ce) { } @Override - public void visit(OWLObjectExactCardinality ce) { + public void visit(final OWLObjectExactCardinality ce) { throw new OwlFeatureNotSupportedException( "Exact cardinality restrictions in subclass positions are not supported in rules."); } @Override - public void visit(OWLObjectMaxCardinality ce) { + public void visit(final OWLObjectMaxCardinality ce) { throw new OwlFeatureNotSupportedException( "Maximal cardinality restrictions in subclass positions are not supported in rules."); } @Override - public void visit(OWLObjectHasSelf ce) { - OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, - this.mainTerm, this.body); + public void visit(final OWLObjectHasSelf ce) { + OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, this.mainTerm, + this.body); } + // TODO support this feature @Override - public void visit(OWLObjectOneOf ce) { - // TODO Auto-generated method stub - + public void visit(final OWLObjectOneOf ce) { + throw new OwlFeatureNotSupportedException( + "OWLObjectOneOf in complex class expressions currently not supported!"); } @Override - public void visit(OWLDataSomeValuesFrom ce) { + public void visit(final OWLDataSomeValuesFrom ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataAllValuesFrom ce) { + public void visit(final OWLDataAllValuesFrom ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataHasValue ce) { + public void visit(final OWLDataHasValue ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataMinCardinality ce) { + public void visit(final OWLDataMinCardinality ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataExactCardinality ce) { + public void visit(final OWLDataExactCardinality ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataMaxCardinality ce) { + public void visit(final OWLDataMaxCardinality ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java similarity index 59% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java index 0305133a6..a9da42714 100644 --- 
a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/ClassToRuleHeadConverter.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ClassToRuleHeadConverter.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,8 @@ */ import java.util.Arrays; +import java.util.stream.Collectors; + import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassExpressionVisitor; import org.semanticweb.owlapi.model.OWLDataAllValuesFrom; @@ -40,14 +42,14 @@ import org.semanticweb.owlapi.model.OWLObjectOneOf; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.AtomImpl; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; /** * Helper class for transforming OWL class expressions that occur as - * superclasses into suitable head atoms for rules. - * + * superclasses into suitable head literals for rules. + * * @author Markus Krötzsch * */ @@ -55,71 +57,71 @@ public class ClassToRuleHeadConverter extends AbstractClassToRuleConverter imple boolean currentIsExistential = false; - public ClassToRuleHeadConverter(Term mainTerm, SimpleConjunction body, SimpleConjunction head, - OwlAxiomToRulesConverter parent) { + public ClassToRuleHeadConverter(final Term mainTerm, final SimpleConjunction body, final SimpleConjunction head, + final OwlAxiomToRulesConverter parent) { super(mainTerm, body, head, parent); } - public ClassToRuleHeadConverter(Term mainTerm, OwlAxiomToRulesConverter parent) { + public ClassToRuleHeadConverter(final Term mainTerm, final OwlAxiomToRulesConverter parent) { this(mainTerm, new SimpleConjunction(), new SimpleConjunction(), parent); } @Override - public AbstractClassToRuleConverter makeChildConverter(Term mainTerm) { + public AbstractClassToRuleConverter makeChildConverter(final Term mainTerm) { return new ClassToRuleHeadConverter(mainTerm, this.parent); } @Override - public void visit(OWLClass ce) { + public void visit(final OWLClass ce) { if (ce.isOWLNothing()) { this.head.makeFalse(); } else if (ce.isOWLThing()) { this.head.init(); } else { - Predicate predicate = OwlToRulesConversionHelper.getClassPredicate(ce); - this.head.add(new AtomImpl(predicate, Arrays.asList(this.mainTerm))); + final Predicate predicate = OwlToRulesConversionHelper.getClassPredicate(ce); + this.head.add(new PositiveLiteralImpl(predicate, Arrays.asList(this.mainTerm))); } } @Override - public void visit(OWLObjectIntersectionOf ce) { - handleConjunction(ce.getOperands(), this.mainTerm); + public void visit(final OWLObjectIntersectionOf ce) { + this.handleConjunction(ce.operands().collect(Collectors.toList()), this.mainTerm); } @Override - public void visit(OWLObjectUnionOf ce) { - handleDisjunction(ce.getOperands()); + public void visit(final OWLObjectUnionOf ce) { + this.handleDisjunction(ce.operands().collect(Collectors.toList())); } @Override - 
public void visit(OWLObjectComplementOf ce) { - ClassToRuleBodyConverter converter = new ClassToRuleBodyConverter(this.mainTerm, this.body, this.head, + public void visit(final OWLObjectComplementOf ce) { + final ClassToRuleBodyConverter converter = new ClassToRuleBodyConverter(this.mainTerm, this.body, this.head, this.parent); ce.getOperand().accept(converter); } @Override - public void visit(OWLObjectSomeValuesFrom ce) { - handleObjectSomeValues(ce.getProperty(), ce.getFiller()); + public void visit(final OWLObjectSomeValuesFrom ce) { + this.handleObjectSomeValues(ce.getProperty(), ce.getFiller()); } @Override - public void visit(OWLObjectAllValuesFrom ce) { - handleObjectAllValues(ce.getProperty(), ce.getFiller()); + public void visit(final OWLObjectAllValuesFrom ce) { + this.handleObjectAllValues(ce.getProperty(), ce.getFiller()); } @Override - public void visit(OWLObjectHasValue ce) { - Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller()); + public void visit(final OWLObjectHasValue ce) { + final Term term = OwlToRulesConversionHelper.getIndividualTerm(ce.getFiller(), this.parent.termFactory); OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, term, this.head); } @Override - public void visit(OWLObjectMinCardinality ce) { + public void visit(final OWLObjectMinCardinality ce) { if (ce.getCardinality() == 0) { this.head.init(); // tautological } else if (ce.getCardinality() == 1) { - handleObjectSomeValues(ce.getProperty(), ce.getFiller()); + this.handleObjectSomeValues(ce.getProperty(), ce.getFiller()); } else { throw new OwlFeatureNotSupportedException( "Min cardinality restrictions with values greater than 1 in superclass positions are not supported in rules."); @@ -127,56 +129,56 @@ public void visit(OWLObjectMinCardinality ce) { } @Override - public void visit(OWLObjectExactCardinality ce) { + public void visit(final OWLObjectExactCardinality ce) { throw new OwlFeatureNotSupportedException( "Exact cardinality restrictions in superclass positions are not supported in rules."); } @Override - public void visit(OWLObjectMaxCardinality ce) { - // TODO Auto-generated method stub - + public void visit(final OWLObjectMaxCardinality ce) { + throw new OwlFeatureNotSupportedException( + "Max cardinality restrictions in superclass positions are not supported in rules."); } @Override - public void visit(OWLObjectHasSelf ce) { + public void visit(final OWLObjectHasSelf ce) { OwlToRulesConversionHelper.addConjunctForPropertyExpression(ce.getProperty(), this.mainTerm, this.mainTerm, this.head); } @Override - public void visit(OWLObjectOneOf ce) { - // TODO Auto-generated method stub - + public void visit(final OWLObjectOneOf ce) { + throw new OwlFeatureNotSupportedException( + "Nominal restrictions in superclass positions are not supported in rules, due to lack of equality support."); } @Override - public void visit(OWLDataSomeValuesFrom ce) { + public void visit(final OWLDataSomeValuesFrom ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataAllValuesFrom ce) { + public void visit(final OWLDataAllValuesFrom ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataHasValue ce) { + public void visit(final OWLDataHasValue ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataMinCardinality 
ce) { + public void visit(final OWLDataMinCardinality ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataExactCardinality ce) { + public void visit(final OWLDataExactCardinality ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } @Override - public void visit(OWLDataMaxCardinality ce) { + public void visit(final OWLDataMaxCardinality ce) { throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); } diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ConverterTermFactory.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ConverterTermFactory.java new file mode 100644 index 000000000..14728f26c --- /dev/null +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/ConverterTermFactory.java @@ -0,0 +1,82 @@ +package org.semanticweb.rulewerk.owlapi; + +/*- + * #%L + * Rulewerk OWL API Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ExistentialVariableImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; + +/** + * Factory for retrieving Terms in the context of a translation from an OWL + * Axiom to rules (and facts). + * + * @author dragoste + * + */ +public class ConverterTermFactory { + + private Skolemization skolemization = new Skolemization(); + + final Variable frontierVariable = new UniversalVariableImpl("X"); + + private int freshVariableCounter = 0; + + /** + * Changes the renaming function for blank node IDs. Blank nodes with the same + * local ID will be represented differently before and after this function is + * called, but will retain a constant interpretation otherwise. + */ + public void startNewBlankNodeContext() { + this.skolemization = new Skolemization(); + } + + void resetFreshVariableCounter() { + this.freshVariableCounter = 0; + } + + /** + * Returns a fresh universal variable, which can be used as auxiliary variable + * in the current axiom's translation. + * + * @return a variable + */ + Variable getFreshUniversalVariable() { + this.freshVariableCounter++; + return new UniversalVariableImpl("Y" + this.freshVariableCounter); + } + + /** + * Returns a fresh existential variable, which can be used as auxiliary variable + * in the current axiom's translation. 
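+	 * Successive calls draw names from the same counter as the universal
+	 * variant (Y1, Y2, ...); the counter is reset before each axiom is
+	 * converted.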
+ * + * @return a variable + */ + Variable getFreshExistentialVariable() { + this.freshVariableCounter++; + return new ExistentialVariableImpl("Y" + this.freshVariableCounter); + } + + Skolemization getSkolemization() { + return this.skolemization; + } + +} diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java new file mode 100644 index 000000000..a2bc01d60 --- /dev/null +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverter.java @@ -0,0 +1,612 @@ +package org.semanticweb.rulewerk.owlapi; + +/*- + * #%L + * Rulewerk OWL API Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLAxiomVisitor; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; +import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; +import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLHasKeyAxiom; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectOneOf; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; +import 
org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; +import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.SWRLRule; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.FactImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; + +/** + * Class for converting OWL axioms to rules. + * + * @author Markus Kroetzsch + * + */ +public class OwlAxiomToRulesConverter implements OWLAxiomVisitor { + + static OWLDataFactory owlDataFactory = OWLManager.getOWLDataFactory(); + + final ConverterTermFactory termFactory = new ConverterTermFactory(); + + final Set rules = new HashSet<>(); + final Set facts = new HashSet<>(); + + /** + * Processes the output of an {@link AbstractClassToRuleConverter} and + * transforms it into a statement that is added. Tautologies are not added but + * simply dropped. Formulas that have only positive atoms (empty body) are + * transformed into one or more facts. All other cases lead to a single rule + * being added. 
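+	 * For example, ClassAssertion(:A, :a) becomes the fact A(a), while
+	 * SubClassOf(:A, :B) becomes the rule B(X) :- A(X).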
+ * + * @param converter + */ + void addRule(final AbstractClassToRuleConverter converter) { + if (!converter.isTautology()) { + final Conjunction headConjunction = this.constructHeadConjunction(converter); + + if (converter.body.isTrueOrEmpty() && (headConjunction.getVariables().count() == 0)) { + for (final PositiveLiteral conjunct : headConjunction.getLiterals()) { + this.facts.add(new FactImpl(conjunct.getPredicate(), conjunct.getArguments())); + } + } else { + final Conjunction bodyConjunction = this.constructBodyConjunction(converter); + this.rules.add(Expressions.makePositiveLiteralsRule(headConjunction, bodyConjunction)); + } + } + } + + private Conjunction constructBodyConjunction(final AbstractClassToRuleConverter converter) { + if (converter.body.isTrueOrEmpty()) { + return new ConjunctionImpl<>(Arrays.asList(OwlToRulesConversionHelper.getTop(converter.mainTerm))); + } else { + return new ConjunctionImpl<>(converter.body.getConjuncts()); + } + } + + private Conjunction constructHeadConjunction(final AbstractClassToRuleConverter converter) { + if (converter.head.isFalseOrEmpty()) { + return new ConjunctionImpl<>(Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.mainTerm))); + } else { + return new ConjunctionImpl<>(converter.head.getConjuncts()); + } + } + + Term replaceTerm(Term term, Term oldTerm, Term newTerm) { + return term.equals(oldTerm) ? newTerm : term; + } + + PositiveLiteralImpl makeTermReplacedLiteral(Literal literal, Term oldTerm, Term newTerm) { + if (literal.isNegated()) { + throw new OwlFeatureNotSupportedException( + "Nonmonotonic negation of literals is not handled in OWL conversion."); + } + return new PositiveLiteralImpl(literal.getPredicate(), + literal.getTerms().map(term -> this.replaceTerm(term, oldTerm, newTerm)).collect(Collectors.toList())); + } + + /** + * Creates and adds an auxiliary rule for the given body and head. All auxiliary + * rules are renamings of class expressions, based on auxiliary class names + * (unary predicates). The given term is the term used in this auxiliary + * predicate. + * + * Variables used in auxiliary atoms can be existentially quantified, but the + * corresponding variable in auxiliary rules must always be universally + * quantified. Therefore, if the given term is an existential variable, the + * method will replace it by a universal one of the same name. + * + * @param head + * @param body + * @param auxTerm + */ + void addAuxiliaryRule(List head, List body, Term auxTerm) { + if (auxTerm.getType() == TermType.EXISTENTIAL_VARIABLE) { + Term newVariable = new UniversalVariableImpl(auxTerm.getName()); + List newBody = new ArrayList<>(); + List newHead = new ArrayList<>(); + body.forEach(literal -> newBody.add(this.makeTermReplacedLiteral(literal, auxTerm, newVariable))); + head.forEach(literal -> newHead.add(this.makeTermReplacedLiteral(literal, auxTerm, newVariable))); + this.rules.add(new RuleImpl(new ConjunctionImpl<>(newHead), new ConjunctionImpl<>(newBody))); + } else { + this.rules.add(new RuleImpl(new ConjunctionImpl<>(head), new ConjunctionImpl<>(body))); + } + + } + + /** + * Resets the internal counter used for generating fresh variables. + */ + void startAxiomConversion() { + // this.freshVariableCounter = 0; + this.termFactory.resetFreshVariableCounter(); + } + + /** + * Processes an OWL class inclusion axiom with the two class expressions as + * given, and adds the resulting rules. 
The method proceeds by first converting + * the superclass, then converting the subclass with the same body and head atom + * buffers, and finally creating a rule from the collected body and head. The + * conversions may lead to auxiliary rules being created during processing, so + * additional rules besides the one that is added here might be created. + * + * @param subClass + * @param superClass + */ + void addSubClassAxiom(final OWLClassExpression subClass, final OWLClassExpression superClass) { + if (subClass instanceof OWLObjectOneOf) { + final OWLObjectOneOf subClassObjectOneOf = (OWLObjectOneOf) subClass; + subClassObjectOneOf.individuals() + .forEach(individual -> this.visitClassAssertionAxiom(individual, superClass)); + } else { + this.startAxiomConversion(); + + final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter( + this.termFactory.frontierVariable, this); + superClass.accept(headConverter); + final ClassToRuleBodyConverter bodyConverter = new ClassToRuleBodyConverter( + this.termFactory.frontierVariable, headConverter.body, headConverter.head, this); + bodyConverter.handleDisjunction(subClass, this.termFactory.frontierVariable); + this.addRule(bodyConverter); + } + } + + @Override + public void visit(final OWLSubClassOfAxiom axiom) { + this.addSubClassAxiom(axiom.getSubClass(), axiom.getSuperClass()); + } + + @Override + public void visit(final OWLNegativeObjectPropertyAssertionAxiom axiom) { + final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject(), this.termFactory); + final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject(), this.termFactory); + final Literal atom = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), subject, object); + final PositiveLiteral bot = OwlToRulesConversionHelper.getBottom(subject); + this.rules.add(Expressions.makeRule(bot, atom)); + } + + @Override + public void visit(final OWLAsymmetricObjectPropertyAxiom axiom) { + this.startAxiomConversion(); + final Variable secondVariable = this.termFactory.getFreshUniversalVariable(); + final Literal atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), + this.termFactory.frontierVariable, secondVariable); + final Literal atom2 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), secondVariable, + this.termFactory.frontierVariable); + this.rules.add(Expressions.makeRule(OwlToRulesConversionHelper.getBottom(this.termFactory.frontierVariable), + atom1, atom2)); + } + + @Override + public void visit(final OWLReflexiveObjectPropertyAxiom axiom) { + final PositiveLiteral atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), + this.termFactory.frontierVariable, this.termFactory.frontierVariable); + this.rules + .add(Expressions.makeRule(atom1, OwlToRulesConversionHelper.getTop(this.termFactory.frontierVariable))); + } + + @Override + public void visit(final OWLDisjointClassesAxiom axiom) { + final List disjointClassExpressions = axiom.operands().collect(Collectors.toList()); + if (disjointClassExpressions.size() < 2) { + throw new IllegalArgumentException( + "OWLDisjointClassesAxiom " + axiom + " expected to have at least 2 operands!"); + } + + while (disjointClassExpressions.size() > 2) { + final OWLClassExpression a = this.removeFirst(disjointClassExpressions); + final OWLClassExpression b = this.removeFirst(disjointClassExpressions); + + final OWLClass disjunctionAB = this.disjointClassExpressionsToSubClassOfAuxiliaryDisjunction(a, b); + 
disjointClassExpressions.add(disjunctionAB); + } + + final OWLObjectIntersectionOf disjointIntersection = owlDataFactory + .getOWLObjectIntersectionOf(disjointClassExpressions.get(0), disjointClassExpressions.get(1)); + this.addSubClassAxiom(disjointIntersection, owlDataFactory.getOWLNothing()); + } + + private T removeFirst(final List list) { + final T t = list.get(0); + list.remove(t); + return t; + } + + private OWLClass disjointClassExpressionsToSubClassOfAuxiliaryDisjunction(final OWLClassExpression a, + final OWLClassExpression b) { + this.addSubClassAxiom(owlDataFactory.getOWLObjectIntersectionOf(a, b), owlDataFactory.getOWLNothing()); + + final OWLClass auxiliaryClass = owlDataFactory + .getOWLClass(OwlToRulesConversionHelper.getAuxiliaryClassNameDisjuncts((Arrays.asList(a, b)))); + this.addSubClassAxiom(a, auxiliaryClass); + this.addSubClassAxiom(b, auxiliaryClass); + + return auxiliaryClass; + } + + @Override + public void visit(final OWLDataPropertyDomainAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); + } + + @Override + public void visit(final OWLObjectPropertyDomainAxiom axiom) { + final OWLClassExpression existsProperty = owlDataFactory.getOWLObjectSomeValuesFrom(axiom.getProperty(), + owlDataFactory.getOWLThing()); + this.addSubClassAxiom(existsProperty, axiom.getDomain()); + } + + @Override + public void visit(final OWLEquivalentObjectPropertiesAxiom axiom) { + this.startAxiomConversion(); + final Variable secondVariable = this.termFactory.getFreshUniversalVariable(); + + PositiveLiteral firstAtom = null; + Literal previousAtom = null; + PositiveLiteral currentAtom = null; + + for (final OWLObjectPropertyExpression owlObjectPropertyExpression : axiom.properties() + .collect(Collectors.toList())) { + currentAtom = OwlToRulesConversionHelper.getObjectPropertyAtom(owlObjectPropertyExpression, + this.termFactory.frontierVariable, secondVariable); + if (previousAtom == null) { + firstAtom = currentAtom; + } else { + this.rules.add(Expressions.makeRule(currentAtom, previousAtom)); + } + previousAtom = currentAtom; + } + + if (currentAtom != null) { + this.rules.add(Expressions.makeRule(firstAtom, currentAtom)); + } + } + + @Override + public void visit(final OWLNegativeDataPropertyAssertionAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); + + } + + @Override + public void visit(final OWLDifferentIndividualsAxiom axiom) { + throw new OwlFeatureNotSupportedException( + "DifferentIndividuals currently not supported, due to lack of equality support."); + } + + @Override + public void visit(final OWLDisjointDataPropertiesAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); + } + + @Override + public void visit(final OWLDisjointObjectPropertiesAxiom axiom) { + // FIXME How should we interpret axiom with 1 or 0 operands? 
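+		// Note: the OWL 2 structural specification requires at least two
+		// operands for this axiom type; an axiom with fewer operands would be
+		// vacuously true, so the check below rejects it as malformed input.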
+ if (axiom.operands().count() < 2) { + throw new OwlFeatureNotSupportedException( + "OWLDisjointObjectPropertiesAxiom " + axiom + " only supported for 2 or more operands!"); + } + final Term sourceTerm = this.termFactory.frontierVariable; + final Term targetTerm = this.termFactory.getFreshUniversalVariable(); + + if (axiom.operands().anyMatch(prop -> prop.isOWLTopObjectProperty())) { + this.toUnsatisfiableObjectProperties(axiom, sourceTerm, targetTerm); + } else { + + final List disjointPropertyExpressions = axiom.operands() + .collect(Collectors.toList()); + + while (disjointPropertyExpressions.size() > 2) { + final OWLObjectPropertyExpression a = this.removeFirst(disjointPropertyExpressions); + final OWLObjectPropertyExpression b = this.removeFirst(disjointPropertyExpressions); + final OWLObjectProperty disjunctionAB = owlDataFactory.getOWLObjectProperty( + OwlToRulesConversionHelper.getAuxiliaryPropertyNameDisjuncts(Arrays.asList(a, b))); + + final PositiveLiteral literalA = OwlToRulesConversionHelper.getObjectPropertyAtom(a, sourceTerm, + targetTerm); + final PositiveLiteral literalB = OwlToRulesConversionHelper.getObjectPropertyAtom(b, sourceTerm, + targetTerm); + this.addUnsatisfiableRule(Expressions.makeConjunction(literalA, literalB), sourceTerm); + + final PositiveLiteral disjunctionABLiteral = OwlToRulesConversionHelper + .getObjectPropertyAtom(disjunctionAB, sourceTerm, targetTerm); + this.rules.add(Expressions.makeRule(disjunctionABLiteral, literalA)); + this.rules.add(Expressions.makeRule(disjunctionABLiteral, literalB)); + + disjointPropertyExpressions.add(disjunctionAB); + } + + final PositiveLiteral literalA = OwlToRulesConversionHelper + .getObjectPropertyAtom(disjointPropertyExpressions.get(0), sourceTerm, targetTerm); + final PositiveLiteral literalB = OwlToRulesConversionHelper + .getObjectPropertyAtom(disjointPropertyExpressions.get(1), sourceTerm, targetTerm); + this.addUnsatisfiableRule(Expressions.makeConjunction(literalA, literalB), sourceTerm); + } + } + + private void toUnsatisfiableObjectProperties(final OWLDisjointObjectPropertiesAxiom axiom, final Term sourceTerm, + final Term targetTerm) { + axiom.operands().forEach(prop -> { + if (!prop.isOWLTopObjectProperty()) { + final Literal propertyLiteral = OwlToRulesConversionHelper.getObjectPropertyAtom(prop, sourceTerm, + targetTerm); + this.addUnsatisfiableRule(Expressions.makeConjunction(propertyLiteral), sourceTerm); + } + }); + } + + void addUnsatisfiableRule(final Conjunction body, final Term term) { + final Rule ruleConjunctionUnsatisfiable = Expressions + .makeRule(Expressions.makePositiveConjunction(OwlToRulesConversionHelper.getBottom(term)), body); + this.rules.add(ruleConjunctionUnsatisfiable); + } + + @Override + public void visit(final OWLObjectPropertyRangeAxiom axiom) { + this.startAxiomConversion(); + final OWLClassExpression forallPropertyDomain = owlDataFactory.getOWLObjectAllValuesFrom(axiom.getProperty(), + axiom.getRange()); + final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(this.termFactory.frontierVariable, + this); + forallPropertyDomain.accept(headConverter); + this.addRule(headConverter); + } + + @Override + public void visit(final OWLObjectPropertyAssertionAxiom axiom) { + final Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject(), this.termFactory); + final Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject(), this.termFactory); + 
this.facts.add(OwlToRulesConversionHelper.getObjectPropertyFact(axiom.getProperty(), subject, object)); + } + + @Override + public void visit(final OWLFunctionalObjectPropertyAxiom axiom) { + throw new OwlFeatureNotSupportedException( + "FunctionalObjectProperty currently not supported, due to lack of equality support."); + } + + @Override + public void visit(final OWLSubObjectPropertyOfAxiom axiom) { + this.startAxiomConversion(); + final Variable secondVariable = this.termFactory.getFreshUniversalVariable(); + final Literal subRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSubProperty(), + this.termFactory.frontierVariable, secondVariable); + final PositiveLiteral superRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSuperProperty(), + this.termFactory.frontierVariable, secondVariable); + + this.rules.add(Expressions.makeRule(superRole, subRole)); + } + + @Override + public void visit(final OWLDisjointUnionAxiom axiom) { + throw new OwlFeatureNotSupportedException( + "OWL DisjointUnion not supported, since the cases where it would be expressible in disjunction-free rules are not useful."); + } + + @Override + public void visit(final OWLSymmetricObjectPropertyAxiom axiom) { + this.startAxiomConversion(); + final Variable secondVariable = this.termFactory.getFreshUniversalVariable(); + final Literal atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), + this.termFactory.frontierVariable, secondVariable); + final PositiveLiteral atom2 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), + secondVariable, this.termFactory.frontierVariable); + + this.rules.add(Expressions.makeRule(atom2, atom1)); + } + + @Override + public void visit(final OWLDataPropertyRangeAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); + } + + @Override + public void visit(final OWLFunctionalDataPropertyAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); + } + + @Override + public void visit(final OWLEquivalentDataPropertiesAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); + } + + @Override + public void visit(final OWLClassAssertionAxiom axiom) { + this.visitClassAssertionAxiom(axiom.getIndividual(), axiom.getClassExpression()); + } + + void visitClassAssertionAxiom(final OWLIndividual individual, final OWLClassExpression classExpression) { + this.startAxiomConversion(); + final Term term = OwlToRulesConversionHelper.getIndividualTerm(individual, this.termFactory); + final ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(term, this); + classExpression.accept(headConverter); + this.addRule(headConverter); + } + + @Override + public void visit(final OWLEquivalentClassesAxiom axiom) { + OWLClassExpression firstClass = null; + OWLClassExpression previousClass = null; + OWLClassExpression currentClass = null; + for (final OWLClassExpression owlClassExpression : axiom.classExpressions().collect(Collectors.toList())) { + currentClass = owlClassExpression; + if (previousClass == null) { + firstClass = currentClass; + } else { + this.addSubClassAxiom(previousClass, currentClass); + } + previousClass = currentClass; + } + + if (currentClass != null) { + this.addSubClassAxiom(currentClass, firstClass); + } + } + + @Override + public void visit(final OWLDataPropertyAssertionAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently 
not supported in rules."); + } + + @Override + public void visit(final OWLTransitiveObjectPropertyAxiom axiom) { + this.startAxiomConversion(); + final Variable var1 = this.termFactory.getFreshUniversalVariable(); + final Variable var2 = this.termFactory.getFreshUniversalVariable(); + final Literal atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), + this.termFactory.frontierVariable, var1); + final Literal atom2 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), var1, var2); + final PositiveLiteral atomHead = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), + this.termFactory.frontierVariable, var2); + + this.rules.add(Expressions.makeRule(atomHead, atom1, atom2)); + } + + @Override + public void visit(final OWLIrreflexiveObjectPropertyAxiom axiom) { + final Literal atomSelf = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), + this.termFactory.frontierVariable, this.termFactory.frontierVariable); + this.rules.add(Expressions.makeRule(OwlToRulesConversionHelper.getBottom(this.termFactory.frontierVariable), + atomSelf)); + } + + @Override + public void visit(final OWLSubDataPropertyOfAxiom axiom) { + throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); + } + + @Override + public void visit(final OWLInverseFunctionalObjectPropertyAxiom axiom) { + throw new OwlFeatureNotSupportedException( + "InverseFunctionalObjectProperty currently not supported, due to lack of equality support."); + } + + @Override + public void visit(final OWLSameIndividualAxiom axiom) { + throw new OwlFeatureNotSupportedException( + "SameIndividual currently not supported, due to lack of equality support."); + } + + @Override + public void visit(final OWLSubPropertyChainOfAxiom axiom) { + this.startAxiomConversion(); + Variable previousVariable = this.termFactory.frontierVariable; + Variable currentVariable = null; + final List body = new ArrayList<>(); + + for (final OWLObjectPropertyExpression owlObjectPropertyExpression : axiom.getPropertyChain()) { + currentVariable = this.termFactory.getFreshUniversalVariable(); + body.add(OwlToRulesConversionHelper.getObjectPropertyAtom(owlObjectPropertyExpression, previousVariable, + currentVariable)); + previousVariable = currentVariable; + } + + final PositiveLiteral headAtom = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSuperProperty(), + this.termFactory.frontierVariable, currentVariable); + + this.rules.add( + Expressions.makeRule(Expressions.makePositiveConjunction(headAtom), Expressions.makeConjunction(body))); + } + + @Override + public void visit(final OWLInverseObjectPropertiesAxiom axiom) { + this.startAxiomConversion(); + final Variable secondVariable = this.termFactory.getFreshUniversalVariable(); + final PositiveLiteral firstRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getFirstProperty(), + this.termFactory.frontierVariable, secondVariable); + final PositiveLiteral secondRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSecondProperty(), + secondVariable, this.termFactory.frontierVariable); + + this.rules.add(Expressions.makeRule(secondRole, firstRole)); + this.rules.add(Expressions.makeRule(firstRole, secondRole)); + } + + @Override + public void visit(final OWLHasKeyAxiom axiom) { + throw new OwlFeatureNotSupportedException("HasKey currently not supported, due to lack of equality support."); + } + + @Override + public void visit(final SWRLRule rule) { + throw new 
OwlFeatureNotSupportedException("SWRLRule currently not supported."); + + } + + /** + * Changes the renaming function for blank node IDs. Blank nodes with the same + * local ID will be represented differently before and after this function is + * called, but will retain a constant interpretation otherwise. + */ + public void startNewBlankNodeContext() { + this.termFactory.startNewBlankNodeContext(); + } + +} diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java similarity index 74% rename from vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java rename to rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java index d3ddbf4cf..5a3fb1ba7 100644 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlFeatureNotSupportedException.java +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlFeatureNotSupportedException.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.owlapi; +package org.semanticweb.rulewerk.owlapi; /*- * #%L - * VLog4j OWL API Support + * Rulewerk OWL API Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,25 +20,26 @@ * #L% */ +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + /** * Exception that indicates that the translation of OWL into rules has failed * due to an expressive feature of OWL that cannot be captured in rules. - * + * * @author Markus Krötzsch * */ -public class OwlFeatureNotSupportedException extends RuntimeException { +public class OwlFeatureNotSupportedException extends RulewerkRuntimeException { /** - * + * */ private static final long serialVersionUID = -194716185012512419L; /** * Creates a new exception. - * - * @param cause - * message explaining the error + * + * @param cause message explaining the error */ public OwlFeatureNotSupportedException(String cause) { super(cause); diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java new file mode 100644 index 000000000..d831305e9 --- /dev/null +++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConversionHelper.java @@ -0,0 +1,198 @@ +package org.semanticweb.rulewerk.owlapi; + +/*- + * #%L + * Rulewerk OWL API Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+import java.io.UnsupportedEncodingException;
+import java.math.BigInteger;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.semanticweb.owlapi.model.OWLAnonymousIndividual;
+import org.semanticweb.owlapi.model.OWLClass;
+import org.semanticweb.owlapi.model.OWLClassExpression;
+import org.semanticweb.owlapi.model.OWLIndividual;
+import org.semanticweb.owlapi.model.OWLNamedIndividual;
+import org.semanticweb.owlapi.model.OWLObject;
+import org.semanticweb.owlapi.model.OWLObjectProperty;
+import org.semanticweb.owlapi.model.OWLObjectPropertyExpression;
+import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl;
+import org.semanticweb.rulewerk.core.model.implementation.FactImpl;
+import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl;
+import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl;
+import org.semanticweb.rulewerk.owlapi.AbstractClassToRuleConverter.SimpleConjunction;
+
+/**
+ * Utility class for helper functions that are used to convert OWL API objects
+ * to rules.
+ *
+ * @author Markus Kroetzsch
+ *
+ */
+public class OwlToRulesConversionHelper {
+
+	/**
+	 * Returns a {@link Term} to represent an {@link OWLIndividual} in rules.
+	 *
+	 * @param owlIndividual        the individual to get a term for
+	 * @param converterTermFactory the term factory used to skolemize anonymous
+	 *                             individuals
+	 * @return a suitable term
+	 */
+	public static Term getIndividualTerm(final OWLIndividual owlIndividual,
+			final ConverterTermFactory converterTermFactory) {
+		if (owlIndividual instanceof OWLNamedIndividual) {
+			return new AbstractConstantImpl(((OWLNamedIndividual) owlIndividual).getIRI().toString());
+		} else if (owlIndividual instanceof OWLAnonymousIndividual) {
+			return converterTermFactory.getSkolemization()
+					.getRenamedNamedNull(((OWLAnonymousIndividual) owlIndividual).getID().toString());
+		} else {
+			throw new OwlFeatureNotSupportedException(
+					"Could not convert OWL individual '" + owlIndividual.toString() + "' to a term.");
+		}
+	}
+
+	/**
+	 * Returns a {@link Predicate} to represent an {@link OWLClass} in rules.
+	 *
+	 * @param owlClass the atomic class to get a predicate for
+	 * @return a suitable unary predicate
+	 */
+	public static Predicate getClassPredicate(final OWLClass owlClass) {
+		return new PredicateImpl(owlClass.getIRI().toString(), 1);
+	}
+
+	/**
+	 * Returns a {@link Predicate} to represent an {@link OWLObjectProperty} in
+	 * rules.
+	 *
+	 * @param owlObjectProperty the atomic property to get a predicate for
+	 * @return a suitable binary predicate
+	 */
+	public static Predicate getObjectPropertyPredicate(final OWLObjectProperty owlObjectProperty) {
+		return new PredicateImpl(owlObjectProperty.getIRI().toString(), 2);
+	}
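+
+	// Illustration (editorial note, not from the original sources): predicates
+	// inherit their names directly from OWL IRIs, so the class
+	// <http://example.org/A> becomes the unary predicate "http://example.org/A"
+	// and the object property <http://example.org/R> becomes the binary
+	// predicate "http://example.org/R".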
+
+	/**
+	 * Returns a unary {@link Predicate} to represent the conjunction of a given
+	 * collection of {@link OWLClassExpression}s in rules.
+	 *
+	 * @param conjuncts a collection of class expressions whose intersection the
+	 *                  returned predicate represents.
+	 * @return a suitable unary predicate.
+	 */
+	public static Predicate getConjunctionAuxiliaryClassPredicate(
+			final Collection<? extends OWLClassExpression> conjuncts) {
+		return new PredicateImpl(getAuxiliaryClassNameConjuncts(conjuncts), 1);
+	}
+
+	static String getAuxiliaryClassNameConjuncts(final Collection<? extends OWLClassExpression> conjuncts) {
+		return getAuxiliaryOWLObjectName("aux-class-conjunction", conjuncts);
+	}
+
+	static String getAuxiliaryClassNameDisjuncts(final Collection<? extends OWLClassExpression> disjuncts) {
+		return getAuxiliaryOWLObjectName("aux-class-disjunction", disjuncts);
+	}
+
+	static String getAuxiliaryPropertyNameDisjuncts(final Collection<? extends OWLObjectPropertyExpression> disjuncts) {
+		return getAuxiliaryOWLObjectName("aux-objectProperty-disjunction", disjuncts);
+	}
+
+	private static String getAuxiliaryOWLObjectName(final String prefix,
+			final Collection<? extends OWLObject> owlObjects) {
+		final MessageDigest messageDigest;
+		try {
+			messageDigest = MessageDigest.getInstance("MD5");
+			for (final OWLObject owlObject : owlObjects) {
+				messageDigest.update(owlObject.toString().getBytes("UTF-8"));
+			}
+		} catch (NoSuchAlgorithmException | UnsupportedEncodingException e) {
+			throw new RulewerkRuntimeException("We are missing some core functionality of Java here", e);
+		}
+		final byte[] digest = messageDigest.digest();
+		final BigInteger bigInt = new BigInteger(1, digest);
+		final String hashtext = bigInt.toString(16);
+		return prefix + hashtext;
+	}
+
+	/**
+	 * Adds a binary predicate for a given OWL object property expression to the
+	 * given conjunction. If the expression is an inverse, source and target terms
+	 * are swapped. If the expression is top or bottom, it is handled appropriately.
+	 *
+	 * @param owlObjectPropertyExpression the property expression
+	 * @param sourceTerm                  the term that should be in the first
+	 *                                    parameter position of the original
+	 *                                    expression
+	 * @param targetTerm                  the term that should be in the second
+	 *                                    parameter position of the original
+	 *                                    expression
+	 * @param conjuncts                   the conjunction that the new atom is
+	 *                                    added to
+	 */
+	static void addConjunctForPropertyExpression(final OWLObjectPropertyExpression owlObjectPropertyExpression,
+			final Term sourceTerm, final Term targetTerm, final SimpleConjunction conjuncts) {
+		if (owlObjectPropertyExpression.isOWLTopObjectProperty()) {
+			conjuncts.init();
+		} else if (owlObjectPropertyExpression.isOWLBottomObjectProperty()) {
+			conjuncts.makeFalse();
+		} else {
+			conjuncts.add(getObjectPropertyAtom(owlObjectPropertyExpression, sourceTerm, targetTerm));
+		}
+	}
+
+	public static PositiveLiteral getObjectPropertyAtom(final OWLObjectPropertyExpression owlObjectPropertyExpression,
+			final Term sourceTerm, final Term targetTerm) {
+		if (owlObjectPropertyExpression.isAnonymous()) {
+			final Predicate predicate = OwlToRulesConversionHelper
+					.getObjectPropertyPredicate(owlObjectPropertyExpression.getInverseProperty().asOWLObjectProperty());
+			return new PositiveLiteralImpl(predicate, Arrays.asList(targetTerm, sourceTerm));
+		} else {
+			final Predicate predicate = OwlToRulesConversionHelper
+					.getObjectPropertyPredicate(owlObjectPropertyExpression.asOWLObjectProperty());
+			return new PositiveLiteralImpl(predicate, Arrays.asList(sourceTerm, targetTerm));
+		}
+	}
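+
+	// Sketch (editorial, follows directly from the code above): inverses are
+	// normalized by swapping the argument order, so for terms s and t,
+	//   getObjectPropertyAtom(ObjectInverseOf(S), s, t)
+	// yields the atom S(t, s) rather than introducing a predicate for the
+	// inverse property.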
+
+	public static Fact getObjectPropertyFact(final OWLObjectPropertyExpression owlObjectPropertyExpression,
+			final Term sourceTerm, final Term targetTerm) {
+		if (owlObjectPropertyExpression.isAnonymous()) {
+			final Predicate predicate = OwlToRulesConversionHelper
+					.getObjectPropertyPredicate(owlObjectPropertyExpression.getInverseProperty().asOWLObjectProperty());
+			return new FactImpl(predicate, Arrays.asList(targetTerm, sourceTerm));
+		} else {
+			final Predicate predicate = OwlToRulesConversionHelper
+					.getObjectPropertyPredicate(owlObjectPropertyExpression.asOWLObjectProperty());
+			return new FactImpl(predicate, Arrays.asList(sourceTerm, targetTerm));
+		}
+	}
+
+	public static PositiveLiteral getBottom(final Term term) {
+		final Predicate predicate = new PredicateImpl("http://www.w3.org/2002/07/owl#Nothing", 1);
+		return new PositiveLiteralImpl(predicate, Arrays.asList(term));
+	}
+
+	public static PositiveLiteral getTop(final Term term) {
+		final Predicate predicate = new PredicateImpl("http://www.w3.org/2002/07/owl#Thing", 1);
+		return new PositiveLiteralImpl(predicate, Arrays.asList(term));
+	}
+
+}
diff --git a/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java
new file mode 100644
index 000000000..2b2395aff
--- /dev/null
+++ b/rulewerk-owlapi/src/main/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverter.java
@@ -0,0 +1,142 @@
+package org.semanticweb.rulewerk.owlapi;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/*-
+ * #%L
+ * Rulewerk OWL API Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.Set;
+
+import org.semanticweb.owlapi.model.OWLAxiom;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Class for converting OWL ontologies to rules.
+ *
+ * @author Markus Kroetzsch
+ *
+ */
+public class OwlToRulesConverter {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(OwlToRulesConverter.class);
+
+	final OwlAxiomToRulesConverter owlAxiomToRulesConverter = new OwlAxiomToRulesConverter();
+
+	private final boolean failOnUnsupported;
+	private int unsupportedAxiomsCount = 0;
+	private final List<OWLAxiom> unsupportedAxioms = new ArrayList<>();
+
+	/**
+	 * Constructor.
+	 *
+	 * @param failOnUnsupported whether the converter should fail with an
+	 *                          {@link OwlFeatureNotSupportedException} when
+	 *                          encountering axioms that cannot be converted to
+	 *                          rules or facts.
+	 */
+	public OwlToRulesConverter(boolean failOnUnsupported) {
+		this.failOnUnsupported = failOnUnsupported;
+	}
+
+	/**
+	 * Constructs an object that fails with an
+	 * {@link OwlFeatureNotSupportedException} when encountering axioms that cannot
+	 * be converted to rules or facts.
+	 */
+	public OwlToRulesConverter() {
+		this(true);
+	}
+
+	/**
+	 * Converts the given OWL ontology to rules and facts, and adds the result to
+	 * the internal buffer of rules and facts for later retrieval.
+	 *
+	 * @param owlOntology the ontology
+	 */
+	public void addOntology(final OWLOntology owlOntology) {
+		this.owlAxiomToRulesConverter.startNewBlankNodeContext();
+		owlOntology.axioms().forEach(owlAxiom -> {
+			try {
+				owlAxiom.accept(this.owlAxiomToRulesConverter);
+			} catch (OwlFeatureNotSupportedException e) {
+				if (this.failOnUnsupported) {
+					LOGGER.error(e.getMessage());
+					throw e;
+				} else {
+					LOGGER.warn(e.getMessage());
+					this.unsupportedAxiomsCount++;
+					if (this.unsupportedAxioms.size() < 10) {
+						this.unsupportedAxioms.add(owlAxiom);
+					}
+				}
+			}
+		});
+	}
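+
+	// Usage sketch (editorial, not part of the original sources; assumes
+	// "ontology" is an OWLOntology loaded via an OWLOntologyManager):
+	//
+	//   final OwlToRulesConverter converter = new OwlToRulesConverter(false);
+	//   converter.addOntology(ontology);
+	//   final Set<Rule> rules = converter.getRules();
+	//   final Set<Fact> facts = converter.getFacts();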
+
+	/**
+	 * Returns the set of facts generated by transforming the given OWL ontology.
+	 * No copy is created, so the set should not be modified if the converter is
+	 * still to be used.
+	 *
+	 * @return set of facts
+	 */
+	public Set<Fact> getFacts() {
+		return this.owlAxiomToRulesConverter.facts;
+	}
+
+	/**
+	 * Returns the set of rules generated by transforming the given OWL ontology.
+	 * No copy is created, so the set should not be modified if the converter is
+	 * still to be used.
+	 *
+	 * @return set of rules
+	 */
+	public Set<Rule> getRules() {
+		return this.owlAxiomToRulesConverter.rules;
+	}
+
+	/**
+	 * Returns the number of OWL axioms that could not be converted into rules.
+	 * This number is only computed if the object is not configured to fail when
+	 * encountering the first unsupported axiom.
+	 *
+	 * @return total number of unsupported axioms
+	 */
+	public int getUnsupportedAxiomsCount() {
+		return this.unsupportedAxiomsCount;
+	}
+
+	/**
+	 * Returns up to 10 unsupported axioms encountered during the conversion. The
+	 * complete number of unsupported axioms can be queried using
+	 * {@link #getUnsupportedAxiomsCount()}.
+	 *
+	 * @return list of the first ten unsupported axioms that were encountered
+	 */
+	public List<OWLAxiom> getUnsupportedAxiomsSample() {
+		return this.unsupportedAxioms;
+	}
+
+}
diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java
new file mode 100644
index 000000000..681c56011
--- /dev/null
+++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlAxiomToRulesConverterTest.java
@@ -0,0 +1,1168 @@
+package org.semanticweb.rulewerk.owlapi;
+
+/*-
+ * #%L
+ * Rulewerk OWL API Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.junit.Ignore; +import org.junit.Test; +import org.mockito.internal.util.collections.Sets; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectOneOf; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; +import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLObjectUnionOf; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; + +public class OwlAxiomToRulesConverterTest { + + static OWLDataFactory df = OWLManager.getOWLDataFactory(); + + public static IRI getIri(final String localName) { + return IRI.create("http://example.org/" + localName); + } + + public static OWLClass getOwlClass(final String localName) { + return df.getOWLClass(getIri(localName)); + } + + public static OWLObjectProperty getOwlObjectProperty(final String localName) { + return df.getOWLObjectProperty(getIri(localName)); + } + + public static Predicate getClassPredicate(final String localName) { + return Expressions.makePredicate("http://example.org/" + localName, 1); + } + + public static Predicate getPropertyPredicate(final String localName) { + return Expressions.makePredicate("http://example.org/" + localName, 2); + } + + static final OWLClass cA = getOwlClass("A"); + static final OWLClass cB = getOwlClass("B"); + static final OWLClass cC = getOwlClass("C"); + static final OWLClass cD = getOwlClass("D"); + static final OWLClass cE = getOwlClass("E"); + static final OWLObjectProperty pR = getOwlObjectProperty("Rule"); + static final OWLObjectProperty pS = getOwlObjectProperty("S"); + static final OWLObjectProperty pT = getOwlObjectProperty("T"); + static final OWLObjectProperty pU = getOwlObjectProperty("U"); + + static final Predicate nA = getClassPredicate("A"); + static final Predicate nB = getClassPredicate("B"); + static final Predicate nC = getClassPredicate("C"); + static final Predicate nD = getClassPredicate("D"); + static final Predicate nE = getClassPredicate("E"); + static final Predicate nR = getPropertyPredicate("Rule"); + static final Predicate nS = getPropertyPredicate("S"); + static final Predicate nT = getPropertyPredicate("T"); + static final Predicate nU = getPropertyPredicate("U"); + + 
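+	// Editorial note (illustration, not from the original sources): with these
+	// fixtures, a simple axiom such as SubClassOf(A, B) is expected to translate
+	// to the rule
+	//   http://example.org/B(X) :- http://example.org/A(X)
+	// over the converter's frontier variable X; testSimpleRule below checks the
+	// analogous conjunctive case.
+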
static final OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); + static final OWLIndividual indb = df.getOWLNamedIndividual(getIri("b")); + static final OWLIndividual indc = df.getOWLNamedIndividual(getIri("c")); + + static final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); + static final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); + static final Term constc = Expressions.makeAbstractConstant(getIri("c").toString()); + + @Test + public void testSimpleRule() { + final OWLObjectIntersectionOf body = df.getOWLObjectIntersectionOf(cA, cB, cC); + final OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(cD, cE); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(body, head); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, converter.termFactory.frontierVariable); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, converter.termFactory.frontierVariable); + final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, converter.termFactory.frontierVariable); + final PositiveLiteral atD = Expressions.makePositiveLiteral(nD, converter.termFactory.frontierVariable); + final PositiveLiteral atE = Expressions.makePositiveLiteral(nE, converter.termFactory.frontierVariable); + final Rule rule = Expressions.makeRule(Expressions.makePositiveConjunction(atD, atE), + Expressions.makeConjunction(atA, atB, atC)); + + assertEquals(Collections.singleton(rule), converter.rules); + + } + + @Test + public void testTrueBody() { + final OWLClassExpression body = df.getOWLObjectIntersectionOf(df.getOWLThing(), + df.getOWLObjectAllValuesFrom(df.getOWLBottomObjectProperty(), cB)); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(body, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral top = OwlToRulesConversionHelper.getTop(converter.termFactory.frontierVariable); + final Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), + Expressions.makeConjunction(Arrays.asList(top))); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testConjunctionTruth() { + final OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(cB, df.getOWLThing(), cC); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, + Arrays.asList(converter.termFactory.frontierVariable)); + final Rule rule = Expressions.makeRule(Expressions.makePositiveConjunction(atB, atC), + Expressions.makeConjunction(atA)); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testConjunctionTruthTruth() { + final OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(df.getOWLThing(), df.getOWLThing()); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); + + final 
OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + assertEquals(0, converter.rules.size()); + } + + @Test + public void testConjunctionFalsity() { + final OWLClassExpression notSupported = df.getOWLObjectExactCardinality(10, pR); + final OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(notSupported, df.getOWLNothing(), cC); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral bot = OwlToRulesConversionHelper.getBottom(converter.termFactory.frontierVariable); + final Rule rule = Expressions.makeRule(bot, atA); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test(expected = OwlFeatureNotSupportedException.class) + public void testConjunctionException() { + final OWLClassExpression notSupported = df.getOWLObjectExactCardinality(10, pR); + final OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(notSupported, cC); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + @Test + public void testConjunctionNegativeLiterals() { + final OWLClassExpression notA = df.getOWLObjectComplementOf(cA); + final OWLClassExpression notB = df.getOWLObjectComplementOf(cB); + final OWLClassExpression notC = df.getOWLObjectComplementOf(cC); + final OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(notB, notC); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(notA, head); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Predicate auxPredicate = OwlToRulesConversionHelper + .getConjunctionAuxiliaryClassPredicate(Arrays.asList(notB, notC)); + + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atAux = Expressions.makePositiveLiteral(auxPredicate, + Arrays.asList(converter.termFactory.frontierVariable)); + + final Rule rule1 = Expressions.makeRule(atAux, atB); + final Rule rule2 = Expressions.makeRule(atAux, atC); + final Rule rule3 = Expressions.makeRule(atA, atAux); + + assertEquals(Sets.newSet(rule1, rule2, rule3), converter.rules); + } + + @Test + public void testContrapositive() { + final OWLClassExpression notA = df.getOWLObjectComplementOf(cA); + final OWLClassExpression notB = df.getOWLObjectComplementOf(cB); + final OWLClassExpression notC = df.getOWLObjectComplementOf(cC); + final OWLClassExpression notBOrNotC = df.getOWLObjectUnionOf(notB, notC); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(notA, notBOrNotC); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, + Arrays.asList(converter.termFactory.frontierVariable)); + final 
PositiveLiteral atC = Expressions.makePositiveLiteral(nC, + Arrays.asList(converter.termFactory.frontierVariable)); + final Rule rule = Expressions.makeRule(Expressions.makePositiveConjunction(atA), + Expressions.makeConjunction(atB, atC)); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testPositiveUniversal() { + final OWLClassExpression forallRA = df.getOWLObjectAllValuesFrom(pR, cA); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cB, forallRA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, Arrays.asList(secondVariable)); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, + Arrays.asList(converter.termFactory.frontierVariable, secondVariable)); + + final Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), + Expressions.makeConjunction(Arrays.asList(atR, atB))); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testPositiveExistential() { + final OWLClassExpression existsRA = df.getOWLObjectSomeValuesFrom(pR, cA); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cB, existsRA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeExistentialVariable("Y1"); + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, Arrays.asList(secondVariable)); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, + Arrays.asList(converter.termFactory.frontierVariable, secondVariable)); + + final Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atR, atA)), + Expressions.makeConjunction(Arrays.asList(atB))); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testNegativeUniversal() { + final OWLClassExpression forallRA = df.getOWLObjectAllValuesFrom(pR, cA); + final OWLClassExpression notB = df.getOWLObjectComplementOf(cB); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(forallRA, notB); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Predicate auxPredicate = OwlToRulesConversionHelper + .getConjunctionAuxiliaryClassPredicate(Arrays.asList(cA)); + final Variable secondVariable = Expressions.makeExistentialVariable("Y1"); + final Variable secondVariableUniversal = Expressions.makeUniversalVariable("Y1"); + + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, + Arrays.asList(converter.termFactory.frontierVariable)); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, + Arrays.asList(converter.termFactory.frontierVariable, secondVariable)); + final PositiveLiteral atAux = Expressions.makePositiveLiteral(auxPredicate, Arrays.asList(secondVariable)); + final Rule rule1 = Expressions.makeRule(Expressions.makePositiveConjunction(atR, atAux), + Expressions.makeConjunction(atB)); + + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, Arrays.asList(secondVariableUniversal)); + final PositiveLiteral bot = 
OwlToRulesConversionHelper.getBottom(secondVariableUniversal); + final PositiveLiteral atAuxUniversal = Expressions.makePositiveLiteral(auxPredicate, + Arrays.asList(secondVariableUniversal)); + final Rule rule2 = Expressions.makeRule(Expressions.makePositiveConjunction(bot), + Expressions.makeConjunction(atAuxUniversal, atA)); + + assertEquals(Sets.newSet(rule1, rule2), converter.rules); + } + + @Test + public void testNegativeExistential() { + final OWLClassExpression existRA = df.getOWLObjectSomeValuesFrom(pR, cA); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(existRA, cB); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + secondVariable); + final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, secondVariable); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, converter.termFactory.frontierVariable); + + final Rule rule = Expressions.makeRule(atB, atR, atA); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testSelf() { + final OWLClassExpression selfR = df.getOWLObjectHasSelf(pR); + final OWLClassExpression selfS = df.getOWLObjectHasSelf(pS); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(selfR, selfS); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + converter.termFactory.frontierVariable); + final PositiveLiteral atS = Expressions.makePositiveLiteral(nS, converter.termFactory.frontierVariable, + converter.termFactory.frontierVariable); + + final Rule rule = Expressions.makeRule(atS, atR); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testHasValue() { + final OWLClassExpression hasRa = df.getOWLObjectHasValue(pR, inda); + final OWLClassExpression hasSb = df.getOWLObjectHasValue(pS, indb); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(hasRa, hasSb); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); + final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, consta); + final PositiveLiteral atS = Expressions.makePositiveLiteral(nS, converter.termFactory.frontierVariable, constb); + + final Rule rule = Expressions.makeRule(atS, atR); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testObjectPropertyAssertions() { + final OWLAxiom Rab = df.getOWLObjectPropertyAssertionAxiom(pR, inda, indb); + final OWLAxiom invSab = df.getOWLObjectPropertyAssertionAxiom(df.getOWLObjectInverseOf(pS), inda, indb); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + Rab.accept(converter); + invSab.accept(converter); + + final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); + final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, Arrays.asList(consta, constb)); + final PositiveLiteral atS = 
Expressions.makePositiveLiteral(nS, Arrays.asList(constb, consta)); + + assertEquals(Sets.newSet(atR, atS), converter.facts); + } + + @Test + public void testClassAssertions() { + final OWLAxiom Ca = df.getOWLClassAssertionAxiom(cC, indc); + final OWLClassExpression BandhasRb = df.getOWLObjectIntersectionOf(cB, df.getOWLObjectHasValue(pR, indb)); + final OWLAxiom BandhasRba = df.getOWLClassAssertionAxiom(BandhasRb, inda); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + Ca.accept(converter); + BandhasRba.accept(converter); + + final PositiveLiteral atC = Expressions.makePositiveLiteral(nC, constc); + final PositiveLiteral atB = Expressions.makePositiveLiteral(nB, consta); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, consta, constb); + + assertEquals(Sets.newSet(atC, atB, atR), converter.facts); + } + + @Test + public void testNegativeObjectPropertyAssertions() { + final OWLAxiom Rab = df.getOWLNegativeObjectPropertyAssertionAxiom(pR, inda, indb); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + Rab.accept(converter); + + final Term consta = Expressions.makeAbstractConstant(getIri("a").toString()); + final Term constb = Expressions.makeAbstractConstant(getIri("b").toString()); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, Arrays.asList(consta, constb)); + final PositiveLiteral bot = OwlToRulesConversionHelper.getBottom(consta); + + final Rule rule = Expressions.makeRule(bot, atR); + + assertEquals(Collections.singleton(rule), converter.rules); + } + + @Test + public void testSubObjectPropertyOf() { + final OWLAxiom axiom = df.getOWLSubObjectPropertyOfAxiom(pR, df.getOWLObjectInverseOf(pS)); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + secondVariable); + final PositiveLiteral atS = Expressions.makePositiveLiteral(nS, secondVariable, + converter.termFactory.frontierVariable); + final Rule rule = Expressions.makeRule(atS, atR); + + assertEquals(Sets.newSet(rule), converter.rules); + } + + @Test + public void testAsymmetricObjectPropertyOf() { + final OWLAxiom axiom = df.getOWLAsymmetricObjectPropertyAxiom(pR); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral at1 = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + secondVariable); + final PositiveLiteral at2 = Expressions.makePositiveLiteral(nR, secondVariable, + converter.termFactory.frontierVariable); + final Rule rule = Expressions + .makeRule(OwlToRulesConversionHelper.getBottom(converter.termFactory.frontierVariable), at1, at2); + + assertEquals(Sets.newSet(rule), converter.rules); + } + + @Test + public void testSymmetricObjectPropertyOf() { + final OWLAxiom axiom = df.getOWLSymmetricObjectPropertyAxiom(pR); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral at1 = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + secondVariable); + final PositiveLiteral at2 = Expressions.makePositiveLiteral(nR, secondVariable, + 
converter.termFactory.frontierVariable); + final Rule rule = Expressions.makeRule(at2, at1); + + assertEquals(Sets.newSet(rule), converter.rules); + } + + @Test + public void testIrreflexiveObjectPropertyOf() { + final OWLAxiom axiom = df.getOWLIrreflexiveObjectPropertyAxiom(pR); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral at1 = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + converter.termFactory.frontierVariable); + final Rule rule = Expressions + .makeRule(OwlToRulesConversionHelper.getBottom(converter.termFactory.frontierVariable), at1); + + assertEquals(Sets.newSet(rule), converter.rules); + } + + @Test + public void testReflexiveObjectPropertyOf() { + final OWLAxiom axiom = df.getOWLReflexiveObjectPropertyAxiom(pR); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final PositiveLiteral at1 = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + converter.termFactory.frontierVariable); + final Rule rule = Expressions.makeRule(at1, + OwlToRulesConversionHelper.getTop(converter.termFactory.frontierVariable)); + + assertEquals(Sets.newSet(rule), converter.rules); + } + + @Test + public void testInverseObjectProperties() { + final OWLAxiom axiom = df.getOWLInverseObjectPropertiesAxiom(pR, pS); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + secondVariable); + final PositiveLiteral atS = Expressions.makePositiveLiteral(nS, secondVariable, + converter.termFactory.frontierVariable); + final Rule rule1 = Expressions.makeRule(atS, atR); + final Rule rule2 = Expressions.makeRule(atR, atS); + + assertEquals(Sets.newSet(rule1, rule2), converter.rules); + } + + @Test + public void testEquivalentObjectProperties() { + final OWLAxiom axiom = df.getOWLEquivalentObjectPropertiesAxiom(pR, df.getOWLObjectInverseOf(pS), pT); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, + Arrays.asList(converter.termFactory.frontierVariable, secondVariable)); + final PositiveLiteral atS = Expressions.makePositiveLiteral(nS, + Arrays.asList(secondVariable, converter.termFactory.frontierVariable)); + final PositiveLiteral atT = Expressions.makePositiveLiteral(nT, + Arrays.asList(converter.termFactory.frontierVariable, secondVariable)); + final Rule ruleRS = Expressions.makeRule(atS, atR); + final Rule ruleST = Expressions.makeRule(atT, atS); + final Rule ruleTR = Expressions.makeRule(atR, atT); + final Rule ruleRT = Expressions.makeRule(atT, atR); + final Rule ruleTS = Expressions.makeRule(atS, atT); + final Rule ruleSR = Expressions.makeRule(atR, atS); + + // We have to test against two possible iteration orders, which may occur + // non-deterministically and affect the result: Rule S T or Rule T S + // (other orders lead to the same outcome) + assertTrue(converter.rules.equals(Sets.newSet(ruleRS, ruleST, ruleTR)) + || converter.rules.equals(Sets.newSet(ruleRT, ruleTS, ruleSR))); + } + + @Test + public void testSubObjectPropertyChain() { + final OWLAxiom axiom = 
df.getOWLSubPropertyChainOfAxiom(Arrays.asList(pR, df.getOWLObjectInverseOf(pS), pT),
+				pU);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		final Variable var1 = Expressions.makeUniversalVariable("Y1");
+		final Variable var2 = Expressions.makeUniversalVariable("Y2");
+		final Variable var3 = Expressions.makeUniversalVariable("Y3");
+		final PositiveLiteral atR = Expressions.makePositiveLiteral(nR,
+				Arrays.asList(converter.termFactory.frontierVariable, var1));
+		final PositiveLiteral atS = Expressions.makePositiveLiteral(nS, Arrays.asList(var2, var1));
+		final PositiveLiteral atT = Expressions.makePositiveLiteral(nT, Arrays.asList(var2, var3));
+		final PositiveLiteral atU = Expressions.makePositiveLiteral(nU,
+				Arrays.asList(converter.termFactory.frontierVariable, var3));
+		final Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atU)),
+				Expressions.makeConjunction(Arrays.asList(atR, atS, atT)));
+
+		assertEquals(Sets.newSet(rule), converter.rules);
+	}
+
+	@Test
+	public void testTransitiveProperty() {
+		final OWLAxiom axiom = df.getOWLTransitiveObjectPropertyAxiom(pR);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		final Variable var1 = Expressions.makeUniversalVariable("Y1");
+		final Variable var2 = Expressions.makeUniversalVariable("Y2");
+		final PositiveLiteral at1 = Expressions.makePositiveLiteral(nR,
+				Arrays.asList(converter.termFactory.frontierVariable, var1));
+		final PositiveLiteral at2 = Expressions.makePositiveLiteral(nR, Arrays.asList(var1, var2));
+		final PositiveLiteral ath = Expressions.makePositiveLiteral(nR,
+				Arrays.asList(converter.termFactory.frontierVariable, var2));
+		final Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(ath)),
+				Expressions.makeConjunction(Arrays.asList(at1, at2)));
+
+		assertEquals(Sets.newSet(rule), converter.rules);
+	}
+
+	@Test
+	public void testEquivalentClasses() {
+		final OWLAxiom axiom = df.getOWLEquivalentClassesAxiom(cA, cB, cC);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		final PositiveLiteral atA = Expressions.makePositiveLiteral(nA,
+				Arrays.asList(converter.termFactory.frontierVariable));
+		final PositiveLiteral atB = Expressions.makePositiveLiteral(nB,
+				Arrays.asList(converter.termFactory.frontierVariable));
+		final PositiveLiteral atC = Expressions.makePositiveLiteral(nC,
+				Arrays.asList(converter.termFactory.frontierVariable));
+		final Rule ruleAB = Expressions.makeRule(atB, atA);
+		final Rule ruleBC = Expressions.makeRule(atC, atB);
+		final Rule ruleCA = Expressions.makeRule(atA, atC);
+		final Rule ruleAC = Expressions.makeRule(atC, atA);
+		final Rule ruleCB = Expressions.makeRule(atB, atC);
+		final Rule ruleBA = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)),
+				Expressions.makeConjunction(Arrays.asList(atB)));
+
+		// We have to test against two possible iteration orders, which may occur
+		// non-deterministically and affect the result: A B C or A C B
+		// (other orders lead to the same outcome)
+		assertTrue(converter.rules.equals(Sets.newSet(ruleAB, ruleBC, ruleCA))
+				|| converter.rules.equals(Sets.newSet(ruleAC, ruleCB, ruleBA)));
+	}
+
+	@Test
+	public void testObjectPropertyDomain() {
+		final OWLAxiom axiom = df.getOWLObjectPropertyDomainAxiom(pR, cA);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		final Variable secondVariable = Expressions.makeUniversalVariable("Y1");
+		final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable,
+				secondVariable);
+		final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, converter.termFactory.frontierVariable);
+
+		final Rule rule = Expressions.makeRule(atA, atR);
+
+		assertEquals(Collections.singleton(rule), converter.rules);
+	}
+
+	@Test
+	public void testObjectPropertyRange() {
+		final OWLAxiom axiom = df.getOWLObjectPropertyRangeAxiom(pR, cA);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		final Variable secondVariable = Expressions.makeUniversalVariable("Y1");
+		final PositiveLiteral atR = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable,
+				secondVariable);
+		final PositiveLiteral atA = Expressions.makePositiveLiteral(nA, secondVariable);
+
+		final Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)),
+				Expressions.makeConjunction(Arrays.asList(atR)));
+
+		assertEquals(Collections.singleton(rule), converter.rules);
+	}
+
+	/*
+	 * A \sqsubseteq <=1 .R
+	 */
+	@Test(expected = OwlFeatureNotSupportedException.class)
+	public void testSubClassOfMaxCardinality() {
+
+		final OWLClassExpression maxCard = df.getOWLObjectMaxCardinality(1, pR);
+		final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, maxCard);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+	}
+
+	@Test
+	public void testDisjointClassesTwoSimpleClasses() {
+		final OWLDisjointClassesAxiom axiom = df.getOWLDisjointClassesAxiom(cA, cB);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		assertEquals(1, converter.rules.size());
+		final Rule rule = converter.rules.iterator().next();
+
+		final PositiveLiteral expectedHeadLiteral = OwlToRulesConversionHelper
+				.getBottom(converter.termFactory.frontierVariable);
+		final Literal expectedBodyLiteral1 = Expressions.makePositiveLiteral(
+				OwlToRulesConversionHelper.getClassPredicate(cA), converter.termFactory.frontierVariable);
+		final Literal expectedBodyLiteral2 = Expressions.makePositiveLiteral(
+				OwlToRulesConversionHelper.getClassPredicate(cB), converter.termFactory.frontierVariable);
+		final Rule expected = Expressions.makeRule(expectedHeadLiteral, expectedBodyLiteral1, expectedBodyLiteral2);
+		assertEquals(expected, rule);
+	}
+
+	@Test
+	public void testDisjointClassesThreeSimpleClasses() {
+		final OWLDisjointClassesAxiom axiom = df.getOWLDisjointClassesAxiom(cA, cB, cC);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		assertEquals(4, converter.rules.size());
+
+		final PositiveLiteral botX = OwlToRulesConversionHelper.getBottom(converter.termFactory.frontierVariable);
+		final Literal cAx = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getClassPredicate(cA),
+				converter.termFactory.frontierVariable);
+		final Literal cBx = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getClassPredicate(cB),
+				converter.termFactory.frontierVariable);
+		final Rule aAndBDisjoint = Expressions.makeRule(botX, cAx, cBx);
+
+		final PositiveLiteral auxX = Expressions.makePositiveLiteral(
+				OwlToRulesConversionHelper.getAuxiliaryClassNameDisjuncts(Arrays.asList(cA, cB)),
+				converter.termFactory.frontierVariable);
+
+		final Rule aux1 = Expressions.makeRule(auxX, cAx);
+		final Rule aux2 = Expressions.makeRule(auxX, cBx);
+
+		final Literal cCx = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getClassPredicate(cC),
+				converter.termFactory.frontierVariable);
+		final Rule cAndAuxDisjoint = Expressions.makeRule(botX, auxX, cCx);
+
+		final Set<Rule> expectedRules = new HashSet<>(Arrays.asList(aAndBDisjoint, aux1, aux2, cAndAuxDisjoint));
+
+		assertEquals(expectedRules, converter.rules);
+	}
+
+	@Test
+	public void testDisjointClassesSimpleClassAndTop() {
+		final OWLDisjointClassesAxiom axiom = df.getOWLDisjointClassesAxiom(cA, df.getOWLThing());
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		assertEquals(1, converter.rules.size());
+
+		final PositiveLiteral expectedHeadLiteral = OwlToRulesConversionHelper
+				.getBottom(converter.termFactory.frontierVariable);
+		final Literal expectedBodyLiteral = Expressions.makePositiveLiteral(
+				OwlToRulesConversionHelper.getClassPredicate(cA), converter.termFactory.frontierVariable);
+		final Rule expected = Expressions.makeRule(expectedHeadLiteral, expectedBodyLiteral);
+		assertEquals(new HashSet<>(Arrays.asList(expected)), converter.rules);
+	}
+
+	@Test
+	public void testDisjointClassesSimpleClassAndBot() {
+		final OWLDisjointClassesAxiom axiom = df.getOWLDisjointClassesAxiom(cA, df.getOWLNothing());
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		assertTrue(converter.rules.isEmpty());
+	}
+
+	@Test
+	public void testDisjointClassesSimpleClassAndSomeValuesFrom() {
+		final OWLObjectSomeValuesFrom someValuesFrom = df.getOWLObjectSomeValuesFrom(pR, cB);
+		final OWLDisjointClassesAxiom axiom = df.getOWLDisjointClassesAxiom(cA, someValuesFrom);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		assertEquals(1, converter.rules.size());
+		final Rule rule = converter.rules.iterator().next();
+
+		assertEquals(Expressions.makeConjunction(
+				OwlToRulesConversionHelper.getBottom(converter.termFactory.frontierVariable)), rule.getHead());
+
+		assertEquals(3, rule.getBody().getLiterals().size());
+		final Literal cAx = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getClassPredicate(cA),
+				converter.termFactory.frontierVariable);
+		assertTrue(rule.getBody().getLiterals().contains(cAx));
+	}
+
+	@Test(expected = OwlFeatureNotSupportedException.class)
+	// TODO support this feature
+	public void testDisjointClassesSimpleClassAndOneOf() {
+		final OWLObjectOneOf oneOfb = df.getOWLObjectOneOf(indb);
+		final OWLDisjointClassesAxiom axiom = df.getOWLDisjointClassesAxiom(cA, oneOfb);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+	}
+
+	@Test
+	public void testDisjointObjectProperties_twoSimpleProperties() {
+		final OWLDisjointObjectPropertiesAxiom axiom = df.getOWLDisjointObjectPropertiesAxiom(pS, pT);
+
+		final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter();
+		axiom.accept(converter);
+
+		assertEquals(1, converter.rules.size());
+
+		final Term expectedSource = new UniversalVariableImpl("X");
+		final Term expectedTarget = new UniversalVariableImpl("Y1");
+
+		final PositiveLiteral botX = OwlToRulesConversionHelper.getBottom(expectedSource);
+		final Literal expectedBodyLiteral1 = Expressions.makePositiveLiteral(
+				OwlToRulesConversionHelper.getObjectPropertyPredicate(pS), expectedSource, expectedTarget);
+		final Literal expectedBodyLiteral2 =
Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pT), expectedSource, expectedTarget); + final Rule expected = Expressions.makeRule(botX, expectedBodyLiteral1, expectedBodyLiteral2); + assertEquals(new HashSet<>(Arrays.asList(expected)), converter.rules); + } + + @Test(expected = OwlFeatureNotSupportedException.class) + public void testDisjointObjectProperties_empty() { + final OWLDisjointObjectPropertiesAxiom axiom = df.getOWLDisjointObjectPropertiesAxiom(); + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + @Test(expected = OwlFeatureNotSupportedException.class) + public void testDisjointObjectProperties_oneProperty() { + final OWLDisjointObjectPropertiesAxiom axiom = df.getOWLDisjointObjectPropertiesAxiom(pS); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + @Test + public void testDisjointObjectProperties_threeSimpleProperties() { + final OWLDisjointObjectPropertiesAxiom axiom = df.getOWLDisjointObjectPropertiesAxiom(pS, pT, pU); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + final Term expectedSource = new UniversalVariableImpl("X"); + final Term expectedTarget = new UniversalVariableImpl("Y1"); + + final PositiveLiteral botX = OwlToRulesConversionHelper.getBottom(expectedSource); + final Literal literalSxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pS), expectedSource, expectedTarget); + final Literal literalTxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pT), expectedSource, expectedTarget); + final Rule sAndTDisjoint = Expressions.makeRule(botX, literalSxy, literalTxy); + + final PositiveLiteral auxXY = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getAuxiliaryPropertyNameDisjuncts(Arrays.asList(pS, pT)), expectedSource, + expectedTarget); + + final Rule aux1 = Expressions.makeRule(auxXY, literalSxy); + final Rule aux2 = Expressions.makeRule(auxXY, literalTxy); + + final Literal literalUxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pU), expectedSource, expectedTarget); + final Rule uAndAuxDisjoint = Expressions.makeRule(botX, literalUxy, auxXY); + + assertEquals(new HashSet<>(Arrays.asList(sAndTDisjoint, aux1, aux2, uAndAuxDisjoint)), converter.rules); + } + + @Test + public void testDisjointObjectProperties_fourSimpleProperties() { + final OWLDisjointObjectPropertiesAxiom axiom = df.getOWLDisjointObjectPropertiesAxiom(pR, pS, pT, pU); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + final Term expectedSource = new UniversalVariableImpl("X"); + final Term expectedTarget = new UniversalVariableImpl("Y1"); + + final PositiveLiteral botX = OwlToRulesConversionHelper.getBottom(expectedSource); + final Literal literalRxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pR), expectedSource, expectedTarget); + final Literal literalSxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pS), expectedSource, expectedTarget); + final Rule rAndSDisjoint = Expressions.makeRule(botX, literalRxy, literalSxy); + + final PositiveLiteral auxRSxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getAuxiliaryPropertyNameDisjuncts(Arrays.asList(pR, pS)), expectedSource, + 
expectedTarget); + + final Rule auxR = Expressions.makeRule(auxRSxy, literalRxy); + final Rule auxS = Expressions.makeRule(auxRSxy, literalSxy); + + final Literal literalTxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pT), expectedSource, expectedTarget); + final Literal literalUxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getObjectPropertyPredicate(pU), expectedSource, expectedTarget); + final Rule tAndUDisjoint = Expressions.makeRule(botX, literalTxy, literalUxy); + + final PositiveLiteral auxTUxy = Expressions.makePositiveLiteral( + OwlToRulesConversionHelper.getAuxiliaryPropertyNameDisjuncts(Arrays.asList(pT, pU)), expectedSource, + expectedTarget); + + final Rule auxT = Expressions.makeRule(auxTUxy, literalTxy); + final Rule auxU = Expressions.makeRule(auxTUxy, literalUxy); + + final Rule auxRSAndAuxTUDisjoint = Expressions.makeRule(botX, auxRSxy, auxTUxy); + + assertEquals( + new HashSet<>( + Arrays.asList(rAndSDisjoint, auxR, auxS, tAndUDisjoint, auxT, auxU, auxRSAndAuxTUDisjoint)), + converter.rules); + } + + @Test + public void testDisjointObjectProperties_topAndSimpleProperties() { + final OWLDisjointObjectPropertiesAxiom axiom = df + .getOWLDisjointObjectPropertiesAxiom(df.getOWLTopObjectProperty(), pS, pT); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Term expectedSource = new UniversalVariableImpl("X"); + final Term expectedTarget = new UniversalVariableImpl("Y1"); + + final PositiveLiteral botX = OwlToRulesConversionHelper.getBottom(expectedSource); + + final Literal sXY = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getObjectPropertyPredicate(pS), + expectedSource, expectedTarget); + + final Rule sUnsatisfiable = Expressions.makeRule(botX, sXY); + + final Literal tXY = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getObjectPropertyPredicate(pT), + expectedSource, expectedTarget); + final Rule tUnsatisfiable = Expressions.makeRule(botX, tXY); + assertEquals(new HashSet<>(Arrays.asList(sUnsatisfiable, tUnsatisfiable)), converter.rules); + } + + @Test + public void testDisjointObjectProperties_inverseAndSimpleProperties() { + final OWLDisjointObjectPropertiesAxiom axiom = df.getOWLDisjointObjectPropertiesAxiom(pS.getInverseProperty(), + pS); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Term expectedSource = new UniversalVariableImpl("X"); + final Term expectedTarget = new UniversalVariableImpl("Y1"); + final PositiveLiteral botX = OwlToRulesConversionHelper.getBottom(expectedSource); + + final Literal sXY = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getObjectPropertyPredicate(pS), + expectedSource, expectedTarget); + + final Literal sYX = Expressions.makePositiveLiteral(OwlToRulesConversionHelper.getObjectPropertyPredicate(pS), + expectedTarget, expectedSource); + + final Rule expected = Expressions.makeRule(botX, sXY, sYX); + assertEquals(new HashSet<>(Arrays.asList(expected)), converter.rules); + } + + /* + * {a} \sqsubseteq A + */ + @Test + public void testNominalSubClassOfClass() { + final OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(oneOfa, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Fact expectedFact = Expressions.makeFact(nA, consta); + 
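+		// A nominal on the left-hand side of SubClassOf is translated directly to a
+		// fact, so no rules are expected here.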
assertEquals(Collections.singleton(expectedFact), converter.facts); + assertTrue(converter.rules.isEmpty()); + } + + /* + * {a,b} \sqsubseteq A + */ + @Test + public void testNominalsSubClassOfClass() { + final OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(oneOfab, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Fact expectedFact1 = Expressions.makeFact(nA, consta); + final Fact expectedFact2 = Expressions.makeFact(nA, constb); + + assertEquals(Sets.newSet(expectedFact1, expectedFact2), converter.facts); + assertTrue(converter.rules.isEmpty()); + } + + /* + * ({a,b} \sqcap B) \sqsubseteq A + */ + @Test(expected = OwlFeatureNotSupportedException.class) + // TODO support this feature + public void testNominalsInConjunctionLeftSubClassOfClass() { + final OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + final OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(oneOfab, cB); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + /* + * (B \sqcap {a,b}) \sqsubseteq A + */ + @Test(expected = OwlFeatureNotSupportedException.class) + // TODO support this feature + public void testNominalsInConjunctionRightSubClassOfClass() { + final OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + final OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(cB, oneOfab); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(conjunction, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + /* + * A \sqsubseteq (B \sqcap {a,b}) + */ + @Test(expected = OwlFeatureNotSupportedException.class) + public void testClassSubClassOfNominalsInConjunctionRight() { + final OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + final OWLObjectIntersectionOf conjunction = df.getOWLObjectIntersectionOf(cB, oneOfab); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, conjunction); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + /* + * A \sqsubseteq {a} + */ + @Test(expected = OwlFeatureNotSupportedException.class) + public void testNominalSuperClassOfClass() { + final OWLObjectOneOf oneOfa = df.getOWLObjectOneOf(inda); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, oneOfa); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + /* + * A \sqsubseteq {a,b} + */ + @Test(expected = OwlFeatureNotSupportedException.class) + public void testNominalsSuperClassOfClass() { + final OWLObjectOneOf oneOfab = df.getOWLObjectOneOf(inda, indb); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, oneOfab); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + } + + /* + * A \sqsubseteq >=1 .R + */ + @Test + public void testSubClassOfMinCardinality_one() { + + final OWLClassExpression minCard = df.getOWLObjectMinCardinality(1, pR); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, minCard); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeExistentialVariable("Y1"); + final PositiveLiteral expectedHead = 
Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + secondVariable); + + final PositiveLiteral expectedBody = Expressions.makePositiveLiteral(nA, + converter.termFactory.frontierVariable); + final Rule expectedRule = Expressions.makeRule(expectedHead, expectedBody); + + assertEquals(Collections.singleton(expectedRule), converter.rules); + } + + /* + * A \sqsubseteq >=1 .R.B + */ + @Test + public void testSubClassOfMinCardinality_one_filler() { + + final OWLClassExpression minCard = df.getOWLObjectMinCardinality(1, pR, cB); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, minCard); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeExistentialVariable("Y1"); + final PositiveLiteral expectedHead1 = Expressions.makePositiveLiteral(nR, + converter.termFactory.frontierVariable, secondVariable); + final PositiveLiteral expectedHead2 = Expressions.makePositiveLiteral(nB, secondVariable); + + final PositiveLiteral expectedBody = Expressions.makePositiveLiteral(nA, + converter.termFactory.frontierVariable); + final Rule expectedRule = Expressions.makeRule( + Expressions.makePositiveConjunction(expectedHead1, expectedHead2), + Expressions.makeConjunction(expectedBody)); + + assertEquals(Collections.singleton(expectedRule), converter.rules); + } + + /* + * >=1 .R \sqsubseteq A + */ + @Test + public void testMinCardinality_one_SubClassOf() { + + final OWLClassExpression minCard = df.getOWLObjectMinCardinality(1, pR); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(minCard, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral expectedBody = Expressions.makePositiveLiteral(nR, converter.termFactory.frontierVariable, + secondVariable); + + final PositiveLiteral expectedHead = Expressions.makePositiveLiteral(nA, + converter.termFactory.frontierVariable); + final Rule expectedRule = Expressions.makeRule(expectedHead, expectedBody); + + assertEquals(Collections.singleton(expectedRule), converter.rules); + } + + /* + * >=1 .R.B \sqsubseteq A + */ + @Test + public void testMinCardinality_one_filler_SubClassOf() { + + final OWLClassExpression minCard = df.getOWLObjectMinCardinality(1, pR, cB); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(minCard, cA); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + final Variable secondVariable = Expressions.makeUniversalVariable("Y1"); + final PositiveLiteral expectedBody1 = Expressions.makePositiveLiteral(nR, + converter.termFactory.frontierVariable, secondVariable); + final PositiveLiteral expectedBody2 = Expressions.makePositiveLiteral(nB, secondVariable); + + final PositiveLiteral expectedHead = Expressions.makePositiveLiteral(nA, converter.termFactory.frontierVariable); + final Rule expectedRule = Expressions.makeRule(expectedHead, expectedBody1, expectedBody2); + + assertEquals(Collections.singleton(expectedRule), converter.rules); + } + + @Ignore + public void test() { + final OWLObjectPropertyExpression Sinv = df.getOWLObjectInverseOf(pS); + final OWLObjectSomeValuesFrom SomeSinvE = df.getOWLObjectSomeValuesFrom(Sinv, cE); + final OWLObjectSomeValuesFrom SomeRSomeSinvE = df.getOWLObjectSomeValuesFrom(pR, SomeSinvE); + final OWLObjectUnionOf AorB = df.getOWLObjectUnionOf(cA, cB); 
+ final OWLObjectIntersectionOf AorBandCandSomeRSomeSinvE = df.getOWLObjectIntersectionOf(AorB, cC, + SomeRSomeSinvE); + final OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(AorBandCandSomeRSomeSinvE, cD); + + final OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); + axiom.accept(converter); + + for (final Rule rule : converter.rules) { + System.out.println(rule); + } + } + +} diff --git a/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java new file mode 100644 index 000000000..7b6fd9533 --- /dev/null +++ b/rulewerk-owlapi/src/test/java/org/semanticweb/rulewerk/owlapi/OwlToRulesConverterTest.java @@ -0,0 +1,68 @@ +package org.semanticweb.rulewerk.owlapi; + +/*- + * #%L + * Rulewerk OWL API Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.util.Arrays; + +import org.junit.Test; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnonymousIndividual; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; + +public class OwlToRulesConverterTest { + + static OWLDataFactory df = OWLManager.getOWLDataFactory(); + + public static IRI getIri(final String localName) { + return IRI.create("http://example.org/" + localName); + } + + public static OWLClass getOwlClass(final String localName) { + return df.getOWLClass(getIri(localName)); + } + + static final OWLClass cC = getOwlClass("C"); + static final OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); + + @Test + public void testLoadOntologies() throws OWLOntologyCreationException { + final OWLAnonymousIndividual bnode = df.getOWLAnonymousIndividual("abc"); + final OWLAxiom Cn = df.getOWLClassAssertionAxiom(cC, bnode); + final OWLAxiom Ca = df.getOWLClassAssertionAxiom(cC, inda); + + final OWLOntology ontology = OWLManager.createOWLOntologyManager().createOntology(Arrays.asList(Cn,Ca)); + + final OwlToRulesConverter converter = new OwlToRulesConverter(); + converter.addOntology(ontology); + converter.addOntology(ontology); + + assertEquals(3, converter.getFacts().size()); + } + +} diff --git a/rulewerk-parser/LICENSE.txt b/rulewerk-parser/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-parser/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
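For orientation, the converter exercised by the OWL API tests above is typically driven end to end as in the following sketch (not part of this patch): an ontology is loaded with the standard OWL API manager and handed to OwlToRulesConverter; the getRules() accessor is assumed to exist alongside the getFacts() accessor used in OwlToRulesConverterTest.

    import java.io.File;

    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.rulewerk.owlapi.OwlToRulesConverter;

    public class OwlConversionSketch {
        public static void main(String[] args) throws Exception {
            // Load an ontology from a local file (hypothetical path).
            final OWLOntology ontology = OWLManager.createOWLOntologyManager()
                    .loadOntologyFromOntologyDocument(new File("ontology.owl"));

            // Translate the OWL axioms into rules and facts.
            final OwlToRulesConverter converter = new OwlToRulesConverter();
            converter.addOntology(ontology);

            converter.getRules().forEach(System.out::println); // assumed accessor
            converter.getFacts().forEach(System.out::println);
        }
    }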
diff --git a/rulewerk-parser/pom.xml b/rulewerk-parser/pom.xml
new file mode 100644
index 000000000..6f8e08e0a
--- /dev/null
+++ b/rulewerk-parser/pom.xml
@@ -0,0 +1,119 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.semanticweb.rulewerk</groupId>
+        <artifactId>rulewerk-parent</artifactId>
+        <version>0.10.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>rulewerk-parser</artifactId>
+
+    <name>Rulewerk Parser</name>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>${project.groupId}</groupId>
+            <artifactId>rulewerk-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>javacc-maven-plugin</artifactId>
+                <version>2.6</version>
+                <executions>
+                    <execution>
+                        <id>ruleparser</id>
+                        <configuration>
+                            <sourceDirectory>${basedir}/src/main/java/org/semanticweb/rulewerk/parser/javacc/</sourceDirectory>
+                        </configuration>
+                        <goals>
+                            <goal>javacc</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <version>1.8</version>
+                <executions>
+                    <execution>
+                        <phase>generate-sources</phase>
+                        <configuration>
+                            <sources>
+                                <source>${project.build.directory}/generated-sources/javacc/</source>
+                            </sources>
+                        </configuration>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+        <pluginManagement>
+            <plugins>
+                <plugin>
+                    <groupId>org.eclipse.m2e</groupId>
+                    <artifactId>lifecycle-mapping</artifactId>
+                    <version>1.0.0</version>
+                    <configuration>
+                        <lifecycleMappingMetadata>
+                            <pluginExecutions>
+                                <pluginExecution>
+                                    <pluginExecutionFilter>
+                                        <groupId>org.codehaus.mojo</groupId>
+                                        <artifactId>javacc-maven-plugin</artifactId>
+                                        <versionRange>[2.6,)</versionRange>
+                                        <goals>
+                                            <goal>javacc</goal>
+                                        </goals>
+                                    </pluginExecutionFilter>
+                                    <action>
+                                        <ignore></ignore>
+                                    </action>
+                                </pluginExecution>
+                                <pluginExecution>
+                                    <pluginExecutionFilter>
+                                        <groupId>org.codehaus.mojo</groupId>
+                                        <artifactId>build-helper-maven-plugin</artifactId>
+                                        <versionRange>[1.0,)</versionRange>
+                                        <goals>
+                                            <goal>parse-version</goal>
+                                            <goal>add-source</goal>
+                                            <goal>maven-version</goal>
+                                            <goal>add-resource</goal>
+                                            <goal>add-test-resource</goal>
+                                            <goal>add-test-source</goal>
+                                        </goals>
+                                    </pluginExecutionFilter>
+                                    <action>
+                                        <execute>
+                                            <runOnIncremental>true</runOnIncremental>
+                                            <runOnConfiguration>true</runOnConfiguration>
+                                        </execute>
+                                    </action>
+                                </pluginExecution>
+                            </pluginExecutions>
+                        </lifecycleMappingMetadata>
+                    </configuration>
+                </plugin>
+            </plugins>
+        </pluginManagement>
+    </build>
+</project>
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java
new file mode 100644
index 000000000..ff356ba3f
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ConfigurableLiteralHandler.java
@@ -0,0 +1,48 @@
+package org.semanticweb.rulewerk.parser;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.parser.javacc.SubParserFactory;
+
+/**
+ * Handler for parsing a configurable literal expression. Note that these are
+ * not Literals in the logical sense (i.e., Atoms with a specific polarity), but
+ * rather expressions in the sense of RDF literals; essentially adding further
+ * quoted literals with custom delimiters.
+ *
+ * @author Maximilian Marx
+ */
+@FunctionalInterface
+public interface ConfigurableLiteralHandler {
+    /**
+     * Parse a configurable literal expression.
+     *
+     * @param syntacticForm    syntactic form of the literal expression.
+     * @param subParserFactory a factory for obtaining a SubParser, sharing the
+     *                         parser's state, but bound to new input.
+     *
+     * @throws ParsingException when the given syntactic form is invalid.
+     * @return an appropriate {@link Term} instance.
+     */
+    public Term parseLiteral(String syntacticForm, final SubParserFactory subParserFactory) throws ParsingException;
+}
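As an illustration of the ConfigurableLiteralHandler interface just added, the following sketch (not part of this patch) maps a pipe-delimited literal such as |foo| to an upper-cased xsd:string constant. It assumes PIPE is among the ConfigurableLiteralDelimiter values and that TermFactory has a public no-argument constructor; registerLiteral is defined on ParserConfiguration, which appears later in this patch.

    import org.semanticweb.rulewerk.core.model.implementation.TermFactory;
    import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter;

    public class PipeLiteralSketch {
        public static ParserConfiguration register(ParserConfiguration configuration) {
            final TermFactory termFactory = new TermFactory(); // assumed constructor
            // Map |foo| to "FOO"^^xsd:string; the sub-parser is not needed here.
            final ConfigurableLiteralHandler handler = (syntacticForm, subParserFactory) -> termFactory
                    .makeDatatypeConstant(syntacticForm.toUpperCase(), "http://www.w3.org/2001/XMLSchema#string");
            return configuration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, handler); // PIPE is assumed
        }
    }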
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java
new file mode 100644
index 000000000..a64eac992
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DatatypeConstantHandler.java
@@ -0,0 +1,43 @@
+package org.semanticweb.rulewerk.parser;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.semanticweb.rulewerk.core.model.api.DatatypeConstant;
+
+/**
+ * Handler for parsing a custom Datatype constant.
+ *
+ * @author Maximilian Marx
+ */
+@FunctionalInterface
+public interface DatatypeConstantHandler {
+    /**
+     * Parse a datatype constant.
+     *
+     * @param lexicalForm lexical representation of the constant.
+     *
+     * @throws ParsingException when the given representation is invalid for this
+     *                          datatype.
+     *
+     * @return a {@link DatatypeConstant} corresponding to the lexical form.
+     */
+    public DatatypeConstant createConstant(String lexicalForm) throws ParsingException;
+}
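A DatatypeConstantHandler can validate and normalize lexical forms before a constant is created. The following sketch (not part of this patch) normalizes xsd:integer literals, e.g. "007" to "7", and rejects malformed input; makeDatatypeConstant is used with the same signature as in ParserConfiguration below, and the handler would be registered via registerDatatype.

    import java.math.BigInteger;

    import org.semanticweb.rulewerk.core.model.implementation.TermFactory;

    public class IntegerDatatypeSketch {
        static final String XSD_INTEGER = "http://www.w3.org/2001/XMLSchema#integer";

        public static DatatypeConstantHandler integerHandler(final TermFactory termFactory) {
            return lexicalForm -> {
                try {
                    // BigInteger both validates and normalizes the lexical form.
                    final String normalized = new BigInteger(lexicalForm.trim()).toString();
                    return termFactory.makeDatatypeConstant(normalized, XSD_INTEGER);
                } catch (NumberFormatException e) {
                    throw new ParsingException("Invalid xsd:integer literal: " + lexicalForm, e);
                }
            };
        }
    }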
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java
new file mode 100644
index 000000000..80050e49f
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DefaultParserConfiguration.java
@@ -0,0 +1,62 @@
+package org.semanticweb.rulewerk.parser;
+
+import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.semanticweb.rulewerk.parser.datasources.CsvFileDataSourceDeclarationHandler;
+import org.semanticweb.rulewerk.parser.datasources.RdfFileDataSourceDeclarationHandler;
+import org.semanticweb.rulewerk.parser.datasources.SparqlQueryResultDataSourceDeclarationHandler;
+import org.semanticweb.rulewerk.parser.datasources.TridentDataSourceDeclarationHandler;
+import org.semanticweb.rulewerk.parser.directives.ImportFileDirectiveHandler;
+import org.semanticweb.rulewerk.parser.directives.ImportFileRelativeDirectiveHandler;
+
+/**
+ * Default parser configuration. Registers the default data sources and
+ * directives.
+ *
+ * @author Maximilian Marx
+ */
+public class DefaultParserConfiguration extends ParserConfiguration {
+    public DefaultParserConfiguration() {
+        super();
+        registerDefaultDataSources();
+        registerDefaultDirectives();
+    }
+
+    /**
+     * Register built-in data sources (currently CSV, RDF, SPARQL, and Trident).
+     */
+    private void registerDefaultDataSources() {
+        registerDataSource(CsvFileDataSource.declarationPredicateName, new CsvFileDataSourceDeclarationHandler());
+        registerDataSource(RdfFileDataSource.declarationPredicateName, new RdfFileDataSourceDeclarationHandler());
+        registerDataSource(SparqlQueryResultDataSource.declarationPredicateName,
+                new SparqlQueryResultDataSourceDeclarationHandler());
+        registerDataSource(TridentDataSource.declarationPredicateName, new TridentDataSourceDeclarationHandler());
+    }
+
+    /**
+     * Register built-in directives (file imports).
+     */
+    private void registerDefaultDirectives() {
+        registerDirective("import", new ImportFileDirectiveHandler());
+        registerDirective("import-relative", new ImportFileRelativeDirectiveHandler());
+    }
+}
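The DirectiveHandler interface introduced in the next file is easiest to read alongside a toy implementation. The sketch below (not part of this patch) defines a hypothetical @log directive that takes exactly one string argument, prints it, and returns the knowledge base unchanged; it only uses the validation helpers and default methods declared in the interface, and would be registered with registerDirective("log", new LogDirectiveSketch()).

    import java.util.List;

    import org.semanticweb.rulewerk.core.model.api.Argument;
    import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
    import org.semanticweb.rulewerk.parser.javacc.SubParserFactory;

    public class LogDirectiveSketch implements DirectiveHandler<KnowledgeBase> {
        @Override
        public KnowledgeBase handleDirective(List<Argument> arguments, SubParserFactory subParserFactory)
                throws ParsingException {
            // Enforce the arity of the (hypothetical) @log directive.
            DirectiveHandler.validateNumberOfArguments(arguments, 1);
            System.out.println(DirectiveHandler.validateStringArgument(arguments.get(0), "log message"));
            // Leave the knowledge base untouched.
            return getKnowledgeBase(subParserFactory);
        }
    }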
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java
new file mode 100644
index 000000000..15b5914e9
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/DirectiveHandler.java
@@ -0,0 +1,180 @@
+package org.semanticweb.rulewerk.parser;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.File;
+import java.nio.file.InvalidPathException;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.model.api.Argument;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Terms;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.parser.javacc.JavaCCParser;
+import org.semanticweb.rulewerk.parser.javacc.SubParserFactory;
+
+/**
+ * Handler for parsing a custom directive.
+ *
+ * @author Maximilian Marx
+ */
+@FunctionalInterface
+public interface DirectiveHandler<T> {
+    /**
+     * Parse a Directive.
+     *
+     * This is called by the parser to parse directives.
+     *
+     * @param arguments        Arguments given to the Directive statement.
+     * @param subParserFactory a factory for obtaining a SubParser, sharing the
+     *                         parser's state, but bound to new input.
+     *
+     * @throws ParsingException when any of the arguments is invalid for the
+     *                          directive, or the number of arguments is invalid.
+     * @return a {@code T} instance corresponding to the given arguments.
+     */
+    public T handleDirective(List<Argument> arguments, final SubParserFactory subParserFactory) throws ParsingException;
+
+    /**
+     * Validate the provided number of arguments to the directive statement.
+     *
+     * @param arguments Arguments given to the Directive statement.
+     * @param number    expected number of arguments
+     *
+     * @throws ParsingException when the given number of Arguments is invalid for
+     *                          the Directive statement.
+     */
+    public static void validateNumberOfArguments(final List<Argument> arguments, final int number)
+            throws ParsingException {
+        if (arguments.size() != number) {
+            throw new ParsingException(
+                    "Invalid number of arguments " + arguments.size() + " for Directive statement, expected " + number);
+        }
+    }
+
+    /**
+     * Validate that the provided argument is a {@link String}.
+     *
+     * @param argument    the argument to validate
+     * @param description a description of the argument, used in constructing the
+     *                    error message.
+     *
+     * @throws ParsingException when the given argument is not a {@link String}.
+     *
+     * @return the contained {@link String}.
+     */
+    public static String validateStringArgument(final Argument argument, final String description)
+            throws ParsingException {
+        try {
+            return Terms.extractString(argument.fromTerm().orElseThrow(
+                    () -> new ParsingException("Expected string for " + description + ", but did not find a term.")));
+        } catch (IllegalArgumentException e) {
+            throw new ParsingException("Failed to convert term given for " + description + " to string.");
+        }
+    }
+
+    /**
+     * Validate that the provided argument is a file path.
+     *
+     * @param argument       the argument to validate
+     * @param description    a description of the argument, used in constructing
+     *                       the error message
+     * @param importBasePath the path that relative file names are resolved against
+     *
+     * @throws ParsingException when the given argument is not a valid file path
+     *
+     * @return the File corresponding to the contained file path
+     */
+    public static File validateFilenameArgument(final Argument argument, final String description, final String importBasePath)
+            throws ParsingException {
+        String fileName = DirectiveHandler.validateStringArgument(argument, description);
+        File file = new File(fileName);
+
+        if (!file.isAbsolute() || importBasePath.isEmpty()) {
+            file = new File(importBasePath + File.separator + fileName);
+        }
+
+        try {
+            // we don't care about the actual path, just that there is one.
+            file.toPath();
+        } catch (InvalidPathException e) {
+            throw new ParsingException(description + "\"" + fileName + "\" is not a valid file path.", e);
+        }
+
+        return file;
+    }
+
+    /**
+     * Validate that the provided argument is a {@link Term}.
+     *
+     * @param argument    the argument to validate
+     * @param description a description of the argument, used in constructing the
+     *                    error message.
+     *
+     * @throws ParsingException when the given argument is not a {@link Term}.
+     *
+     * @return the contained {@link Term}.
+ */ + public static Term validateTermArgument(final Argument argument, final String description) throws ParsingException { + return argument.fromTerm() + .orElseThrow(() -> new ParsingException(description + "\"" + argument + "\" is not a Term.")); + } + + /** + * Obtain a {@link KnowledgeBase} from a {@link SubParserFactory}. + * + * @param subParserFactory the SubParserFactory. + * + * @return the knowledge base. + */ + default KnowledgeBase getKnowledgeBase(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getKnowledgeBase(); + } + + /** + * Obtain a {@link ParserConfiguration} from a {@link SubParserFactory}. + * + * @param subParserFactory the SubParserFactory. + * + * @return the parser configuration. + */ + default ParserConfiguration getParserConfiguration(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getParserConfiguration(); + } + + /** + * Obtain {@link PrefixDeclarationRegistry} from a {@link SubParserFactory}. + * + * @param subParserFactory the SubParserFactory. + * + * @return the prefix declarations. + */ + default PrefixDeclarationRegistry getPrefixDeclarationRegistry(SubParserFactory subParserFactory) { + JavaCCParser subParser = subParserFactory.makeSubParser(""); + + return subParser.getPrefixDeclarationRegistry(); + } +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java new file mode 100644 index 000000000..faa15a7db --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/LocalPrefixDeclarationRegistry.java @@ -0,0 +1,105 @@ +package org.semanticweb.rulewerk.parser; + +import org.apache.commons.lang3.Validate; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.implementation.AbstractPrefixDeclarationRegistry; + +/** + * Implementation of {@link PrefixDeclarationRegistry} that is used when parsing + * data from a single source. In this case, attempts to re-declare prefixes or + * the base IRI will lead to errors. + * + * @author Markus Kroetzsch + * + */ +final public class LocalPrefixDeclarationRegistry extends AbstractPrefixDeclarationRegistry { + + /** + * Fallback IRI to use as base IRI if none is set. + */ + private String fallbackIri; + + /** + * Construct a Prefix declaration registry without an inherited base IRI. In + * this case, we default to {@value org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry#EMPTY_BASE}. 
+     */
+    public LocalPrefixDeclarationRegistry() {
+        this(PrefixDeclarationRegistry.EMPTY_BASE); // empty string encodes: "no base" (use relative IRIs)
+    }
+
+    /**
+     * Construct a Prefix declaration registry with a base IRI inherited from the
+     * importing file.
+     *
+     * @param fallbackIri the IRI to use as a base if none is set by the imported
+     *                    file itself (i.e., if {@link #setBaseIri} is not called).
+     */
+    public LocalPrefixDeclarationRegistry(String fallbackIri) {
+        super();
+        Validate.notNull(fallbackIri, "fallbackIri must not be null");
+        this.fallbackIri = fallbackIri;
+    }
+
+    /**
+     * Returns the relevant base namespace. Returns the fallback IRI if no base
+     * namespace has been set yet, and sets that as the base IRI.
+     *
+     * @return string of an absolute base IRI
+     */
+    @Override
+    public String getBaseIri() {
+        if (this.baseIri == null) {
+            this.baseIri = this.fallbackIri;
+        }
+        return baseIri;
+    }
+
+    @Override
+    public void setPrefixIri(String prefixName, String prefixIri) throws PrefixDeclarationException {
+        if (prefixes.containsKey(prefixName)) {
+            throw new PrefixDeclarationException("Prefix \"" + prefixName + "\" is already defined as <"
+                    + prefixes.get(prefixName) + ">. It cannot be redefined to mean <" + prefixIri + ">.");
+        }
+
+        prefixes.put(prefixName, prefixIri);
+    }
+
+    /**
+     * Sets the base namespace to the given value. This should only be done once,
+     * and not after the base namespace was assumed to be an implicit default
+     * value.
+     *
+     * @param baseIri the new base namespace
+     * @throws PrefixDeclarationException if the base IRI was already defined
+     */
+    @Override
+    public void setBaseIri(String baseIri) throws PrefixDeclarationException {
+        Validate.notNull(baseIri, "baseIri must not be null");
+        if (this.baseIri != null) {
+            throw new PrefixDeclarationException(
+                    "Base is already defined as <" + this.baseIri + "> and cannot be re-defined as <" + baseIri + ">.");
+        }
+        this.baseIri = baseIri;
+    }
+}
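The contract of LocalPrefixDeclarationRegistry, namely that the fallback base IRI only applies if no base is declared and that re-declarations fail, is illustrated by the following sketch (not part of this patch; the "eg:" prefix-name convention is an assumption):

    public class PrefixRegistrySketch {
        public static void main(String[] args) throws Exception {
            final LocalPrefixDeclarationRegistry registry = new LocalPrefixDeclarationRegistry("https://example.org/");
            registry.setPrefixIri("eg:", "https://example.org/ns#");

            // No base was declared, so the fallback becomes the base IRI ...
            System.out.println(registry.getBaseIri()); // https://example.org/

            // ... and repeated declarations are rejected.
            registry.setPrefixIri("eg:", "https://example.org/other#"); // throws PrefixDeclarationException
        }
    }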
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java
new file mode 100644
index 000000000..d094124f1
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParserConfiguration.java
@@ -0,0 +1,360 @@
+package org.semanticweb.rulewerk.parser;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.commons.lang3.Validate;
+import org.semanticweb.rulewerk.core.model.api.Argument;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.TermFactory;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter;
+import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler;
+import org.semanticweb.rulewerk.parser.javacc.SubParserFactory;
+
+/**
+ * Class to keep parser configuration.
+ *
+ * @author Maximilian Marx
+ */
+public class ParserConfiguration {
+    /**
+     * Reserved directive names that are not allowed to be registered.
+     */
+    public static final List<String> RESERVED_DIRECTIVE_NAMES = Arrays.asList("base", "prefix", "source");
+
+    /**
+     * Whether parsing Named Nulls is allowed.
+     */
+    private boolean allowNamedNulls = true;
+
+    /**
+     * The registered data sources.
+     */
+    private HashMap<String, DataSourceDeclarationHandler> dataSources = new HashMap<>();
+
+    /**
+     * The registered datatypes.
+     */
+    private HashMap<String, DatatypeConstantHandler> datatypes = new HashMap<>();
+
+    /**
+     * The registered configurable literals.
+     */
+    private HashMap<ConfigurableLiteralDelimiter, ConfigurableLiteralHandler> literals = new HashMap<>();
+
+    /**
+     * The registered custom directives.
+     */
+    private HashMap<String, DirectiveHandler<KnowledgeBase>> directives = new HashMap<>();
+
+    /**
+     * The current base path to resolve imports against. Defaults to the current
+     * working directory.
+     */
+    private String importBasePath = System.getProperty("user.dir");
+
+    public ParserConfiguration() {
+    }
+
+    /**
+     * Copy constructor.
+     *
+     * @param other {@link ParserConfiguration} to copy
+     */
+    public ParserConfiguration(ParserConfiguration other) {
+        this.allowNamedNulls = other.allowNamedNulls;
+        this.dataSources = new HashMap<>(other.dataSources);
+        this.datatypes = new HashMap<>(other.datatypes);
+        this.literals = new HashMap<>(other.literals);
+        this.directives = new HashMap<>(other.directives);
+        this.importBasePath = other.importBasePath;
+    }
+
+    /**
+     * Register a new (type of) Data Source.
+     *
+     * This registers a handler for some custom value of the {@code DATASOURCE}
+     * production of the rules grammar, corresponding to some {@link DataSource}
+     * type.
+     *
+     * @see the grammar
+     *
+     * @param name    Name of the data source, as it appears in the declaring
+     *                directive.
+     * @param handler Handler for parsing a data source declaration.
+     *
+     * @throws IllegalArgumentException if the provided name is already registered.
+     * @return this
+     */
+    public ParserConfiguration registerDataSource(final String name, final DataSourceDeclarationHandler handler)
+            throws IllegalArgumentException {
+        Validate.isTrue(!this.dataSources.containsKey(name), "The Data Source \"%s\" is already registered.", name);
+
+        this.dataSources.put(name, handler);
+        return this;
+    }
+
+    /**
+     * Parse the source-specific part of a data source declaration.
+     *
+     * This is called by the parser to construct a {@link DataSourceDeclaration}. It
+     * is responsible for instantiating an appropriate {@link DataSource} type.
+     *
+     * @param declaration literal that specifies the type and parameters for this
+     *                    data source declaration
+     *
+     * @throws ParsingException when the declaration is invalid, e.g., if the data
+     *                          source is not known.
+     *
+     * @return the data source instance.
+     */
+    public DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLiteral declaration)
+            throws ParsingException {
+        final DataSourceDeclarationHandler handler = this.dataSources.get(declaration.getPredicate().getName());
+
+        if (handler == null) {
+            throw new ParsingException("Data source \"" + declaration.getPredicate().getName() + "\" is not known.");
+        }
+
+        return handler.handleDataSourceDeclaration(declaration.getArguments(), this.importBasePath);
+    }
+
+    /**
+     * Parse a constant with optional data type.
+     *
+     * @param lexicalForm the (unescaped) lexical form of the constant.
+     * @param datatype    the datatype, or null if not present.
+     * @param termFactory the {@link TermFactory} to use for creating the result
+     *
+     * @throws ParsingException when the lexical form is invalid for the given data
+     *                          type.
+     * @return the {@link Constant} corresponding to the given arguments.
+     */
+    public Constant parseDatatypeConstant(final String lexicalForm, final String datatype,
+            final TermFactory termFactory) throws ParsingException {
+        final String type = ((datatype != null) ? datatype : PrefixDeclarationRegistry.XSD_STRING);
+        final DatatypeConstantHandler handler = this.datatypes.get(type);
+
+        if (handler != null) {
+            return handler.createConstant(lexicalForm);
+        }
+
+        return termFactory.makeDatatypeConstant(lexicalForm, type);
+    }
+
+    /**
+     * Check if a handler for this
+     * {@link org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter}
+     * is registered.
+     *
+     * @param delimiter delimiter to check.
+     * @return true if a handler for the given delimiter is registered.
+     */
+    public boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) {
+        return literals.containsKey(delimiter);
+    }
+
+    /**
+     * Parse a configurable literal.
+     *
+     * @param delimiter        delimiter given for the syntactic form.
+     * @param syntacticForm    syntactic form of the literal to parse.
+     * @param subParserFactory a {@link SubParserFactory} instance that creates
+     *                         parsers with the same context as the current parser.
+     *
+     * @throws ParsingException when no handler for the literal is registered, or
+     *                          the given syntactic form is invalid.
+     * @return an appropriate {@link Term} instance.
+     */
+    public Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm,
+            final SubParserFactory subParserFactory) throws ParsingException {
+        if (!isConfigurableLiteralRegistered(delimiter)) {
+            throw new ParsingException(
+                    "No handler for configurable literal delimiter \"" + delimiter + "\" registered.");
+        }
+
+        ConfigurableLiteralHandler handler = literals.get(delimiter);
+        return handler.parseLiteral(syntacticForm, subParserFactory);
+    }
+
+    /**
+     * Register a new data type.
+     *
+     * @param name    the IRI representing the data type.
+     * @param handler a {@link DatatypeConstantHandler} that parses a syntactic
+     *                form into a {@link Constant}.
+     *
+     * @throws IllegalArgumentException when the data type name has already been
+     *                                  registered.
+     *
+     * @return this
+     */
+    public ParserConfiguration registerDatatype(final String name, final DatatypeConstantHandler handler)
+            throws IllegalArgumentException {
+        Validate.isTrue(!this.datatypes.containsKey(name), "The Data type \"%s\" is already registered.", name);
+
+        this.datatypes.put(name, handler);
+        return this;
+    }
+
+    /**
+     * Register a custom literal handler.
+     *
+     * @param delimiter the delimiter to handle.
+     * @param handler   the handler for this literal type.
+     *
+     * @throws IllegalArgumentException when the literal delimiter has already been
+     *                                  registered.
+     *
+     * @return this
+     */
+    public ParserConfiguration registerLiteral(ConfigurableLiteralDelimiter delimiter,
+            ConfigurableLiteralHandler handler) throws IllegalArgumentException {
+        Validate.isTrue(!this.literals.containsKey(delimiter), "Literal delimiter \"%s\" is already registered.",
+                delimiter);
+
+        this.literals.put(delimiter, handler);
+        return this;
+    }
+
+    /**
+     * Register a directive.
+     *
+     * @param name    the name of the directive.
+     * @param handler the handler for this directive.
+     *
+     * @throws IllegalArgumentException when the directive name has already been
+     *                                  registered, or is a reserved name (i.e., one
+     *                                  of {@code base}, {@code prefix}, and
+     *                                  {@code source}).
+     *
+     * @return this
+     */
+    public ParserConfiguration registerDirective(String name, DirectiveHandler<KnowledgeBase> handler)
+            throws IllegalArgumentException {
+        Validate.isTrue(!RESERVED_DIRECTIVE_NAMES.contains(name), "The name \"%s\" is a reserved directive name.",
+                name);
+        Validate.isTrue(!this.directives.containsKey(name), "The directive \"%s\" is already registered.", name);
+
+        this.directives.put(name, handler);
+        return this;
+    }
+
+    /**
+     * Parse a directive statement.
+     *
+     * @param name             the name of the directive.
+     * @param arguments        the arguments given in the statement.
+     * @param subParserFactory a factory for obtaining a parser that shares the
+     *                         current parser's state.
+     *
+     * @throws ParsingException when the directive is not known, or the arguments
+     *                          are invalid for the directive.
+     *
+     * @return the (possibly updated) KnowledgeBase
+     */
+    public KnowledgeBase parseDirectiveStatement(String name, List<Argument> arguments,
+            SubParserFactory subParserFactory) throws ParsingException {
+        final DirectiveHandler<KnowledgeBase> handler = this.directives.get(name);
+
+        if (handler == null) {
+            throw new ParsingException("Directive \"" + name + "\" is not known.");
+        }
+
+        return handler.handleDirective(arguments, subParserFactory);
+    }
+
+    /**
+     * Set whether to allow parsing of
+     * {@link org.semanticweb.rulewerk.core.model.api.NamedNull}.
+     *
+     * @param allow true allows parsing of named nulls.
+     *
+     * @return this
+     */
+    public ParserConfiguration setNamedNulls(boolean allow) {
+        this.allowNamedNulls = allow;
+        return this;
+    }
+
+    /**
+     * Allow parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull}.
+     *
+     * @return this
+     */
+    public ParserConfiguration allowNamedNulls() {
+        return this.setNamedNulls(true);
+    }
+
+    /**
+     * Disallow parsing of
+     * {@link org.semanticweb.rulewerk.core.model.api.NamedNull}.
+     *
+     * @return this
+     */
+    public ParserConfiguration disallowNamedNulls() {
+        return this.setNamedNulls(false);
+    }
+
+    /**
+     * Whether parsing of {@link org.semanticweb.rulewerk.core.model.api.NamedNull}
+     * is allowed.
+     *
+     * @return true iff parsing of NamedNulls is allowed.
+     */
+    public boolean isParsingOfNamedNullsAllowed() {
+        return this.allowNamedNulls;
+    }
+
+    /**
+     * Get the base path for file imports.
+     *
+     * @return the path that relative imports will be resolved against.
+     */
+    public String getImportBasePath() {
+        return this.importBasePath;
+    }
+
+    /**
+     * Set a new base path for file imports.
+     *
+     * @param importBasePath path that relative imports will be resolved against.
+     *                       If null, defaults to the current working directory.
+     *
+     * @return this
+     */
+    public ParserConfiguration setImportBasePath(String importBasePath) {
+        if (importBasePath != null) {
+            this.importBasePath = importBasePath;
+        } else {
+            this.importBasePath = System.getProperty("user.dir");
+        }
+
+        return this;
+    }
+
+}
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java
new file mode 100644
index 000000000..f5890b9e8
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/ParsingException.java
@@ -0,0 +1,43 @@
+package org.semanticweb.rulewerk.parser;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.semanticweb.rulewerk.core.exceptions.RulewerkException;
+
+public class ParsingException extends RulewerkException {
+    private static final long serialVersionUID = 2849123381757026724L;
+
+    public ParsingException(String message) {
+        super(message);
+    }
+
+    public ParsingException(Throwable cause) {
+        super(cause);
+    }
+
+    public ParsingException(String message, Throwable cause) {
+        super(message, cause);
+    }
+
+    public ParsingException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
+        super(message, cause, enableSuppression, writableStackTrace);
+    }
+}
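The RuleParser facade that follows is the usual entry point for the whole module; a minimal round trip looks like the sketch below (not part of this patch; the rule syntax and the KnowledgeBase accessors getRules()/getFacts() are assumptions based on Rulewerk's documented usage, not on this diff):

    import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;

    public class RuleParserSketch {
        public static void main(String[] args) throws ParsingException {
            // One fact and one rule, in Rulewerk's rule syntax.
            final KnowledgeBase knowledgeBase = RuleParser.parse("p(a) . q(?X) :- p(?X) .");
            System.out.println(knowledgeBase.getRules());
            System.out.println(knowledgeBase.getFacts());
        }
    }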
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java
new file mode 100644
index 000000000..7043d92c4
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/RuleParser.java
@@ -0,0 +1,285 @@
+package org.semanticweb.rulewerk.parser;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Entity;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.parser.javacc.JavaCCParser;
+import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.FormulaContext;
+import org.semanticweb.rulewerk.parser.javacc.ParseException;
+import org.semanticweb.rulewerk.parser.javacc.TokenMgrError;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Class to statically access VLog parsing functionality.
+ *
+ * @author Markus Kroetzsch
+ *
+ */
+public class RuleParser {
+
+    public static final String DEFAULT_STRING_ENCODING = "UTF-8";
+
+    private static Logger LOGGER = LoggerFactory.getLogger(RuleParser.class);
+
+    private RuleParser() {
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding,
+            final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException {
+        final JavaCCParser parser = new JavaCCParser(stream, encoding);
+
+        if (baseIri != null) {
+            PrefixDeclarationRegistry prefixDeclarationRegistry = new LocalPrefixDeclarationRegistry(baseIri);
+            parser.setPrefixDeclarationRegistry(prefixDeclarationRegistry);
+        }
+
+        parser.setKnowledgeBase(knowledgeBase);
+        parser.setParserConfiguration(parserConfiguration);
+        doParse(parser);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding,
+            final ParserConfiguration parserConfiguration) throws ParsingException {
+        parseInto(knowledgeBase, stream, encoding, parserConfiguration, null);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream,
+            final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException {
+        parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration, baseIri);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream,
+            final ParserConfiguration parserConfiguration) throws ParsingException {
+        parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING, parserConfiguration);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final String input,
+            final ParserConfiguration parserConfiguration, final String baseIri) throws ParsingException {
+        final InputStream inputStream = new ByteArrayInputStream(input.getBytes());
+        parseInto(knowledgeBase, inputStream, parserConfiguration, baseIri);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final String input,
+            final ParserConfiguration parserConfiguration) throws ParsingException {
+        final InputStream inputStream = new ByteArrayInputStream(input.getBytes());
+        parseInto(knowledgeBase, inputStream, parserConfiguration);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream, final String encoding)
+            throws ParsingException {
+        final JavaCCParser javaCcParser = new JavaCCParser(stream, encoding);
+        javaCcParser.setKnowledgeBase(knowledgeBase);
+        doParse(javaCcParser);
+    }
+            throws ParsingException {
+        final JavaCCParser javaCcParser = new JavaCCParser(stream, encoding);
+        javaCcParser.setKnowledgeBase(knowledgeBase);
+        doParse(javaCcParser);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final InputStream stream) throws ParsingException {
+        parseInto(knowledgeBase, stream, DEFAULT_STRING_ENCODING);
+    }
+
+    public static void parseInto(final KnowledgeBase knowledgeBase, final String input) throws ParsingException {
+        final InputStream inputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8));
+        parseInto(knowledgeBase, inputStream);
+    }
+
+    public static KnowledgeBase parse(final InputStream stream, final String encoding,
+            final ParserConfiguration parserConfiguration) throws ParsingException {
+        JavaCCParser parser = new JavaCCParser(stream, encoding);
+        parser.setParserConfiguration(parserConfiguration);
+        return doParse(parser);
+    }
+
+    public static KnowledgeBase parse(final InputStream stream, final ParserConfiguration parserConfiguration)
+            throws ParsingException {
+        return parse(stream, DEFAULT_STRING_ENCODING, parserConfiguration);
+    }
+
+    public static KnowledgeBase parse(final String input, final ParserConfiguration parserConfiguration)
+            throws ParsingException {
+        final InputStream inputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8));
+        return parse(inputStream, parserConfiguration);
+    }
+
+    public static KnowledgeBase parse(final InputStream stream, final String encoding) throws ParsingException {
+        return doParse(new JavaCCParser(stream, encoding));
+    }
+
+    public static KnowledgeBase parse(final InputStream stream) throws ParsingException {
+        return parse(stream, DEFAULT_STRING_ENCODING);
+    }
+
+    public static KnowledgeBase parse(final String input) throws ParsingException {
+        final InputStream inputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8));
+        return parse(inputStream);
+    }
+
+    /**
+     * Interface for a method parsing a fragment of the supported syntax.
+     *
+     * This is needed to specify the exceptions thrown by the parse method.
+     */
+    @FunctionalInterface
+    interface SyntaxFragmentParser<T> {
+        T parse(final JavaCCParser parser)
+                throws ParsingException, ParseException, PrefixDeclarationException, TokenMgrError;
+    }
+
+    /**
+     * Parse a syntax fragment.
+     *
+     * @param input               Input string.
+     * @param parserAction        Parsing method for the {@code T}.
+     * @param syntaxFragmentType  Description of the type {@code T} being parsed.
+     * @param parserConfiguration {@link ParserConfiguration} instance, or null.
+     *
+     * @throws ParsingException when an error during parsing occurs.
+     * @return an appropriate instance of {@code T}
+     */
+    static <T> T parseSyntaxFragment(final String input, SyntaxFragmentParser<T> parserAction,
+            final String syntaxFragmentType, final ParserConfiguration parserConfiguration) throws ParsingException {
+        final InputStream inputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8));
+        final JavaCCParser localParser = new JavaCCParser(inputStream, "UTF-8");
+
+        if (parserConfiguration != null) {
+            localParser.setParserConfiguration(parserConfiguration);
+        }
+
+        T result;
+        try {
+            result = parserAction.parse(localParser);
+            localParser.ensureEndOfInput();
+        } catch (ParseException | PrefixDeclarationException | TokenMgrError | RuntimeException e) {
+            LOGGER.error("Error parsing {}: {}!", syntaxFragmentType, input);
+            throw new ParsingException("Error parsing " + syntaxFragmentType + ": " + e.getMessage(), e);
+        }
+        return result;
+    }
+
+    public static Rule parseRule(final String input, final ParserConfiguration parserConfiguration)
+            throws ParsingException {
+        return parseSyntaxFragment(input, JavaCCParser::rule, "rule", parserConfiguration);
+    }
+
+    public static Rule parseRule(final String input) throws ParsingException {
+        return parseRule(input, null);
+    }
+
+    public static Literal parseLiteral(final String input, final ParserConfiguration parserConfiguration)
+            throws ParsingException {
+        return parseSyntaxFragment(input, parser -> parser.literal(FormulaContext.HEAD), "literal",
+                parserConfiguration);
+    }
+
+    public static Literal parseLiteral(final String input) throws ParsingException {
+        return parseLiteral(input, null);
+    }
+
+    public static PositiveLiteral parsePositiveLiteral(final String input,
+            final ParserConfiguration parserConfiguration) throws ParsingException {
+        return parseSyntaxFragment(input, parser -> parser.positiveLiteral(FormulaContext.HEAD), "positive literal",
+                parserConfiguration);
+    }
+
+    public static PositiveLiteral parsePositiveLiteral(final String input) throws ParsingException {
+        return parsePositiveLiteral(input, null);
+    }
+
+    public static Fact parseFact(final String input, final ParserConfiguration parserConfiguration)
+            throws ParsingException {
+        return parseSyntaxFragment(input, parser -> parser.fact(FormulaContext.HEAD), "fact", parserConfiguration);
+    }
+
+    public static Fact parseFact(final String input) throws ParsingException {
+        return parseFact(input, null);
+    }
+
+    public static Term parseTerm(final String input, final FormulaContext context,
+            final ParserConfiguration parserConfiguration) throws ParsingException {
+        return parseSyntaxFragment(input, parser -> parser.term(context), "term", parserConfiguration);
+    }
+
+    public static Term parseTerm(final String input, final ParserConfiguration parserConfiguration)
+            throws ParsingException {
+        return parseTerm(input, FormulaContext.HEAD, parserConfiguration);
+    }
+
+    public static Term parseTerm(final String input, final FormulaContext context) throws ParsingException {
+        return parseTerm(input, context, null);
+    }
+
+    public static Term parseTerm(final String input) throws ParsingException {
+        return parseTerm(input, (ParserConfiguration) null);
+    }
+
+    public static DataSourceDeclaration parseDataSourceDeclaration(final String input,
+            ParserConfiguration parserConfiguration) throws ParsingException {
+        return parseSyntaxFragment(input, RuleParser::parseAndExtractDatasourceDeclaration, "data source declaration",
+                parserConfiguration);
+    }
+
+    public static DataSourceDeclaration parseDataSourceDeclaration(final String input) throws ParsingException {
+        return parseDataSourceDeclaration(input, null);
+    }
+
+    static KnowledgeBase doParse(final JavaCCParser parser) throws ParsingException {
+        try {
+            parser.parse();
+        } catch (ParseException | PrefixDeclarationException | TokenMgrError e) {
+            LOGGER.error("Error parsing Knowledge Base: " + e.getMessage(), e);
+            throw new ParsingException(e.getMessage(), e);
+        }
+
+        KnowledgeBase knowledgeBase = parser.getKnowledgeBase();
+        knowledgeBase.mergePrefixDeclarations(parser.getPrefixDeclarationRegistry());
+
+        return knowledgeBase;
+    }
+
+    protected static DataSourceDeclaration parseAndExtractDatasourceDeclaration(final JavaCCParser parser)
+            throws ParsingException, ParseException, PrefixDeclarationException {
+        parser.source();
+
+        final List<DataSourceDeclaration> dataSourceDeclarations = parser.getKnowledgeBase()
+                .getDataSourceDeclarations();
+
+        if (dataSourceDeclarations.size() != 1) {
+            throw new ParsingException(
+                    "Unexpected number of data source declarations: " + dataSourceDeclarations.size());
+        }
+
+        return dataSourceDeclarations.get(0);
+    }
+
+}
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java
new file mode 100644
index 000000000..ceac5b156
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/CsvFileDataSourceDeclarationHandler.java
@@ -0,0 +1,49 @@
+package org.semanticweb.rulewerk.parser.datasources;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+/**
+ * Handler for parsing {@link CsvFileDataSource} declarations
+ *
+ * @author Maximilian Marx
+ */
+public class CsvFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler {
+    @Override
+    public DataSource handleDataSourceDeclaration(List<Term> terms, String importBasePath) throws ParsingException {
+        DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1);
+        String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "CSV file name",
+                importBasePath);
+
+        try {
+            return new CsvFileDataSource(fileName);
+        } catch (IOException e) {
+            throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e);
+        }
+    }
+}
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java
new file mode 100644
index 000000000..1aa6a824c
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/DataSourceDeclarationHandler.java
@@ -0,0 +1,139 @@
+package org.semanticweb.rulewerk.parser.datasources;
+
+import java.io.File;
+import java.net.URL;
+import java.util.List;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Terms;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+/**
+ * Handler for interpreting the arguments of a custom Data Source declaration.
+ *
+ * @author Markus Kroetzsch
+ */
+@FunctionalInterface
+public interface DataSourceDeclarationHandler {
+
+    /**
+     * Handle a data source declaration.
+     *
+     * @param terms          the list of arguments given in the declaration
+     * @param importBasePath the base path that relative imports will be resolved
+     *                       against
+     *
+     * @throws ParsingException when the arguments are unsuitable for the data
+     *                          source.
+     *
+     * @return a DataSource instance.
+     */
+    DataSource handleDataSourceDeclaration(List<Term> terms, String importBasePath) throws ParsingException;
+
+    /**
+     * Validate the provided number of arguments to the source declaration.
+     *
+     * @param terms  arguments given to the source declaration.
+     * @param number expected number of arguments
+     *
+     * @throws ParsingException when the number of terms does not match expectations
+     */
+    public static void validateNumberOfArguments(final List<Term> terms, final int number) throws ParsingException {
+        if (terms.size() != number) {
+            throw new ParsingException(
+                    "Invalid number of arguments " + terms.size() + " for @source declaration, expected " + number);
+        }
+    }
+
+    /**
+     * Returns the string content of the given term, or reports an error if the term
+     * is not an xsd:string.
+     *
+     * @param term          the term to be processed
+     * @param parameterName the string name of the parameter to be used in error
+     *                      messages
+     * @return the extracted string
+     * @throws ParsingException thrown if the term was not a String
+     */
+    public static String validateStringArgument(Term term, String parameterName) throws ParsingException {
+        try {
+            return Terms.extractString(term);
+        } catch (IllegalArgumentException e) {
+            throw makeParameterParsingException(term, parameterName, "String", e);
+        }
+    }
+
+    /**
+     * Returns the URL represented by the given term, or reports an error if no
+     * valid URL could be extracted from the term.
+     *
+     * @param term          the term to be processed
+     * @param parameterName the string name of the parameter to be used in error
+     *                      messages
+     * @return the extracted URL
+     * @throws ParsingException thrown if the term was not a URL
+     */
+    public static URL validateUrlArgument(Term term, String parameterName) throws ParsingException {
+        try {
+            return Terms.extractUrl(term);
+        } catch (IllegalArgumentException e) {
+            throw makeParameterParsingException(term, parameterName, "URL", e);
+        }
+    }
+
+    /**
+     * Returns the File name represented by the given term, or reports an error if
+     * no valid File name could be extracted from the term.
+     *
+     * @param term           the term to be processed
+     * @param parameterName  the string name of the parameter to be used in error
+     *                       messages
+     * @param importBasePath the base path that relative paths will be resolved
+     *                       against
+     *
+     * @throws ParsingException when the term was not a valid file path
+     * @return the extracted file path
+     */
+    public static String validateFileNameArgument(Term term, String parameterName, String importBasePath)
+            throws ParsingException {
+        File file;
+
+        try {
+            file = new File(Terms.extractString(term));
+        } catch (IllegalArgumentException e) {
+            throw makeParameterParsingException(term, parameterName, "File name", e);
+        }
+
+        if (file.isAbsolute() || importBasePath.isEmpty()) {
+            return file.getPath();
+        }
+        return importBasePath + File.separator + file.getPath();
+    }
+
+    static ParsingException makeParameterParsingException(Term term, String parameterName, String type,
+            Throwable cause) {
+        return new ParsingException(
+                "Expected " + parameterName + " to be a " + type + ". Found " + term.toString() + ".", cause);
Found " + term.toString() + ".", cause); + } +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java new file mode 100644 index 000000000..99b85132c --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/RdfFileDataSourceDeclarationHandler.java @@ -0,0 +1,49 @@ +package org.semanticweb.rulewerk.parser.datasources; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.List; + +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * Handler for parsing {@link RdfFileDataSource} declarations + * + * @author Maximilian Marx + */ +public class RdfFileDataSourceDeclarationHandler implements DataSourceDeclarationHandler { + @Override + public DataSource handleDataSourceDeclaration(List terms, String importBasePath) throws ParsingException { + DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1); + String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0), "RDF file name", + importBasePath); + + try { + return new RdfFileDataSource(fileName); + } catch (IOException e) { + throw new ParsingException("Could not use source file \"" + fileName + "\": " + e.getMessage(), e); + } + } +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java new file mode 100644 index 000000000..5faca3ad9 --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/SparqlQueryResultDataSourceDeclarationHandler.java @@ -0,0 +1,47 @@ +package org.semanticweb.rulewerk.parser.datasources; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import java.net.URL;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+/**
+ * Handler for parsing {@link SparqlQueryResultDataSource} declarations
+ *
+ * @author Maximilian Marx
+ */
+public class SparqlQueryResultDataSourceDeclarationHandler implements DataSourceDeclarationHandler {
+
+    @Override
+    public DataSource handleDataSourceDeclaration(List<Term> terms, String importBasePath) throws ParsingException {
+        DataSourceDeclarationHandler.validateNumberOfArguments(terms, 3);
+        URL endpoint = DataSourceDeclarationHandler.validateUrlArgument(terms.get(0), "SPARQL endpoint URL");
+        String variables = DataSourceDeclarationHandler.validateStringArgument(terms.get(1),
+                "SPARQL query variable list");
+        String query = DataSourceDeclarationHandler.validateStringArgument(terms.get(2), "SPARQL query pattern");
+
+        return new SparqlQueryResultDataSource(endpoint, variables, query);
+    }
+}
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java
new file mode 100644
index 000000000..7ec1627d4
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/datasources/TridentDataSourceDeclarationHandler.java
@@ -0,0 +1,49 @@
+package org.semanticweb.rulewerk.parser.datasources;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource;
+import org.semanticweb.rulewerk.parser.ParsingException;
+
+/**
+ * Handler for parsing {@link TridentDataSource} declarations
+ *
+ * @author Markus Kroetzsch
+ */
+public class TridentDataSourceDeclarationHandler implements DataSourceDeclarationHandler {
+    @Override
+    public DataSource handleDataSourceDeclaration(List<Term> terms, String importBasePath) throws ParsingException {
+        DataSourceDeclarationHandler.validateNumberOfArguments(terms, 1);
+        String fileName = DataSourceDeclarationHandler.validateFileNameArgument(terms.get(0),
+                "path to Trident database", importBasePath);
+
+        try {
+            return new TridentDataSource(fileName);
+        } catch (IOException e) {
+            throw new ParsingException("Could not use trident database \"" + fileName + "\": " + e.getMessage(), e);
+        }
+    }
+}
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java
new file mode 100644
index 000000000..262815779
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileDirectiveHandler.java
@@ -0,0 +1,65 @@
+package org.semanticweb.rulewerk.parser.directives;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.exceptions.RulewerkException;
+import org.semanticweb.rulewerk.core.model.api.Argument;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.parser.DirectiveHandler;
+import org.semanticweb.rulewerk.parser.ParserConfiguration;
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.RuleParser;
+import org.semanticweb.rulewerk.parser.javacc.SubParserFactory;
+
+/**
+ * Handler for parsing {@code @import} statements.
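+ *
+ * <p>
+ * Illustrative sketch of the accepted syntax (the file name here is
+ * hypothetical): a statement such as {@code @import "rules.rls" .} parses the
+ * referenced file and merges its statements into the current knowledge base,
+ * resolving the file name against the configured import base path.
+ * </p>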
+ *
+ * @author Maximilian Marx
+ */
+public class ImportFileDirectiveHandler implements DirectiveHandler<KnowledgeBase> {
+
+    @Override
+    public KnowledgeBase handleDirective(final List<Argument> arguments, final SubParserFactory subParserFactory)
+            throws ParsingException {
+        final ParserConfiguration parserConfiguration = new ParserConfiguration(
+                getParserConfiguration(subParserFactory));
+        DirectiveHandler.validateNumberOfArguments(arguments, 1);
+        final File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file",
+                parserConfiguration.getImportBasePath());
+        final KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory);
+        parserConfiguration.setImportBasePath(file.getParent());
+
+        try {
+            knowledgeBase.importRulesFile(file, (final InputStream stream, final KnowledgeBase kb) -> {
+                RuleParser.parseInto(kb, stream, parserConfiguration);
+            });
+        } catch (RulewerkException | IOException | IllegalArgumentException e) {
+            throw new ParsingException("Could not import rules file \"" + file.getName() + "\": " + e.getMessage(), e);
+        }
+
+        return knowledgeBase;
+    }
+}
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java
new file mode 100644
index 000000000..115f4f4e3
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/directives/ImportFileRelativeDirectiveHandler.java
@@ -0,0 +1,66 @@
+package org.semanticweb.rulewerk.parser.directives;
+
+/*-
+ * #%L
+ * Rulewerk Parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.exceptions.RulewerkException;
+import org.semanticweb.rulewerk.core.model.api.Argument;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.parser.DirectiveHandler;
+import org.semanticweb.rulewerk.parser.ParserConfiguration;
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.RuleParser;
+import org.semanticweb.rulewerk.parser.javacc.SubParserFactory;
+
+/**
+ * Handler for parsing {@code @import-relative} statements.
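+ *
+ * <p>
+ * Illustrative example (the file name is hypothetical): a statement such as
+ * {@code @import-relative "more-rules.rls" .} behaves like {@code @import},
+ * but additionally hands the current base IRI on to the imported file, so
+ * that relative IRIs there resolve as in the importing document.
+ * </p>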
+ *
+ * @author Maximilian Marx
+ */
+public class ImportFileRelativeDirectiveHandler implements DirectiveHandler<KnowledgeBase> {
+    @Override
+    public KnowledgeBase handleDirective(List<Argument> arguments, SubParserFactory subParserFactory)
+            throws ParsingException {
+        final ParserConfiguration parserConfiguration = new ParserConfiguration(
+                getParserConfiguration(subParserFactory));
+        DirectiveHandler.validateNumberOfArguments(arguments, 1);
+        PrefixDeclarationRegistry prefixDeclarationRegistry = getPrefixDeclarationRegistry(subParserFactory);
+        File file = DirectiveHandler.validateFilenameArgument(arguments.get(0), "rules file",
+                parserConfiguration.getImportBasePath());
+        KnowledgeBase knowledgeBase = getKnowledgeBase(subParserFactory);
+        parserConfiguration.setImportBasePath(file.getParent());
+
+        try {
+            knowledgeBase.importRulesFile(file, (InputStream stream, KnowledgeBase kb) -> {
+                RuleParser.parseInto(kb, stream, parserConfiguration, prefixDeclarationRegistry.getBaseIri());
+            });
+        } catch (RulewerkException | IOException | IllegalArgumentException e) {
+            throw new ParsingException("Failed while trying to import rules file \"" + file.getName() + "\"", e);
+        }
+
+        return knowledgeBase;
+    }
+}
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/.gitignore b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/.gitignore
new file mode 100644
index 000000000..887cfb74c
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/.gitignore
@@ -0,0 +1,8 @@
+/ParseException.java
+/SimpleCharStream.java
+/Token.java
+/TokenMgrError.java
+/JavaCharStream.java
+/JavaCCParser.java
+/JavaCCParserConstants.java
+/JavaCCParserTokenManager.java
diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj
new file mode 100644
index 000000000..6c4709f09
--- /dev/null
+++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParser.jj
@@ -0,0 +1,691 @@
+/*-
+ * #%L
+ * rulewerk-parser
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+options
+{
+    // Use \ u escapes in streams AND use a reader for the query
+    // => get both raw and escaped unicode
+    JAVA_UNICODE_ESCAPE = true;
+    UNICODE_INPUT = false;
+
+    STATIC = false;
+    // DEBUG_PARSER = true;
+    // DEBUG_LOOKAHEAD = true;
+    // DEBUG_TOKEN_MANAGER = true;
+}
+
+PARSER_BEGIN(JavaCCParser)
+package org.semanticweb.rulewerk.parser.javacc;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.ArrayDeque;
+import java.util.Deque;
+
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase;
+import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException;
+
+import org.semanticweb.rulewerk.core.model.api.Statement;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.NegativeLiteral;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.model.api.Argument;
+import org.semanticweb.rulewerk.core.model.api.Command;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+
+public class JavaCCParser extends JavaCCParserBase
+{
+    private SubParserFactory getSubParserFactory() {
+        return new SubParserFactory(this);
+    }
+
+    public void ensureEndOfInput() throws ParseException {
+        jj_consume_token(EOF);
+    }
+}
+
+PARSER_END(JavaCCParser)
+
+TOKEN_MGR_DECLS : {
+    // use initializer block to work around auto-generated constructors.
+    {
+        states = new ArrayDeque<Integer>();
+    }
+
+    Deque<Integer> states;
+
+    void pushState() {
+        states.push(curLexState);
+    }
+
+    void popState() {
+        SwitchTo(states.pop());
+    }
+}
+
+void parse() throws PrefixDeclarationException : {
+} {
+    ( base() )?
+    ( prefix() )*
+    ( source() )*
+    ( statement() )*
+    < EOF >
+}
+
+void base() throws PrefixDeclarationException : {
+    Token iri;
+} {
+    < BASE > iri = < IRI_ABSOLUTE > < DOT > {
+        setBase(iri.image);
+    }
+}
+
+String prefixName() : {
+    Token pn;
+} {
+    pn = < COLON > { return pn.image; }
+    | pn = < PNAME_NS > { return pn.image; }
+}
+
+void prefix() throws PrefixDeclarationException : {
+    String pn;
+    String iri;
+} {
+    < PREFIX > pn = prefixName() iri = absoluteIri() < DOT > {
+        setPrefix(pn, iri);
+    }
+}
+
+String absoluteIri() throws PrefixDeclarationException : {
+    Token iri;
+} {
+    iri = < IRI_ABSOLUTE > { return absolutizeIri(iri.image); }
+    | iri = < PNAME_LN > { return resolvePrefixedName(iri.image); }
+}
+
+void source() throws PrefixDeclarationException : {
+    String predicateName;
+    DataSource dataSource;
+    Token arity;
+} {
+    < SOURCE > predicateName = predicateName() arity = < ARITY > < COLON > dataSource = dataSource() < DOT >
+    {
+        int nArity;
+        // Do not catch NumberFormatException: < INTEGER > matches must parse as int in Java!
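+        // (Clarifying note: the ARITY token strips its "[" and "]" delimiters in the
+        // token manager, so arity.image is exactly the optionally signed integer
+        // matched by < INTEGER > and Integer.parseInt must succeed here.)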
+ nArity = Integer.parseInt(arity.image); + + addDataSource(predicateName, nArity, dataSource); + } +} + +DataSource dataSource() throws PrefixDeclarationException : { + PositiveLiteral positiveLiteral; + List< Argument > arguments; +} { + positiveLiteral = positiveLiteral(FormulaContext.HEAD) { + return parseDataSourceSpecificPartOfDataSourceDeclaration(positiveLiteral); + } +} + +KnowledgeBase directive() throws PrefixDeclarationException : { + Token name; + List< Argument > arguments; +} { + name = < CUSTOM_DIRECTIVE > arguments = Arguments() < DOT > { + return parseDirectiveStatement(name.image, arguments, getSubParserFactory()); + } +} + +Command command() throws PrefixDeclarationException : { + Token name, pn, arity; + LinkedList< Argument > arguments; + String predicateName; + +} { + name = < CUSTOM_DIRECTIVE > + ( LOOKAHEAD(predicateName() < ARITY > ) predicateName = predicateName() arity = < ARITY > < DOT > { + Argument argument = Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING)); + return new Command(name.image, Arrays.asList(argument)); + } + | LOOKAHEAD(predicateName() < ARITY >) predicateName = predicateName() arity = < ARITY > < COLON > arguments = Arguments() < DOT > { + arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(predicateName + "[" + arity.image + "]:", PrefixDeclarationRegistry.XSD_STRING))); + return new Command(name.image,arguments); + } + | arguments = Arguments() < DOT > { return new Command(name.image,arguments); } + | pn = < PNAME_NS > arguments = Arguments() < DOT > { + arguments.addFirst(Argument.term(Expressions.makeDatatypeConstant(pn.image, PrefixDeclarationRegistry.XSD_STRING))); + return new Command(name.image,arguments); + } + | < DOT > { return new Command(name.image, new LinkedList< Argument >()); } + ) +} + +void statement() throws PrefixDeclarationException : { + Statement statement; + KnowledgeBase knowledgeBase; + resetVariableSets(); +} { + ( LOOKAHEAD(rule()) statement = rule() { addStatement(statement); } + | statement = fact(FormulaContext.HEAD) { addStatement(statement); } + | knowledgeBase = directive() { setKnowledgeBase(knowledgeBase); } + ) +} + +Rule rule() throws PrefixDeclarationException : { + Rule rule; +} { + rule = ruleNoDot() < DOT > { + return rule; + } +} + +Rule ruleNoDot() throws PrefixDeclarationException : { + List < PositiveLiteral > head; + List < Literal > body; +} { + head = listOfPositiveLiterals(FormulaContext.HEAD) < ARROW > body = listOfLiterals(FormulaContext.BODY) { + // check that the intersection between headExiVars and BodyVars is empty + for (String variable : headExiVars) { + if (bodyVars.contains(variable)) + throw new ParseException("Malformed rule " + head + " :- " + body + "\nExistential variable " + variable + " also used in rule body."); + } + + // check that bodyVars contains headUniVars + for (String variable : headUniVars) { + if (!bodyVars.contains(variable)) + throw new ParseException("Unsafe rule " + head + " :- " + body + "\nUniversal variable " + variable + " occurs in head but not in body."); + } + + return Expressions.makeRule(Expressions.makePositiveConjunction(head), Expressions.makeConjunction(body)); + } +} + +List < PositiveLiteral > listOfPositiveLiterals(FormulaContext context) throws PrefixDeclarationException : { + PositiveLiteral l; + List < PositiveLiteral > list = new ArrayList < PositiveLiteral > (); +} { + l = positiveLiteral(context) { list.add(l); } ( < COMMA > l = 
positiveLiteral(context) { list.add(l); } )* {
+        return list;
+    }
+}
+
+List < Literal > listOfLiterals(FormulaContext context) throws PrefixDeclarationException : {
+    Literal l;
+    List < Literal > list = new ArrayList < Literal > ();
+} {
+    l = literal(context) { list.add(l); } ( < COMMA > l = literal(context) { list.add(l); } )* {
+        return list;
+    }
+}
+
+Literal literal(FormulaContext context) throws PrefixDeclarationException : {
+    Literal l;
+} {
+    ( l = positiveLiteral(context)
+    | l = negativeLiteral(context) ) {
+        return l;
+    }
+}
+
+PositiveLiteral positiveLiteral(FormulaContext context) throws PrefixDeclarationException : {
+    Token t;
+    List < Term > terms;
+    String predicateName;
+} {
+    predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > {
+        return Expressions.makePositiveLiteral(createPredicate(predicateName,terms.size()), terms);
+    }
+}
+
+Fact fact(FormulaContext context) throws PrefixDeclarationException : {
+    Token t;
+    List < Term > terms;
+    String predicateName;
+} {
+    predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > < DOT > {
+        try {
+            return Expressions.makeFact(createPredicate(predicateName,terms.size()), terms);
+        } catch (IllegalArgumentException e) {
+            throw makeParseExceptionWithCause("Error parsing fact: " + e.getMessage(), e);
+        }
+    }
+}
+
+NegativeLiteral negativeLiteral(FormulaContext context) throws PrefixDeclarationException : {
+    List < Term > terms;
+    String predicateName;
+} {
+    < TILDE > predicateName = predicateName() < LPAREN > terms = listOfTerms(context) < RPAREN > {
+        return Expressions.makeNegativeLiteral(createPredicate(predicateName,terms.size()), terms);
+    }
+}
+
+List < Term > listOfTerms(FormulaContext context) throws PrefixDeclarationException : {
+    Term t;
+    List < Term > list = new ArrayList < Term > ();
+} {
+    t = term(context) { list.add(t); } ( < COMMA > t = term(context) { list.add(t); } )* {
+        return list;
+    }
+}
+
+String predicateName() throws PrefixDeclarationException : {
+    Token t;
+    String s;
+} {
+    s = absoluteIri() { return s; }
+    | t = < VARORPREDNAME > { return absolutizeIri(t.image); }
+}
+
+Term term(FormulaContext context) throws PrefixDeclarationException : {
+    Token t;
+    String s;
+    Constant c;
+    Term tt;
+} {
+    s = absoluteIri() { return createConstant(s); }
+    | t = < VARORPREDNAME > { return createConstant(t.image); }
+    | LOOKAHEAD( < NAMED_NULL >, { isParsingOfNamedNullsAllowed() })
+    t = < NAMED_NULL > {
+        if (context == FormulaContext.BODY) {
+            throw new ParseException("Named nulls may not appear in the body of a rule.");
+        }
+
+        return createNamedNull(t.image);
+    }
+    | c = NumericLiteral() { return c; }
+    | c = RDFLiteral() { return c; }
+    | t = < UNIVAR > {
+        s = t.image.substring(1);
+        if (context == FormulaContext.HEAD)
+            headUniVars.add(s);
+        else if (context == FormulaContext.BODY)
+            bodyVars.add(s);
+        return createUniversalVariable(s);
+    }
+    | t = < EXIVAR > {
+        s = t.image.substring(1);
+        if (context == FormulaContext.HEAD)
+            headExiVars.add(s);
+        if (context == FormulaContext.BODY)
+            throw new ParseException("Existentially quantified variables cannot appear in the body. 
Line: " + t.beginLine + ", Column: "+ t.beginColumn); + return createExistentialVariable(s); + } + | try { + tt = ConfigurableLiteral () { return tt; } + } catch (ParsingException e) { + throw makeParseExceptionWithCause("Invalid configurable literal expression", e); + } +} + +Constant NumericLiteral() : { + Token t; +} { + t = < INTEGER > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_INTEGER); } + | t = < DECIMAL > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_DECIMAL); } + | t = < DOUBLE > { return createConstant(t.image, PrefixDeclarationRegistry.XSD_DOUBLE); } +} + +Constant RDFLiteral() throws PrefixDeclarationException : { + String lex; + Token lang = null; // Optional lang tag and datatype. + String dt = null; +} { + lex = String() ( lang = < LANGTAG > | < DATATYPE > dt = absoluteIri() )? { + if (lang != null) { + return createLanguageStringConstant(lex, lang.image); + } + return createConstant(lex, dt); + } +} + +Term ConfigurableLiteral() throws ParsingException : { + String s; + Token t; +} { + ( LOOKAHEAD( < PIPE_DELIMITED_LITERAL >, + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE) } ) + t = < PIPE_DELIMITED_LITERAL > { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PIPE, t.image, getSubParserFactory()); + } + | LOOKAHEAD( < HASH_DELIMITED_LITERAL >, + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.HASH) } ) + t = < HASH_DELIMITED_LITERAL > { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.HASH, t.image, getSubParserFactory()); + } + | LOOKAHEAD( < LPAREN >, + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PAREN) } ) + s = parenDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.PAREN, s, getSubParserFactory()); + } + | LOOKAHEAD( < LBRACE >, + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACE) } ) + s = braceDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, s, getSubParserFactory()); + } + | LOOKAHEAD( < LBRACKET >, + { isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.BRACKET) } ) + s = bracketDelimitedLiteral() { + return parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACKET, s, getSubParserFactory()); + } + ) +} + +String parenDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LPAREN > ( s = parenDelimitedLiteralBody() { sb.append(s); } )* < RPAREN > { + return sb.toString(); + } +} + +String parenDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNPAREN > { return t.image; } + | ( < LPAREN > s = parenDelimitedLiteralBody() < RPAREN > ) { return "(" + s + ")"; } +} + +String braceDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LBRACE > ( s = braceDelimitedLiteralBody() { sb.append(s); } )* < RBRACE > { + return sb.toString(); + } +} + +String braceDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNBRACE > { return t.image; } + | ( < LBRACE > s = braceDelimitedLiteralBody() < RBRACE > ) { return "{" + s + "}"; } +} + +String bracketDelimitedLiteral() : { + String s; + StringBuilder sb = new StringBuilder(); +} { + < LBRACKET > ( s = bracketDelimitedLiteralBody() { sb.append(s); } )* < RBRACKET > { + return sb.toString(); + } +} + +String bracketDelimitedLiteralBody() : { + Token t; + String s; +} { + t = < UNBRACKET > { return t.image; } + | ( < LBRACKET > s = bracketDelimitedLiteralBody() < RBRACKET > ) { return "[" + s + "]"; } +} + +String String() 
: { + Token t; +} { + ( t = < SINGLE_QUOTED_STRING > + | t = < DOUBLE_QUOTED_STRING > + | t = < TRIPLE_QUOTED_STRING > + | t = < SIXFOLD_QUOTED_STRING > + ) { return unescapeStr(t.image, t.beginLine, t.beginColumn); } +} + +LinkedList< Argument > Arguments() throws PrefixDeclarationException : { + Argument argument; + String str; + Rule rule; + PositiveLiteral positiveLiteral; + Term t; + LinkedList< Argument > rest = new LinkedList< Argument >(); +} { + ( LOOKAHEAD(ruleNoDot()) rule = ruleNoDot() { argument = Argument.rule(rule); } + | LOOKAHEAD(positiveLiteral(FormulaContext.HEAD)) positiveLiteral = positiveLiteral(FormulaContext.HEAD) { argument = Argument.positiveLiteral(positiveLiteral); } + | t = term(FormulaContext.HEAD) { argument = Argument.term(t); } + ) [rest = Arguments()] { + rest.addFirst(argument); + return rest; + } +} + +String PrefixedName() throws PrefixDeclarationException : { + Token t; +} { + t = < PNAME_LN > { return resolvePrefixedName(t.image); } +} + +// ------------------------------------------ + +// Whitespace +< * > SKIP : { + < WHITESPACE : [ " ", "\t", "\n", "\r", "\f" ] > +} + +// Comments +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > SKIP : { + < COMMENT : "%" ( ~[ "\n" ] )* "\n" > +} + +MORE : { + "@": DIRECTIVE +} + +< DEFAULT, TERM, DIRECTIVE_ARGUMENTS, BODY > MORE : { + "<" { pushState(); } : ABSOLUTE_IRI +} + +< DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS > TOKEN : { + < VARORPREDNAME : < A2Z> (< A2ZND >)* > + | < #A2Z : [ "a"-"z", "A"-"Z" ] > + | < #A2ZN : [ "a"-"z", "A"-"Z", "0"-"9" ] > + | < #A2ZND : [ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ] > + | < PNAME_LN : (< PN_PREFIX >)? ":" < PN_LOCAL > > + | < PNAME_NS : < PN_PREFIX > ":" > + | < #PN_CHARS_BASE : [ "a"-"z", "A"-"Z", "\u00c0"-"\u00d6", + "\u00d8"-"\u00f6", "\u00f8"-"\u02ff", + "\u0370"-"\u037d", "\u037f"-"\u1fff", + "\u200c"-"\u200d", "\u2070"-"\u218f", + "\u2c00"-"\u2fef", "\u3001"-"\ud7ff", + "\uf900"-"\ufffd" ] > + | < #PN_CHARS_U : < PN_CHARS_BASE > | "_" > + | < #PN_CHARS : ( < PN_CHARS_U > | [ "-", "0"-"9", "\u00b7", + "\u0300"-"\u036f", + "\u203f"-"\u2040" ] ) > + | < #PN_PREFIX : < PN_CHARS_BASE > + ( ( < PN_CHARS > | "." )* < PN_CHARS > )? > + | < #PN_LOCAL : ( < PN_CHARS_U > | [ ":", "0"-"9" ] ) + ( ( < PN_CHARS > | [ ".", ":" ] )* < PN_CHARS > )? > + | < COMMA : "," > + | < NAMED_NULL : "_:" ( < PN_CHARS_U > | [ "0"-"9" ] ) (( < PN_CHARS > | "." )* < PN_CHARS > )? > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 2); + } +} + +< DEFAULT, BODY, TERM, DIRECTIVE_ARGUMENTS, PAREN_DELIMITED > TOKEN : { + < LPAREN : "(" > { + pushState(); + + if (curLexState == TERM) { + SwitchTo(PAREN_DELIMITED); + } else if (curLexState == DEFAULT || curLexState == BODY) { + SwitchTo(TERM); + } + } + | < RPAREN : ")" > { popState(); } +} + +< TERM, DIRECTIVE_ARGUMENTS > TOKEN : { + < INTEGER : (< SIGN >)? < DIGITS > > + | < DECIMAL : (< SIGN >)? ( < DIGITS > "." (< DIGIT >)* + | "." < DIGITS > ) > + | < DOUBLE : (< SIGN >)? ( < DIGITS > "." (< DIGIT >)* < EXPONENT > + | "." (< DIGITS >) (< EXPONENT >) + | < DIGITS > < EXPONENT > ) > + | < #SIGN : [ "+", "-" ] > + | < #DIGIT : [ "0"-"9" ] > + | < #DIGITS : (< DIGIT >)+ > + | < #EXPONENT : [ "e", "E" ] (< SIGN >)? 
< DIGITS > > + | < COLON : ":" > +} + +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { + < ARROW : ":-" > +} + +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { + < TILDE : "~" > +} + +< ABSOLUTE_IRI > TOKEN : { + < IRI_ABSOLUTE : (~[ ">", "<", "\"", "{", "}", "^", "\\", "|", "`", "\u0000"-"\u0020" ])* ">" > { + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + popState(); + } +} + +< DIRECTIVE > TOKEN : { + < BASE : "base" > : DIRECTIVE_ARGUMENTS + | < PREFIX : "prefix" > : DIRECTIVE_ARGUMENTS + | < SOURCE : "source" > : DIRECTIVE_ARGUMENTS + | < CUSTOM_DIRECTIVE : < DIRECTIVENAME > > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 1); + }: DIRECTIVE_ARGUMENTS + | < DIRECTIVENAME : [ "a"-"z", "A"-"Z" ] ([ "a"-"z", "A"-"Z", "0"-"9", "-", "_" ])* > +} + +< DEFAULT, BODY, DIRECTIVE_ARGUMENTS > TOKEN : { + < DOT : "." > : DEFAULT +} + +< DIRECTIVE_ARGUMENTS > TOKEN : { + < ARITY : "[" < INTEGER > "]" > { + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } + | < ARGUMENT_NAME : < DIRECTIVENAME > > +} + +< TERM, DIRECTIVE_ARGUMENTS > TOKEN : { + < UNIVAR : "?" < VARORPREDNAME > > + | < EXIVAR : "!" < VARORPREDNAME > > + | < LANGTAG : "@" ( < A2Z > )+ ( "-" ( < A2ZN > )+ )? > { + matchedToken.image = JavaCCParserBase.stripChars(matchedToken.image, 1); + } + | < DATATYPE : "^^" > +} + +< TERM, DIRECTIVE_ARGUMENTS > MORE : { + < "'" > { pushState(); } : SINGLE_QUOTED + | < "\"" > { pushState(); } : DOUBLE_QUOTED + | < "'''" > { pushState(); }: TRIPLE_QUOTED + | < "\"\"\"" > { pushState(); } : SIXFOLD_QUOTED + | < "|" > { pushState(); } : PIPE_DELIMITED + | < "#" > { pushState(); } : HASH_DELIMITED +} + +< TERM, BRACE_DELIMITED > TOKEN : { + < LBRACE : "{" > { pushState(); } : BRACE_DELIMITED +} + +< TERM, BRACKET_DELIMITED > TOKEN : { + < LBRACKET : "[" > { pushState(); } : BRACKET_DELIMITED +} + +< PIPE_DELIMITED > TOKEN : { + < PIPE_DELIMITED_LITERAL : ( ~ [ "|" ] )* "|" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< HASH_DELIMITED > TOKEN : { + < HASH_DELIMITED_LITERAL : ( ~ [ "#" ] )* "#" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< PAREN_DELIMITED > TOKEN : { + < UNPAREN : ( ~ [ "(", ")" ] )+ > +} + +< BRACE_DELIMITED > TOKEN : { + < RBRACE : "}" > { popState(); } + | < UNBRACE : (~ [ "{", "}" ] )+ > +} + +< BRACKET_DELIMITED > TOKEN : { + < RBRACKET : "]" > { popState(); } + | < UNBRACKET : ( ~ [ "[", "]" ] )+ > +} + +< SINGLE_QUOTED > TOKEN : { + < SINGLE_QUOTED_STRING : ( ~[ "'", "\\", "\n", "\r" ] + | < ESCAPE_SEQUENCE > )* "'" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< DOUBLE_QUOTED > TOKEN : { + < DOUBLE_QUOTED_STRING : ( ~[ "\"", "\\", "\n", "\r" ] + | < ESCAPE_SEQUENCE > )* "\"" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 1); + } +} + +< TRIPLE_QUOTED > TOKEN : { + < TRIPLE_QUOTED_STRING : ( ~[ "'", "\\" ] + | < ESCAPE_SEQUENCE > + | ( "'" ~[ "'" ] ) + | ( "''" ~[ "'" ] ) )* "'''" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 3); + } +} + +< SIXFOLD_QUOTED > TOKEN : { + < SIXFOLD_QUOTED_STRING : ( ~[ "\"", "\\" ] + | < ESCAPE_SEQUENCE > + | ( "\"" ~[ "\"" ] ) + | ( "\"\"" ~[ "\"" ] ) )* "\"\"\"" > { + popState(); + matchedToken.image = JavaCCParserBase.stripDelimiters(matchedToken.image, 3); + } +} + +< 
SINGLE_QUOTED, DOUBLE_QUOTED, TRIPLE_QUOTED, SIXFOLD_QUOTED > MORE : { + < ESCAPE_SEQUENCE : "\\" [ "t", "b", "n", "r", "f", "\\", "\"", "'" ] > +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java new file mode 100644 index 000000000..981632edf --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBase.java @@ -0,0 +1,400 @@ +package org.semanticweb.rulewerk.parser.javacc; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.HashSet; +import java.util.List; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.LocalPrefixDeclarationRegistry; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +/** + * Basic methods used in the JavaCC-generated parser. + * + * Implementation of some string escaping methods adapted from Apache Jena, + * released under Apache 2.0 license terms. 
+ *
+ * @see https://github.com/apache/jena/blob/master/jena-core/src/main/java/org/apache/jena/n3/turtle/ParserBase.java
+ *
+ * @author Markus Kroetzsch
+ * @author Larry Gonzalez
+ * @author Maximilian Marx
+ * @author Jena developers, Apache Software Foundation (ASF)
+ *
+ */
+public class JavaCCParserBase {
+    private PrefixDeclarationRegistry prefixDeclarationRegistry;
+
+    private KnowledgeBase knowledgeBase;
+    private ParserConfiguration parserConfiguration;
+    private Skolemization skolemization = new Skolemization();
+    private TermFactory termFactory = new TermFactory();
+
+    /**
+     * "Local" variable to remember (universal) body variables during parsing.
+     */
+    protected final HashSet<String> bodyVars = new HashSet<>();
+    /**
+     * "Local" variable to remember existential head variables during parsing.
+     */
+    protected final HashSet<String> headExiVars = new HashSet<>();
+    /**
+     * "Local" variable to remember universal head variables during parsing.
+     */
+    protected final HashSet<String> headUniVars = new HashSet<>();
+
+    /**
+     * Defines the context for parsing sub-formulas.
+     *
+     * @author Markus Kroetzsch
+     *
+     */
+    public enum FormulaContext {
+        /**
+         * Formula is to be interpreted in the context of a rule head (positive
+         * occurrence).
+         */
+        HEAD,
+        /**
+         * Formula is to be interpreted in the context of a rule body (negative
+         * occurrence).
+         */
+        BODY
+    }
+
+    /**
+     * Defines delimiters for configurable literals.
+     *
+     * Since the parser is generated from a fixed grammar, we need to provide
+     * productions for these literals, even if they are not part of the syntax. With
+     * the {@link DefaultParserConfiguration}, any occurrence of these literals will
+     * result in a {@link ParseException}.
+     *
+     * @author Maximilian Marx
+     */
+    public enum ConfigurableLiteralDelimiter {
+        /**
+         * Literals of the form {@code |…|}
+         */
+        PIPE,
+        /**
+         * Literals of the form {@code #…#}
+         */
+        HASH,
+        /**
+         * Literals of the form {@code (…)}
+         */
+        PAREN,
+        /**
+         * Literals of the form {@code {…}}
+         */
+        BRACE,
+        /**
+         * Literals of the form {@code […]}
+         */
+        BRACKET,
+    }
+
+    public JavaCCParserBase() {
+        this.knowledgeBase = new KnowledgeBase();
+        this.prefixDeclarationRegistry = new LocalPrefixDeclarationRegistry();
+        this.parserConfiguration = new DefaultParserConfiguration();
+    }
+
+    AbstractConstant createConstant(String lexicalForm) throws ParseException {
+        String absoluteIri;
+        try {
+            absoluteIri = absolutizeIri(lexicalForm);
+        } catch (PrefixDeclarationException e) {
+            throw makeParseExceptionWithCause("Failed to parse IRI", e);
+        }
+        return termFactory.makeAbstractConstant(absoluteIri);
+    }
+
+    /**
+     * Creates a suitable {@link Constant} from the parsed data.
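+     * For instance (values illustrative), the lexical form {@code "42"} with
+     * datatype {@code xsd:integer} yields an integer-typed constant under the
+     * default configuration.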
+ * + * @param lexicalForm the string data (unescaped) + * @param datatype the datatype, or null if not provided + * @return suitable constant + */ + Constant createConstant(String lexicalForm, String datatype) throws ParseException { + try { + return parserConfiguration.parseDatatypeConstant(lexicalForm, datatype, termFactory); + } catch (ParsingException e) { + throw makeParseExceptionWithCause("Failed to parse Constant", e); + } + } + + NamedNull createNamedNull(String lexicalForm) { + return this.skolemization.getRenamedNamedNull(lexicalForm); + } + + UniversalVariable createUniversalVariable(String name) { + return termFactory.makeUniversalVariable(name); + } + + ExistentialVariable createExistentialVariable(String name) { + return termFactory.makeExistentialVariable(name); + } + + LanguageStringConstant createLanguageStringConstant(String string, String languageTag) { + return termFactory.makeLanguageStringConstant(string, languageTag); + } + + Predicate createPredicate(String name, int arity) { + return termFactory.makePredicate(name, arity); + } + + void addStatement(Statement statement) { + knowledgeBase.addStatement(statement); + } + + void addDataSource(String predicateName, int arity, DataSource dataSource) throws ParseException { + if (dataSource.getRequiredArity().isPresent()) { + Integer requiredArity = dataSource.getRequiredArity().get(); + if (arity != requiredArity) { + throw new ParseException( + "Invalid arity " + arity + " for data source, " + "expected " + requiredArity + "."); + } + } + + Predicate predicate = termFactory.makePredicate(predicateName, arity); + addStatement(new DataSourceDeclarationImpl(predicate, dataSource)); + } + + static String unescapeStr(String s, int line, int column) throws ParseException { + return unescape(s, '\\', line, column); + } + + static String unescape(String s, char escape, int line, int column) throws ParseException { + int i = s.indexOf(escape); + + if (i == -1) { + return s; + } + + // Dump the initial part straight into the string buffer + StringBuilder sb = new StringBuilder(s.substring(0, i)); + + for (; i < s.length(); i++) { + char ch = s.charAt(i); + // Keep line and column numbers. + switch (ch) { + case '\n': + case '\r': + line++; + column = 1; + break; + + default: + column++; + break; + } + + if (ch != escape) { + sb.append(ch); + continue; + } + + // Escape + if (i >= s.length() - 1) { + throw new ParseException("Illegal escape at end of string, line: " + line + ", column: " + column); + } + char ch2 = s.charAt(i + 1); + column = column + 1; + i = i + 1; + + // Not just codepoints. Must be a legal escape. + char ch3 = 0; + switch (ch2) { + case 'n': + ch3 = '\n'; + break; + case 't': + ch3 = '\t'; + break; + case 'r': + ch3 = '\r'; + break; + case 'b': + ch3 = '\b'; + break; + case 'f': + ch3 = '\f'; + break; + case '\'': + ch3 = '\''; + break; + case '\"': + ch3 = '\"'; + break; + case '\\': + ch3 = '\\'; + break; + default: + throw new ParseException("Unknown escape: \\" + ch2 + ", line: " + line + ", column: " + column); + } + sb.append(ch3); + } + return sb.toString(); + } + + /** + * Remove the first and last {@code n} characters from string {@code s} + * + * @param s string to strip delimiters from + * @param n number of characters to strip from both ends + * + * @return the stripped string. 
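+ *         For example, {@code stripDelimiters("(abc)", 1)} yields {@code abc}.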
+ */ + static String stripDelimiters(String s, int n) { + return s.substring(n, s.length() - n); + } + + /** Remove the first {@code n} characters from the string. */ + static String stripChars(String s, int n) { + return s.substring(n, s.length()); + } + + /** + * Reset the local set variables used when parsing a rule. + */ + void resetVariableSets() { + this.bodyVars.clear(); + this.headExiVars.clear(); + this.headUniVars.clear(); + } + + /** + * Convert a throwable into a ParseException. + * + * @param message The error message. + * @param cause The {@link Throwable} that caused this exception. + * + * @return A {@link ParseException} with appropriate cause and message. + */ + protected ParseException makeParseExceptionWithCause(String message, Throwable cause) { + ParseException parseException = new ParseException(message); + parseException.initCause(cause); + return parseException; + } + + public void setKnowledgeBase(KnowledgeBase knowledgeBase) { + this.knowledgeBase = knowledgeBase; + } + + public KnowledgeBase getKnowledgeBase() { + return knowledgeBase; + } + + public void setParserConfiguration(ParserConfiguration parserConfiguration) { + this.parserConfiguration = parserConfiguration; + } + + public ParserConfiguration getParserConfiguration() { + return parserConfiguration; + } + + Skolemization getSkolemization() { + return skolemization; + } + + void setSkolemization(Skolemization skolemization) { + this.skolemization = skolemization; + } + + public void setPrefixDeclarationRegistry(PrefixDeclarationRegistry prefixDeclarationRegistry) { + this.prefixDeclarationRegistry = prefixDeclarationRegistry; + } + + public PrefixDeclarationRegistry getPrefixDeclarationRegistry() { + return this.prefixDeclarationRegistry; + } + + DataSource parseDataSourceSpecificPartOfDataSourceDeclaration(PositiveLiteral declaration) throws ParseException { + try { + return parserConfiguration.parseDataSourceSpecificPartOfDataSourceDeclaration(declaration); + } catch (ParsingException e) { + throw makeParseExceptionWithCause( + "Failed while trying to parse the source-specific part of a data source declaration", e); + } + } + + Term parseConfigurableLiteral(ConfigurableLiteralDelimiter delimiter, String syntacticForm, + SubParserFactory subParserFactory) throws ParsingException { + return parserConfiguration.parseConfigurableLiteral(delimiter, syntacticForm, subParserFactory); + } + + KnowledgeBase parseDirectiveStatement(String name, List arguments, SubParserFactory subParserFactory) + throws ParseException { + try { + return parserConfiguration.parseDirectiveStatement(name, arguments, subParserFactory); + } catch (ParsingException e) { + throw makeParseExceptionWithCause(e.getMessage(), e); + } + } + + boolean isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter delimiter) { + return parserConfiguration.isConfigurableLiteralRegistered(delimiter); + } + + boolean isParsingOfNamedNullsAllowed() { + return parserConfiguration.isParsingOfNamedNullsAllowed(); + } + + void setBase(String baseIri) throws PrefixDeclarationException { + prefixDeclarationRegistry.setBaseIri(baseIri); + } + + void setPrefix(String prefixName, String baseIri) throws PrefixDeclarationException { + prefixDeclarationRegistry.setPrefixIri(prefixName, baseIri); + } + + String absolutizeIri(String iri) throws PrefixDeclarationException { + return prefixDeclarationRegistry.absolutizeIri(iri); + } + + String resolvePrefixedName(String prefixedName) throws PrefixDeclarationException { + return 
prefixDeclarationRegistry.resolvePrefixedName(prefixedName); + } +} diff --git a/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java new file mode 100644 index 000000000..a607ac22e --- /dev/null +++ b/rulewerk-parser/src/main/java/org/semanticweb/rulewerk/parser/javacc/SubParserFactory.java @@ -0,0 +1,85 @@ +package org.semanticweb.rulewerk.parser.javacc; + +import java.io.ByteArrayInputStream; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.InputStream; + +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.RuleParser; + +/** + * Factory for creating a SubParser sharing configuration, (semantic) state, and + * prefixes, but with an independent input stream, to be used, e.g., for parsing + * arguments in data source declarations. The parser will start in the + * {@code DEFAULT} lexical state. + * + * @author Maximilian Marx + */ +public class SubParserFactory { + private final KnowledgeBase knowledgeBase; + private final ParserConfiguration parserConfiguration; + private final PrefixDeclarationRegistry prefixDeclarationRegistry; + private final Skolemization skolemization; + + /** + * Construct a SubParserFactory. + * + * @param parser the parser instance to get the (semantic) state from. + */ + SubParserFactory(final JavaCCParser parser) { + this.knowledgeBase = parser.getKnowledgeBase(); + this.prefixDeclarationRegistry = parser.getPrefixDeclarationRegistry(); + this.parserConfiguration = parser.getParserConfiguration(); + this.skolemization = parser.getSkolemization(); + } + + /** + * Create a new parser with the specified (semantic) state and given input. + * + * @param inputStream the input stream to parse. + * @param encoding encoding of the input stream. + * + * @return A new {@link JavaCCParser} bound to inputStream and with the + * specified parser state. 
+ */ + public JavaCCParser makeSubParser(final InputStream inputStream, final String encoding) { + final JavaCCParser subParser = new JavaCCParser(inputStream, encoding); + subParser.setKnowledgeBase(this.knowledgeBase); + subParser.setPrefixDeclarationRegistry(this.prefixDeclarationRegistry); + subParser.setParserConfiguration(this.parserConfiguration); + subParser.setSkolemization(this.skolemization); + + return subParser; + } + + public JavaCCParser makeSubParser(final InputStream inputStream) { + return this.makeSubParser(inputStream, RuleParser.DEFAULT_STRING_ENCODING); + } + + public JavaCCParser makeSubParser(final String string) { + return this.makeSubParser(new ByteArrayInputStream(string.getBytes())); + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java new file mode 100644 index 000000000..3381f05cc --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/CommandParserTest.java @@ -0,0 +1,69 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.net.URI; +import java.net.URISyntaxException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Command; +import org.semanticweb.rulewerk.core.model.api.Terms; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParser; + +public class CommandParserTest { + + @Test + public void parseCommand() throws ParsingException { + String input = "@query p(?X, a):- q(?X) \"string\" abcd p(a) ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals("query", command.getName()); + assertEquals(5, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromRule().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + assertTrue(command.getArguments().get(2).fromTerm().isPresent()); + assertTrue(command.getArguments().get(3).fromPositiveLiteral().isPresent()); + assertTrue(command.getArguments().get(4).fromTerm().isPresent()); + } + + @Test + public void parsePrefix() throws ParsingException, URISyntaxException { + String input = "@myprefix wdqs: <https://query.wikidata.org/> ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + assertEquals("wdqs:", Terms.extractString(command.getArguments().get(0).fromTerm().get())); + assertEquals(new URI("https://query.wikidata.org/"), Terms.extractIri(command.getArguments().get(1).fromTerm().get())); + } + + @Test + public void parseSourceDeclaration() throws ParsingException, URISyntaxException { + String input = "@mysource diseaseId[2]: 123 ."; + Command command = RuleParser.parseSyntaxFragment(input, JavaCCParser::command, "command", null); + assertEquals(2, command.getArguments().size()); + assertTrue(command.getArguments().get(0).fromTerm().isPresent()); + assertTrue(command.getArguments().get(1).fromTerm().isPresent()); + assertEquals("diseaseId[2]:", Terms.extractString(command.getArguments().get(0).fromTerm().get())); + assertEquals(123, Terms.extractInt(command.getArguments().get(1).fromTerm().get())); + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java new file mode 100644 index 000000000..eaedc2198 --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/DirectiveHandlerTest.java @@ -0,0 +1,70 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ +import static org.junit.Assert.*; + +import java.io.File; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Argument; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class DirectiveHandlerTest { + private static final String BASE_PATH = System.getProperty("user.dir"); + private static final String STRING = "src/test/resources/facts.rls"; + private static final Term STRINGTERM = Expressions.makeDatatypeConstant(STRING, + PrefixDeclarationRegistry.XSD_STRING); + private static final Term INTTERM = Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INT); + + private static final Argument TERM_STRING_ARGUMENT = Argument.term(STRINGTERM); + private static final Argument TERM_INT_ARGUMENT = Argument.term(INTTERM); + + @Test + public void validateStringArgument_stringArgument_succeeds() throws ParsingException { + assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_STRING_ARGUMENT, "string argument")); + } + + @Test(expected = ParsingException.class) + public void validateStringArgument_intArgument_throws() throws ParsingException { + assertEquals(STRING, DirectiveHandler.validateStringArgument(TERM_INT_ARGUMENT, "string argument")); + } + + @Test + public void validateTermArgument_termArgument_succeeds() throws ParsingException { + assertEquals(STRINGTERM, DirectiveHandler.validateTermArgument(TERM_STRING_ARGUMENT, "term argument")); + } + + @Test + public void validateFilenameArgument_filename_succeeds() throws ParsingException { + assertEquals(new File(BASE_PATH + File.separator + STRING), + DirectiveHandler.validateFilenameArgument(TERM_STRING_ARGUMENT, "filename argument", BASE_PATH)); + } + + @Test(expected = ParsingException.class) + public void validateFilenameArgument_invalidFilename_throws() throws ParsingException { + DirectiveHandler.validateFilenameArgument(Argument + .term(Expressions.makeDatatypeConstant(STRING + "-nonexistent", PrefixDeclarationRegistry.XSD_STRING)), + "filename argument", BASE_PATH); + } + +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java new file mode 100644 index 000000000..442e56de9 --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/EntityTest.java @@ -0,0 +1,227 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NegativeLiteral; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; + +public class EntityTest { + + @Test + public void languageStringConstantToStringRoundTripTest() throws ParsingException { + LanguageStringConstantImpl s = new LanguageStringConstantImpl("Test", "en"); + Predicate p = Expressions.makePredicate("p", 1); + Fact f3 = Expressions.makeFact(p, s); + assertEquals(f3, RuleParser.parseFact(f3.toString())); + } + + @Test + public void abstractConstantStringToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl f = new AbstractConstantImpl("f"); + Fact f1 = Expressions.makeFact("p", f); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantAbsoluteToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("http://example.org/test"); + Fact f1 = Expressions.makeFact("p", a); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeDoubleToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("4.2E9"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeIntegerToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("11"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeBooleanToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("false"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void abstractConstantRelativeDecimalToStringRoundTripTest() throws ParsingException { + AbstractConstantImpl b = new AbstractConstantImpl("-5.0"); + Fact f1 = Expressions.makeFact("p", b); + assertEquals(f1, RuleParser.parseFact(f1.toString())); + } + + @Test + public void iriRoundTripTest() throws ParsingException { + String abstractConstant = "<1.0>"; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void iriRoundTripTest2() throws ParsingException { + String abstractConstant = ""; + Fact f2 = RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void iriRoundTripTest3() throws ParsingException { + String abstractConstant = ""; + Fact f2 = 
RuleParser.parseFact("p(" + abstractConstant + ")."); + assertEquals(abstractConstant, f2.getArguments().get(0).toString()); + } + + @Test + public void predicateIriRoundTripTest() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f = Expressions.makeFact("1.e1", a); + assertEquals(f, RuleParser.parseFact(f.toString())); + } + + @Test + public void predicateRoundTripTest3() throws ParsingException { + AbstractConstantImpl a = new AbstractConstantImpl("a"); + Fact f2 = Expressions.makeFact("a:1", a); + assertEquals(f2, RuleParser.parseFact(f2.toString())); + } + + @Test + public void iriAngularBracketsTest() throws ParsingException { + String constant = "a"; + Fact fact = RuleParser.parseFact("p(" + constant + ")."); + Term abstractConst = fact.getArguments().get(0); + assertEquals(constant, abstractConst.toString()); + Fact fact2 = RuleParser.parseFact("p(<" + constant + ">)."); + Term abstractConst2 = fact2.getArguments().get(0); + assertEquals(abstractConst, abstractConst2); + } + + @Test + public void ruleToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + Variable y = Expressions.makeUniversalVariable("Y"); + Variable z = Expressions.makeExistentialVariable("Z"); + PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals(rule1, RuleParser.parseRule(rule1.toString())); + } + + @Test + public void conjunctionToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + Variable y = Expressions.makeUniversalVariable("Y"); + Variable z = Expressions.makeExistentialVariable("Z"); + NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); + PositiveLiteral atom2 = Expressions.makePositiveLiteral("p", x, y); + PositiveLiteral headAtom1 = Expressions.makePositiveLiteral("q", x, z); + Conjunction bodyLiterals = Expressions.makeConjunction(atom1, atom2); + Conjunction headPositiveLiterals = Expressions.makePositiveConjunction(headAtom1); + Rule rule1 = new RuleImpl(headPositiveLiterals, bodyLiterals); + assertEquals(bodyLiterals, RuleParser.parseRule(rule1.toString()).getBody()); + assertEquals(headPositiveLiterals, RuleParser.parseRule(rule1.toString()).getHead()); + } + + @Test + public void positiveLiteralToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + PositiveLiteral atom1 = Expressions.makePositiveLiteral("p", x, c); + assertEquals(atom1, RuleParser.parseLiteral(atom1.toString())); + } + + @Test + public void literalToStringRoundTripTest() throws ParsingException { + Constant c = Expressions.makeAbstractConstant("c"); + Variable x = Expressions.makeUniversalVariable("X"); + NegativeLiteral atom1 = Expressions.makeNegativeLiteral("p", x, c); + assertEquals(atom1, RuleParser.parseLiteral(atom1.toString())); + } + + @Test + public void datatypeDoubleConstantToStringRoundTripTest() throws ParsingException { + String doubleConstant = 
"\"12.345E67\"^^"; + assertEquals(doubleConstant, + RuleParser.parseFact("p(" + doubleConstant + ").").getArguments().get(0).toString()); + assertEquals(doubleConstant, RuleParser.parseFact("p(12.345E67).").getArguments().get(0).toString()); + } + + @Test + public void datatypeFloatConstantToStringRoundTripTest() throws ParsingException { + String floatConstant = "\"0.5\"^^"; + assertEquals(floatConstant, RuleParser.parseFact("p(" + floatConstant + ").").getArguments().get(0).toString()); + } + + @Test + public void datatypeStringConstantToStringRoundTripTest() throws ParsingException { + String shortStringConstant = "\"data\""; + assertEquals(shortStringConstant, + RuleParser.parseFact("p(" + shortStringConstant + "^^).") + .getArguments().get(0).toString()); + assertEquals(shortStringConstant, + RuleParser.parseFact("p(" + shortStringConstant + ").").getArguments().get(0).toString()); + } + + @Test + public void datatypeIntegerConstantToStringRoundTripTest() throws ParsingException { + String shortIntegerConstant = "1"; + assertEquals(shortIntegerConstant, + RuleParser.parseFact("p(\"" + shortIntegerConstant + "\"^^).") + .getArguments().get(0).toString()); + assertEquals(shortIntegerConstant, + RuleParser.parseFact("p(" + shortIntegerConstant + ").").getArguments().get(0).toString()); + } + + @Test + public void datatypeDecimalToStringRoundTripTest() throws ParsingException { + String decimalConstant = "\"0.23\"^^"; + assertEquals(decimalConstant, + RuleParser.parseFact("p(" + decimalConstant + ").").getArguments().get(0).toString()); + assertEquals(decimalConstant, RuleParser.parseFact("p(0.23).").getArguments().get(0).toString()); + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java new file mode 100644 index 000000000..c9d513f6e --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserConfigurationTest.java @@ -0,0 +1,129 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; + +import java.util.ArrayList; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.DatatypeConstantHandler; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; + +public class ParserConfigurationTest { + private static final String TYPE_NAME = "test-type"; + private static final String SOURCE_NAME = "test-source"; + private static final String DIRECTIVE_NAME = "test-directive"; + + private ParserConfiguration parserConfiguration; + + @Mock + private DatatypeConstantHandler datatypeConstantHandler; + @Mock + private DataSourceDeclarationHandler dataSourceDeclarationHandler; + @Mock + private SubParserFactory subParserFactory; + @Mock + private DirectiveHandler directiveHandler; + + @Before + public void init() { + parserConfiguration = new ParserConfiguration(); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDataSource_duplicateName_throws() { + parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler) + .registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDatatype_duplicateName_throws() { + parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDatatype(TYPE_NAME, + datatypeConstantHandler); + } + + @Test + public void registerDataSource_datatypeName_succeeds() { + parserConfiguration.registerDatatype(TYPE_NAME, datatypeConstantHandler).registerDataSource(TYPE_NAME, + dataSourceDeclarationHandler); + } + + @Test + public void registerDatatype_dataSourceName_succeeds() { + parserConfiguration.registerDataSource(SOURCE_NAME, dataSourceDeclarationHandler).registerDatatype(SOURCE_NAME, + datatypeConstantHandler); + } + + @Test + public void isParsingOfNamedNullsAllowed_default_returnsTrue() { + assertTrue("named nulls are allowed by default", parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test + public void isParsingOfNamedNullsAllowed_disabled_returnsFalse() { + parserConfiguration.disallowNamedNulls(); + assertFalse("named nulls are disallowed after disallowing them", + parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test + public void isParsingOfNamedNullsAllowed_disabledAndEnabled_returnsTrue() { + parserConfiguration.disallowNamedNulls(); + assertFalse("named nulls are disallowed after disallowing them", + parserConfiguration.isParsingOfNamedNullsAllowed()); + parserConfiguration.allowNamedNulls(); + assertTrue("named nulls are allowed after allowing them", parserConfiguration.isParsingOfNamedNullsAllowed()); + } + + @Test(expected = ParsingException.class) + public void parseConfigurableLiteral_unregisteredLiteral_throws() throws ParsingException { + parserConfiguration.parseConfigurableLiteral(ConfigurableLiteralDelimiter.BRACE, "test", subParserFactory); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDirective_reservedName_throws() throws IllegalArgumentException { + parserConfiguration.registerDirective("base", directiveHandler); + } + + @Test + public void registerDirective_unreservedName_succeeds() throws IllegalArgumentException { + 
parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + } + + @Test(expected = IllegalArgumentException.class) + public void registerDirective_duplicateName_throws() throws IllegalArgumentException { + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + parserConfiguration.registerDirective(DIRECTIVE_NAME, directiveHandler); + } + + @Test(expected = ParsingException.class) + public void parseDirectiveStatement_unregisteredDirective_throws() throws ParsingException { + parserConfiguration.parseDirectiveStatement(DIRECTIVE_NAME, new ArrayList<>(), subParserFactory); + } + +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java new file mode 100644 index 000000000..99c4e513e --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/ParserTestUtils.java @@ -0,0 +1,52 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertTrue; + +import java.util.List; +import java.util.UUID; + +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull; + +public interface ParserTestUtils { + public default void assertUuid(String uuidLike) { + try { + UUID.fromString(uuidLike); + } catch (IllegalArgumentException e) { + throw new AssertionError("expected a valid UUID, but got \"" + uuidLike + "\"", e); + } + } + + public default void assertArgumentIsNamedNull(Literal literal, int argument) { + List arguments = literal.getArguments(); + assertTrue("argument is positive", argument >= 1); + assertTrue("argument is a valid position", argument <= arguments.size()); + Term term = arguments.get(argument - 1); + assertTrue("argument is a named null", term instanceof NamedNullImpl); + + if (term instanceof RenamedNamedNull) { + assertUuid(term.getName()); + } + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java new file mode 100644 index 000000000..639b30c01 --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserConfigurableLiteralTest.java @@ -0,0 +1,278 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.parser.ConfigurableLiteralHandler; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.ConfigurableLiteralDelimiter; +import org.semanticweb.rulewerk.parser.javacc.SubParserFactory; + +public class RuleParserConfigurableLiteralTest { + public static final Constant pipeConstant = Expressions.makeAbstractConstant("testPipe"); + public static final Constant hashConstant = Expressions.makeAbstractConstant("testHash"); + public static final Constant parenConstant = Expressions.makeAbstractConstant("testParen"); + public static final Constant braceConstant = Expressions.makeAbstractConstant("testBrace"); + public static final Constant bracketConstant = Expressions.makeAbstractConstant("testBracket"); + + public static final ConfigurableLiteralHandler pipeHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.PIPE, pipeConstant); + public static final ConfigurableLiteralHandler hashHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.HASH, hashConstant); + public static final ConfigurableLiteralHandler parenHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.PAREN, parenConstant); + public static final ConfigurableLiteralHandler braceHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACE, braceConstant); + public static final ConfigurableLiteralHandler bracketHandler = getMockLiteralHandler( + ConfigurableLiteralDelimiter.BRACKET, bracketConstant); + + private ParserConfiguration parserConfiguration; + + @Before + public void init() { + parserConfiguration = new ParserConfiguration(); + } + + @Test(expected = ParsingException.class) + public void parseLiteral_unregisteredCustomLiteral_throws() throws ParsingException { + RuleParser.parseLiteral("p(|test|)"); + } + + @Test + public void registerLiteral_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); + assertTrue("Configurable Literal Handler has been registered", + parserConfiguration.isConfigurableLiteralRegistered(ConfigurableLiteralDelimiter.PIPE)); + } + + @Test(expected = IllegalArgumentException.class) + public void registerLiteral_duplicateHandler_throws() throws ParsingException, IllegalArgumentException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PIPE, hashHandler); + } + + @Test + public void 
parseLiteral_customPipeLiteral_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler); + Literal result = RuleParser.parseLiteral("p(|test|)", parserConfiguration); + assertEquals(pipeConstant, result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_customHashLiteral_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler); + Literal result = RuleParser.parseLiteral("p(#test#)", parserConfiguration); + assertEquals(hashConstant, result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_customParenLiteral_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler); + Literal result = RuleParser.parseLiteral("p((test))", parserConfiguration); + assertEquals(parenConstant, result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_customBraceLiteral_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); + Literal result = RuleParser.parseLiteral("p({test})", parserConfiguration); + assertEquals(braceConstant, result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_customBracketLiteral_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p([test])", parserConfiguration); + assertEquals(bracketConstant, result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_mixedLiterals_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler); + Literal result = RuleParser.parseLiteral("p(||, #test#, [], {})", parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(pipeConstant, hashConstant, bracketConstant, braceConstant)); + assertEquals(expected, constants); + } + + @Test + public void parseLiteral_nontrivialPipeLiteral_succeeds() throws ParsingException { + String label = "this is a test, do not worry."; + String input = "p(|" + label + "|)"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_nestedParenLiterals_succeeds() throws ParsingException { + String label = "(((this is a test, do not worry.)))"; + String input = "p((" + label + "))"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_multipleParenLiterals_succeeds() throws ParsingException { + String input = "p((test), (tset))"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = 
result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void parseLiteral_multipleNestedParenLiterals_succeeds() throws ParsingException { + String input = "p(((test)), ((tset), (tst)))"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("(test)"), makeReversedConstant("(tset), (tst)"))); + assertEquals(expected, constants); + } + + @Test(expected = ParsingException.class) + public void parseLiteral_mismatchedNestedParenLiteral_throws() throws ParsingException { + String input = "p((test ())"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PAREN, reversingHandler); + RuleParser.parseLiteral(input, parserConfiguration); + } + + @Test + public void parseLiteral_nestedBraceLiteral_succeeds() throws ParsingException { + String label = "{{{this is a test, do not worry.}}}"; + String input = "p({" + label + "})"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_multipleBraceLiterals_succeeds() throws ParsingException { + String input = "p({test}, {tset})"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + @Test + public void parseLiteral_multipleNestedBraceLiterals_succeeds() throws ParsingException { + String input = "p({{test}}, {{tset}})"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACE, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("{test}"), makeReversedConstant("{tset}"))); + assertEquals(expected, constants); + } + + @Test + public void parseLiteral_nestedBracketLiteral_succeeds() throws ParsingException { + String label = "[[[this is a test, do not worry.]]]"; + String input = "p([" + label + "])"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + assertEquals(makeReversedConstant(label), result.getConstants().toArray()[0]); + } + + @Test + public void parseLiteral_multipleBracketLiterals_succeeds() throws ParsingException { + String input = "p([test], [tset])"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("test"), makeReversedConstant("tset"))); + assertEquals(expected, constants); + } + + 
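+ /* An illustrative sketch, not part of the original test suite: it assumes only the reversingHandler and helpers defined below, and shows that the content of a nested configurable literal reaches the handler verbatim, inner delimiters included. */ + @Test + public void parseLiteral_nestedBracketSketch_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); + Literal result = RuleParser.parseLiteral("p([[ab]])", parserConfiguration); + // the handler receives "[ab]" verbatim and reverses it to "]ba[" + assertEquals(makeReversedConstant("[ab]"), result.getConstants().toArray()[0]); + } + 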
@Test + public void parseLiteral_multipleNestedBracketLiterals_succeeds() throws ParsingException { + String input = "p([[test]], [[tset], [tst]])"; + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.BRACKET, reversingHandler); + Literal result = RuleParser.parseLiteral(input, parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(makeReversedConstant("[test]"), makeReversedConstant("[tset], [tst]"))); + assertEquals(expected, constants); + } + + @Test + public void parseLiteral_mixedAndNestedLiterals_succeeds() throws ParsingException { + parserConfiguration.registerLiteral(ConfigurableLiteralDelimiter.PIPE, pipeHandler) + .registerLiteral(ConfigurableLiteralDelimiter.HASH, hashHandler) + .registerLiteral(ConfigurableLiteralDelimiter.PAREN, parenHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACE, braceHandler) + .registerLiteral(ConfigurableLiteralDelimiter.BRACKET, bracketHandler); + Literal result = RuleParser.parseLiteral("p(|{}|, #test#, [|test, #test#, test|], ([], {}, [{[{}]}]))", + parserConfiguration); + List constants = result.getConstants().collect(Collectors.toList()); + List expected = new ArrayList<>( + Arrays.asList(pipeConstant, hashConstant, bracketConstant, parenConstant)); + assertEquals(expected, constants); + } + + static Constant makeReversedConstant(String name) { + StringBuilder builder = new StringBuilder(name); + return Expressions.makeAbstractConstant(builder.reverse().toString()); + } + + static ConfigurableLiteralHandler reversingHandler = (String syntacticForm, + SubParserFactory subParserFactory) -> makeReversedConstant(syntacticForm); + + static ConfigurableLiteralHandler getMockLiteralHandler(ConfigurableLiteralDelimiter delimiter, Constant constant) { + ConfigurableLiteralHandler handler = mock(ConfigurableLiteralHandler.class); + try { + doReturn(constant).when(handler).parseLiteral(ArgumentMatchers.anyString(), + ArgumentMatchers.any()); + } catch (ParsingException e) { + // ignore it, since the mock will not throw + } + return handler; + } + +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java new file mode 100644 index 000000000..6a22ed61b --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserDataSourceTest.java @@ -0,0 +1,227 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.parser.datasources.DataSourceDeclarationHandler; + +public class RuleParserDataSourceTest { + private static final String BASE_PATH = System.getProperty("user.dir") + File.separator; + private static final String EXAMPLE_RDF_FILE_PATH = "src/main/data/input/example.nt.gz"; + private static final String EXAMPLE_CSV_FILE_PATH = "src/main/data/input/example.csv"; + private static final String WIKIDATA_SPARQL_ENDPOINT_URI = "https://query.wikidata.org/sparql"; + private static final String EXAMPLE_TRIDENT_PATH = "src/main/data/trident"; + + @Test + public void testCsvSource() throws ParsingException, IOException { + String input = "@source p[2] : load-csv(\"" + EXAMPLE_CSV_FILE_PATH + "\") ."; + CsvFileDataSource csvds = new CsvFileDataSource(BASE_PATH + EXAMPLE_CSV_FILE_PATH); + assertEquals(csvds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); + } + + @Test + public void testRdfSource() throws ParsingException, IOException { + String input = "@source p[3] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; + RdfFileDataSource rdfds = new RdfFileDataSource(BASE_PATH + EXAMPLE_RDF_FILE_PATH); + assertEquals(rdfds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); + } + + @Test(expected = ParsingException.class) + public void testRdfSourceInvalidArity() throws ParsingException, IOException { + String input = "@source p[2] : load-rdf(\"" + EXAMPLE_RDF_FILE_PATH + "\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test + public void testSparqlSource() throws ParsingException, MalformedURLException { + String input = "@source p[2] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + SparqlQueryResultDataSource sparqlds = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), + "disease, doid", "?disease wdt:P699 ?doid ."); + assertEquals(sparqlds, RuleParser.parseDataSourceDeclaration(input).getDataSource()); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceInvalidArity_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(<" 
+ WIKIDATA_SPARQL_ENDPOINT_URI + + ">,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceMalformedUri_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceUnknownPrefix_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"wdqs:sparql\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_sparqlSourceUnparseableUrl_throws() + throws ParsingException, MalformedURLException { + String input = "@source p[3] : sparql(\"wdqs:\",\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void testSparqlSourceMalformedUrl() throws ParsingException, MalformedURLException { + String input = "@source p[2] : sparql(,\"disease, doid\",\"?disease wdt:P699 ?doid .\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_csvSourceInvalidPath_throws() throws ParsingException { + String input = "@source p[1] : load-csv(\"\0.csv\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void parseDataSourceDeclaration_rdfSourceInvalidPath_throws() throws ParsingException { + String input = "@source p[3] : load-rdf(\"\0.nt\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test(expected = ParsingException.class) + public void testUnknownDataSource() throws ParsingException { + String input = "@source p[2] : unknown-data-source(\"hello, world\") ."; + RuleParser.parseDataSourceDeclaration(input); + } + + @Test + public void testCustomDataSource() throws ParsingException { + CsvFileDataSource source = mock(CsvFileDataSource.class); + DataSourceDeclarationHandler handler = mock(DataSourceDeclarationHandler.class); + ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDataSource("mock-source", handler); + doReturn(source).when(handler).handleDataSourceDeclaration(ArgumentMatchers.<List<Term>>any(), + ArgumentMatchers.any()); + + String input = "@source p[2] : mock-source(\"hello\", \"world\") ."; + List<Term> expectedArguments = Arrays.asList( + Expressions.makeDatatypeConstant("hello", PrefixDeclarationRegistry.XSD_STRING), + Expressions.makeDatatypeConstant("world", PrefixDeclarationRegistry.XSD_STRING)); + RuleParser.parse(input, parserConfiguration); + final String expectedImportBasePath = System.getProperty("user.dir"); + + verify(handler).handleDataSourceDeclaration(ArgumentMatchers.eq(expectedArguments), + ArgumentMatchers.eq(expectedImportBasePath)); + } + + @Test + public void sparqlDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("p", 1); + SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(new URL(WIKIDATA_SPARQL_ENDPOINT_URI), + "var", "?var wdt:P31 wd:Q5 ."); + DataSourceDeclaration 
dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, dataSource); + RuleParser.parseInto(kb, dataSourceDeclaration.toString()); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void rdfDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("p", 3); + RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(EXAMPLE_RDF_FILE_PATH); + DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedRdfFileDataSource); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().setImportBasePath(""); + RuleParser.parseInto(kb, dataSourceDeclaration.toString(), parserConfiguration); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } + + @Test + public void csvDataSourceDeclarationToStringParsingTest() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + Predicate predicate1 = Expressions.makePredicate("q", 1); + CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(EXAMPLE_CSV_FILE_PATH); + final DataSourceDeclaration dataSourceDeclaration = new DataSourceDeclarationImpl(predicate1, + unzippedCsvFileDataSource); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().setImportBasePath(""); + RuleParser.parseInto(kb, dataSourceDeclaration.toString(), parserConfiguration); + assertEquals(dataSourceDeclaration, kb.getDataSourceDeclarations().get(0)); + } + + @Test(expected = ParsingException.class) + public void sparqlDataSourceDeclaration_invalidNumberOfArguments_throws() throws ParsingException { + RuleParser.parseDataSourceDeclaration("@source p[1] : sparql(<" + WIKIDATA_SPARQL_ENDPOINT_URI + ">) ."); + } + + @Test + public void parseDataSourceDeclaration_windowsStylePathName_succeeds() throws ParsingException, IOException { + RuleParser.parseDataSourceDeclaration("@source p[1] : load-csv(\"\\\\test\\\\with\\\\backslashes.csv\") ."); + } + + @Test + public void testTridentSource_succeeds() throws ParsingException, IOException { + String input = "@source p[2] : trident(\"" + EXAMPLE_TRIDENT_PATH + "\") ."; + DataSource parsed = RuleParser.parseDataSourceDeclaration(input).getDataSource(); + TridentDataSource expected = new TridentDataSource(BASE_PATH + EXAMPLE_TRIDENT_PATH); + + assertEquals(expected, parsed); + } + + @Test(expected = ParsingException.class) + public void testTridentSource_wrongParameterCount_fails() throws ParsingException, IOException { + String input = "@source p[2] : trident(\"" + EXAMPLE_TRIDENT_PATH + "\", 42) ."; + RuleParser.parseDataSourceDeclaration(input).getDataSource(); + } + + @Test(expected = ParsingException.class) + public void testTridentSource_wrongParameterType_fails() throws ParsingException, IOException { + String input = "@source p[2] : trident(42) ."; + RuleParser.parseDataSourceDeclaration(input).getDataSource(); + } + +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java new file mode 100644 index 000000000..378382700 --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserParseFactTest.java @@ -0,0 +1,96 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers 
* %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class RuleParserParseFactTest implements ParserTestUtils { + + private final Constant a = Expressions.makeDatatypeConstant("a", PrefixDeclarationRegistry.XSD_STRING); + private final Constant b = Expressions.makeDatatypeConstant("b%c", PrefixDeclarationRegistry.XSD_STRING); + + private final Fact factA = Expressions.makeFact("p", a); + private final Fact factAB = Expressions.makeFact("p", a, b); + + @Test + public void parseFact_string_succeeds() throws ParsingException { + assertEquals(factA, RuleParser.parseFact("p(\"a\") .")); + } + + @Test + public void parseFact_twoStrings_succeeds() throws ParsingException { + assertEquals(factAB, RuleParser.parseFact("p(\"a\",\"b%c\") .")); + } + + @Test(expected = ParsingException.class) + public void parseFact_nonGroundFact_throws() throws ParsingException { + String input = "p(?X) ."; + RuleParser.parseFact(input); + } + + @Test(expected = ParsingException.class) + public void parseFact_arityZeroFact_throws() throws ParsingException { + String input = "p() ."; + RuleParser.parseFact(input); + } + + @Test(expected = ParsingException.class) + public void parseFact_namedNullDisallowed_throws() throws ParsingException { + String input = "p(_:1) ."; + ParserConfiguration parserConfiguration = new ParserConfiguration().disallowNamedNulls(); + RuleParser.parseFact(input, parserConfiguration); + } + + @Test + public void parseFact_namedNull_succeeds() throws ParsingException { + String input = "p(_:1) ."; + Fact result = RuleParser.parseFact(input); + assertArgumentIsNamedNull(result, 1); + } + + @Test(expected = ParsingException.class) + public void parseFact_namedNullAsPredicateName_throws() throws ParsingException { + String input = "_:p(\"a\") ."; + RuleParser.parseFact(input); + } + + @Test(expected = ParsingException.class) + public void parseRule_namedNullInBody_throws() throws ParsingException { + String input = "q(_:head) :- p(_:body) ."; + RuleParser.parseRule(input); + } + + @Test + public void parseRule_namedNullInHead_succeeds() throws ParsingException { + String input = "q(_:head) :- p(\"a\") ."; + Rule result = RuleParser.parseRule(input); + Literal literal = result.getHead().getLiterals().get(0); + assertArgumentIsNamedNull(literal, 1); + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java new file mode 100644 index 000000000..dd8bc7582 --- /dev/null +++ 
b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/RuleParserTest.java @@ -0,0 +1,610 @@ +package org.semanticweb.rulewerk.parser; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.parser.javacc.JavaCCParserBase.FormulaContext; + +public class RuleParserTest implements ParserTestUtils { + + private final Variable x = Expressions.makeUniversalVariable("X"); + private final Variable y = Expressions.makeExistentialVariable("Y"); + private final Variable z = Expressions.makeUniversalVariable("Z"); + private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); + private final Constant d = Expressions.makeAbstractConstant("http://example.org/d"); + private final Constant e = Expressions.makeAbstractConstant("https://example.org/e"); + private final Constant abc = Expressions.makeDatatypeConstant("abc", PrefixDeclarationRegistry.XSD_STRING); + private final Constant xyz = Expressions.makeDatatypeConstant("xyz", PrefixDeclarationRegistry.XSD_STRING); + private final Literal atom1 = Expressions.makePositiveLiteral("http://example.org/p", this.x, this.c); + private final Literal negAtom1 = Expressions.makeNegativeLiteral("http://example.org/p", this.x, this.c); + private final Literal atom2 = Expressions.makePositiveLiteral("http://example.org/p", this.x, this.z); + private final PositiveLiteral atom3 = Expressions.makePositiveLiteral("http://example.org/q", this.x, this.y); + private final PositiveLiteral atom4 = Expressions.makePositiveLiteral("http://example.org/r", this.x, this.d); + private final PositiveLiteral fact1 = Expressions.makePositiveLiteral("http://example.org/s", this.c); + private final PositiveLiteral fact2 = 
Expressions.makePositiveLiteral("p", this.abc); + private final PositiveLiteral fact3 = Expressions.makePositiveLiteral("http://example.org/p", this.abc); + private final PositiveLiteral fact4 = Expressions.makePositiveLiteral("https://example.org/s", this.e); + private final PositiveLiteral fact5 = Expressions.makePositiveLiteral("q", this.xyz); + private final Conjunction body1 = Expressions.makeConjunction(this.atom1, this.atom2); + private final Conjunction body2 = Expressions.makeConjunction(this.negAtom1, this.atom2); + private final Conjunction head = Expressions.makePositiveConjunction(this.atom3, this.atom4); + private final Rule rule1 = Expressions.makeRule(this.head, this.body1); + private final Rule rule2 = Expressions.makeRule(this.head, this.body2); + + @Test + public void parse_explicitIri_succeeds() throws ParsingException { + final String input = "() ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); + } + + @Test + public void parse_withPrefix_succeeds() throws ParsingException { + final String input = "@prefix ex: . ex:s(ex:c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); + } + + @Test + public void parse_withBaseRelative_succeeds() throws ParsingException { + final String input = "@base . () ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); + } + + @Test + public void parse_withBase_succeeds() throws ParsingException { + final String input = "@base . s(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); + } + + @Test + public void parse_withoutBaseRelative_succeeds() throws ParsingException { + final String input = "s(c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final PositiveLiteral atom = Expressions.makePositiveLiteral("s", Expressions.makeAbstractConstant("c")); + assertEquals(Arrays.asList(atom), statements); + } + + @Test(expected = ParsingException.class) + public void parse_prefixConflict_throws() throws ParsingException { + final String input = "@prefix ex: . @prefix ex: . s(c) ."; + RuleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void parse_baseConflict_throws() throws ParsingException { + final String input = "@base . @base . s(c) ."; + RuleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void parse_undefinedPrefix_throws() throws ParsingException { + final String input = "ex:s(c) ."; + RuleParser.parse(input); + } + + @Test + public void parse_emptyPrefix_succeeds() throws ParsingException { + final String input = "@prefix : . :s(:c) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); + } + + @Test(expected = ParsingException.class) + public void testNoUniversalLiterals() throws ParsingException { + final String input = "p(?X) ."; + RuleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testNoExistentialLiterals() throws ParsingException { + final String input = "p(!X) ."; + RuleParser.parse(input); + } + + @Test + public void testSimpleRule() throws ParsingException { + final String input = "@base . 
" + " q(?X, !Y), r(?X, d) :- p(?X,c), p(?X,?Z) . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule1), statements); + } + + @Test + public void testFactWithCommentSymbol() throws ParsingException { + final String input = "t(\"%test\") . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(Expressions.makeFact("t", + Expressions.makeDatatypeConstant("%test", PrefixDeclarationRegistry.XSD_STRING))), statements); + } + + @Test + public void testNegationRule() throws ParsingException { + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?X,c), p(?X,?Z) . "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule2), statements); + } + + @Test(expected = ParsingException.class) + public void testUnsafeNegationRule() throws ParsingException { + final String input = "@base . " + " q(?X, !Y), r(?X, d) :- ~p(?Y,c), p(?X,?Z) . "; + RuleParser.parse(input); + } + + @Test + public void testWhiteSpace() throws ParsingException { + final String input = "@base \n\n . " + + " q(?X, !Y) , r(?X, d\t ) \n\n:- p(?X,c), p(?X,\n?Z) \n. "; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.rule1), statements); + } + + @Test(expected = ParsingException.class) + public void testNoUnsafeVariables() throws ParsingException { + final String input = "p(?X,?Y) :- q(?X) ."; + RuleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testNoConflictingQuantificationVariables() throws ParsingException { + final String input = "p(?X,!X) :- q(?X) ."; + RuleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testNoBodyExistential() throws ParsingException { + final String input = "p(?X) :- q(?X,!Y) ."; + RuleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void testNoDollarVariables() throws ParsingException { + final String input = "p($X) :- q($X) ."; + RuleParser.parse(input); + } + + @Test + public void testIntegerLiteral() throws ParsingException { + final String input = "p(42)"; + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); + assertEquals(integerLiteral, RuleParser.parseLiteral(input)); + } + + @Test + public void testAbbreviatedIntegerLiteral() throws ParsingException { + final String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . 
" + "p(\"42\"^^xsd:integer) ."; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); + assertEquals(Arrays.asList(integerLiteral), statements); + } + + @Test + public void testFullIntegerLiteral() throws ParsingException { + final String input = "p(\"42\"^^<" + PrefixDeclarationRegistry.XSD_INTEGER + "> )"; + final PositiveLiteral integerLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("42", PrefixDeclarationRegistry.XSD_INTEGER)); + assertEquals(integerLiteral, RuleParser.parseLiteral(input)); + } + + @Test + public void testDecimalLiteral() throws ParsingException { + final String input = "p(-5.0)"; + final PositiveLiteral decimalLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("-5.0", PrefixDeclarationRegistry.XSD_DECIMAL)); + assertEquals(decimalLiteral, RuleParser.parseLiteral(input)); + } + + @Test + public void testDoubleLiteral() throws ParsingException { + final String input = "p(4.2E9)"; + final PositiveLiteral doubleLiteral = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("4.2E9", PrefixDeclarationRegistry.XSD_DOUBLE)); + assertEquals(doubleLiteral, RuleParser.parseLiteral(input)); + } + + @Test + public void testStringLiteral() throws ParsingException { + final String input = "p(\"abc\")"; + assertEquals(this.fact2, RuleParser.parseLiteral(input)); + } + + @Test(expected = ParsingException.class) + public void testIncompleteStringLiteral() throws ParsingException { + final String input = "p(\"abc)"; + RuleParser.parseLiteral(input); + } + + @Test + public void parseLiteral_escapeSequences_succeeds() throws ParsingException { + final String input = "p(\"_\\\"_\\\\_\\n_\\t_\")"; // User input: p("_\"_\\_\n_\t_") + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(input)); + } + + @Test + public void parseLiteral_escapeSequences_roundTrips() throws ParsingException { + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\"_\\_\n_\t_", PrefixDeclarationRegistry.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); + } + + @Test + public void parseLiteral_allEscapeSequences_succeeds() throws ParsingException { + // User input: p("_\n_\t_\r_\b_\f_\'_\"_\\_") + final String input = "p(\"_\\n_\\t_\\r_\\b_\\f_\\'_\\\"_\\\\_\")"; + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(input)); + } + + @Test + public void parseLiteral_allEscapeSequences_roundTrips() throws ParsingException { + final PositiveLiteral fact = Expressions.makePositiveLiteral("p", + Expressions.makeDatatypeConstant("_\n_\t_\r_\b_\f_\'_\"_\\_", PrefixDeclarationRegistry.XSD_STRING)); + assertEquals(fact, RuleParser.parseLiteral(fact.toString())); + } + + @Test(expected = ParsingException.class) + public void parseLiteral_invalidEscapeSequence_throws() throws ParsingException { + final String input = "p(\"\\ÿ\")"; + RuleParser.parseLiteral(input); + } + + @Test(expected = ParsingException.class) + public void 
parseLiteral_incompleteEscapeAtEndOfLiteral_throws() throws ParsingException {
+		final String input = "p(\"\\\")";
+		RuleParser.parseLiteral(input);
+	}
+
+	@Test
+	public void parseLiteral_multiLineLiteral_succeeds() throws ParsingException {
+		final String input = "p('''line 1\n\n" + "line 2\n" + "line 3''')";
+		final PositiveLiteral fact = Expressions.makePositiveLiteral("p",
+				Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING));
+		assertEquals(fact, RuleParser.parseLiteral(input));
+	}
+
+	@Test
+	public void parseLiteral_multiLineLiteral_roundTrips() throws ParsingException {
+		final PositiveLiteral fact = Expressions.makePositiveLiteral("p",
+				Expressions.makeDatatypeConstant("line 1\n\nline 2\nline 3", PrefixDeclarationRegistry.XSD_STRING));
+		assertEquals(fact, RuleParser.parseLiteral(fact.toString()));
+	}
+
+	@Test(expected = ParsingException.class)
+	public void testIncompleteStringLiteralMultiLine() throws ParsingException {
+		final String input = "p('''abc\ndef'')";
+		RuleParser.parseLiteral(input);
+	}
+
+	@Test
+	public void testFullLiteral() throws ParsingException {
+		final String input = "p(\"abc\"^^<" + PrefixDeclarationRegistry.XSD_STRING + ">)";
+		assertEquals(this.fact2, RuleParser.parseLiteral(input));
+	}
+
+	@Test
+	public void testUnicodeLiteral() throws ParsingException {
+		final String input = "p(\"\\u0061\\u0062\\u0063\")"; // "abc"
+		assertEquals(this.fact2, RuleParser.parseLiteral(input));
+	}
+
+	@Test
+	public void testUnicodeUri() throws ParsingException {
+		final String input = "@base <http://example.org/> . @prefix ex: <http://example.org/> . ex:\\u0073(c) .";
+		final ArrayList<Statement> statements = new ArrayList<>(RuleParser.parse(input).getStatements());
+		assertEquals(Arrays.asList(this.fact1), statements);
+	}
+
+	@Test
+	public void testPrefixedLiteral() throws ParsingException {
+		final String input = "@prefix xsd: <" + PrefixDeclarationRegistry.XSD + "> . " + "p(\"abc\"^^xsd:string) .";
+		final ArrayList<Statement> statements = new ArrayList<>(RuleParser.parse(input).getStatements());
+		assertEquals(Arrays.asList(this.fact2), statements);
+	}
+
+	@Test
+	public void testLangStringLiteral() throws ParsingException {
+		final String input = "p(\"abc\"@en-gb)";
+		final PositiveLiteral fact = Expressions.makePositiveLiteral("p",
+				Expressions.makeLanguageStringConstant("abc", "en-gb"));
+		assertEquals(fact, RuleParser.parseLiteral(input));
+	}
+
+	@Test
+	public void testLineComments() throws ParsingException {
+		final String input = "@prefix ex: <http://example.org/> . % comment \n" + "%@prefix ex: \n" +
+				" ex:s(ex:c) . 
% comment \n"; + final ArrayList statements = new ArrayList<>(RuleParser.parse(input).getStatements()); + assertEquals(Arrays.asList(this.fact1), statements); + } + + @Test + public void testPositiveLiteral() throws ParsingException { + final String input = "(?X,)"; + final Literal literal = RuleParser.parsePositiveLiteral(input); + assertEquals(this.atom1, literal); + } + + @Test(expected = ParsingException.class) + public void testPositiveLiteralError() throws ParsingException { + final String input = "~ (?X,)"; + RuleParser.parsePositiveLiteral(input); + } + + @Test + public void testLiteral() throws ParsingException { + final String input = "~ (?X,)"; + final Literal literal = RuleParser.parseLiteral(input); + assertEquals(this.negAtom1, literal); + } + + @Test(expected = ParsingException.class) + public void tesLiteralError() throws ParsingException { + final String input = "(?X, facts = result.getFacts(); + + assertEquals(1, facts.size()); + this.assertArgumentIsNamedNull(facts.get(0), 1); + } + + @Test + public void parseTerm_NamedNull_succeeds() throws ParsingException { + final String input = "_:blank"; + final Term result = RuleParser.parseTerm(input); + this.assertUuid(result.getName()); + } + + @Test + public void parseTerm_NamedNullInHead_succeeds() throws ParsingException { + final String input = "_:blank"; + final Term result = RuleParser.parseTerm(input, FormulaContext.HEAD); + this.assertUuid(result.getName()); + } + + @Test(expected = ParsingException.class) + public void parseTerm_NamedNullInBodyContext_throws() throws ParsingException { + final String input = "_:blank"; + RuleParser.parseTerm(input, FormulaContext.BODY); + } + + @Test(expected = ParsingException.class) + public void testBParsingExceptione() throws ParsingException { + final String input = "_:(a) ."; + RuleParser.parse(input); + } + + @Test(expected = ParsingException.class) + public void parseLiteral_invalidLiteralString_throws() throws ParsingException { + final String input = "P(\"a\")^^whatever"; + RuleParser.parseLiteral(input); + } + + @Test(expected = ParsingException.class) + public void testNonIriTypeInDatatypeLiteral() throws ParsingException { + final String input = "\"a\"^^whatever"; + RuleParser.parseTerm(input); + } + + @Test + public void testIriTypeInDatatypeLiteral() throws ParsingException { + final String iri = "whatever"; + final String input = "P(\"a\"^^<" + iri + ">)"; + final Literal literal = RuleParser.parseLiteral(input); + final DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + assertEquals(iri, result.getDatatype()); + } + + @Test + public void predicateRelativeNumericIRITest() throws ParsingException { + final AbstractConstantImpl a = new AbstractConstantImpl("a"); + final Fact f = RuleParser.parseFact("<1.e1>(a)."); // 1.e1 == "10"^^xsd:double + final Fact f2 = Expressions.makeFact("1.e1", a); + assertEquals(f, f2); + } + + @Test + public void predicateAbsoluteIRITest() throws ParsingException { + final AbstractConstantImpl a = new AbstractConstantImpl("a"); + final Fact f = RuleParser.parseFact("(a)."); + final Fact f2 = Expressions.makeFact("a:b", a); + assertEquals(f, f2); + } + + @Test + public void parse_absoluteIriInRuleHead_succeeds() throws ParsingException { + RuleParser.parseRule("(?x) :- B(?x), C(?x) ."); + } + + @Test + public void parse_absoluteIriInRuleBody_succeeds() throws ParsingException { + RuleParser.parseRule("A(?x) :- B(?x), (?x) ."); + } + + @Test + public void parse_absoluteIrisInRule_succeeds() throws 
ParsingException { + RuleParser.parseRule("(?x) :- B(?x), (?x) ."); + } + + @Test + public void testCustomDatatype() throws ParsingException { + final String typename = "http://example.org/#test"; + final DatatypeConstant constant = Expressions.makeDatatypeConstant("test", typename); + final DatatypeConstantHandler handler = mock(DatatypeConstantHandler.class); + final ParserConfiguration parserConfiguration = new ParserConfiguration(); + parserConfiguration.registerDatatype(typename, handler); + doReturn(constant).when(handler).createConstant(ArgumentMatchers.eq("hello, world")); + + final String input = "p(\"hello, world\"^^<" + typename + ">)"; + final Literal literal = RuleParser.parseLiteral(input, parserConfiguration); + final DatatypeConstant result = (DatatypeConstant) literal.getConstants().toArray()[0]; + assertEquals(constant, result); + } + + @Test + public void parse_importStatement_succeeds() throws ParsingException { + final String input = "@import \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_relativeImportStatement_succeeds() throws ParsingException { + final String input = "@base . @import-relative \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact3); + final List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_importStatement_relativeImport_succeeds() throws ParsingException { + final String input = "@import \"src/test/resources/subdir/sibling.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact4, this.fact5); + final List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_importStatement_relativeParentImport_succeeds() throws ParsingException { + final String input = "@import \"src/test/resources/subdir/parent.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_relativeImportStatement_relativeImport_succeeds() throws ParsingException { + final String input = "@base . @import-relative \"src/test/resources/subdir/sibling.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact4, this.fact5); + final List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_relativeImportStatement_relativeParentImport_succeeds() throws ParsingException { + final String input = "@base . @import-relative \"src/test/resources/subdir/parent.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact2); + final List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } + + @Test + public void parse_import_renamesNamedNulls() throws ParsingException { + final String input = "p(_:blank) . 
@import \"src/test/resources/blank.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List facts = knowledgeBase.getFacts(); + assertEquals(2, facts.size()); + final Fact fact1 = facts.get(0); + final Fact fact2 = facts.get(1); + + assertNotEquals(fact1, fact2); + this.assertArgumentIsNamedNull(fact1, 1); + this.assertArgumentIsNamedNull(fact2, 1); + } + + @Test + public void parse_reusedNamedNulls_identical() throws ParsingException { + final String input = "p(_:blank) . q(_:blank) . p(_:other) ."; + + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List facts = knowledgeBase.getFacts(); + assertEquals(3, facts.size()); + final Fact fact1 = facts.get(0); + final Fact fact2 = facts.get(1); + final Fact fact3 = facts.get(2); + + assertEquals(fact1.getArguments().get(0), fact2.getArguments().get(0)); + assertNotEquals(fact1.getArguments().get(0), fact3.getArguments().get(0)); + this.assertArgumentIsNamedNull(fact1, 1); + this.assertArgumentIsNamedNull(fact2, 1); + this.assertArgumentIsNamedNull(fact3, 1); + } + + @Test + public void parseInto_duplicateImportStatements_succeeds() throws ParsingException { + final String input = "@import \"src/test/resources/facts.rls\" . "; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + RuleParser.parseInto(knowledgeBase, input); + } + + @Test + public void parseInto_duplicateRelativeImportStatements_succeeds() throws ParsingException { + final String input = "@import \"src/test/resources/facts.rls\" . @import-relative \"src/test/resources/facts.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + RuleParser.parseInto(knowledgeBase, input); + } + + @Test + public void parseInto_relativeImportRedeclaringBase_succeeds() throws ParsingException { + final String input = "@base . @import-relative \"src/test/resources/base.rls\" ."; + final KnowledgeBase knowledgeBase = RuleParser.parse(input); + final List expected = Arrays.asList(this.fact1, this.fact3); + final List result = knowledgeBase.getFacts(); + assertEquals(expected, result); + } +} diff --git a/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java new file mode 100644 index 000000000..0ba85b209 --- /dev/null +++ b/rulewerk-parser/src/test/java/org/semanticweb/rulewerk/parser/javacc/JavaCCParserBaseTest.java @@ -0,0 +1,102 @@ +package org.semanticweb.rulewerk.parser.javacc; + +/*- + * #%L + * Rulewerk Parser + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.parser.DatatypeConstantHandler; +import org.semanticweb.rulewerk.parser.DefaultParserConfiguration; +import org.semanticweb.rulewerk.parser.ParserConfiguration; +import org.semanticweb.rulewerk.parser.ParsingException; + +public class JavaCCParserBaseTest { + private JavaCCParserBase parserBase; + private static final String DATATYPE_NAME = "https://example.org/test-type"; + + private DatatypeConstantHandler datatypeConstantHandler = mock(DatatypeConstantHandler.class); + + @Before + public void init() { + parserBase = new JavaCCParserBase(); + } + + @Rule + public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void createConstant_undeclaredPrefix_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Failed to parse IRI"); + parserBase.createConstant("ïnvälid://test"); + } + + @Test + public void createConstant_throwingDatatypeConstantHandler_throws() throws ParseException, ParsingException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Failed to parse Constant"); + + when(datatypeConstantHandler.createConstant(anyString())).thenThrow(ParsingException.class); + ParserConfiguration parserConfiguration = new DefaultParserConfiguration().registerDatatype(DATATYPE_NAME, + datatypeConstantHandler); + parserBase.setParserConfiguration(parserConfiguration); + parserBase.createConstant("test", DATATYPE_NAME); + } + + @Test + public void unescapeStr_escapeChars_succeeds() throws ParseException { + String input = "\\\\test\r\ntest: \\n\\t\\r\\b\\f\\'\\\"\\\\"; + String expected = "\\test\r\ntest: \n\t\r\b\f\'\"\\"; + String result = JavaCCParserBase.unescapeStr(input, 0, 0); + assertEquals(result, expected); + } + + @Test + public void unescapeStr_illegalEscapeAtEndOfString_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Illegal escape at end of string"); + + JavaCCParserBase.unescapeStr("\\", 0, 0); + } + + @Test + public void unescapeStr_unknownEscapeSequence_throws() throws ParseException { + exceptionRule.expect(ParseException.class); + exceptionRule.expectMessage("Unknown escape"); + + JavaCCParserBase.unescapeStr("\\y", 0, 0); + } + + @Test + public void setBase_changingBase_throws() throws PrefixDeclarationException { + exceptionRule.expect(PrefixDeclarationException.class); + exceptionRule.expectMessage("Base is already defined as"); + + parserBase.setBase("https://example.org/"); + parserBase.setBase("https://example.com/"); + } +} diff --git a/rulewerk-parser/src/test/resources/base.rls b/rulewerk-parser/src/test/resources/base.rls new file mode 100644 index 000000000..69e925147 --- /dev/null +++ b/rulewerk-parser/src/test/resources/base.rls @@ -0,0 +1,4 @@ +@base . + +s(c) . +p("abc") . diff --git a/rulewerk-parser/src/test/resources/blank.rls b/rulewerk-parser/src/test/resources/blank.rls new file mode 100644 index 000000000..cc44c5389 --- /dev/null +++ b/rulewerk-parser/src/test/resources/blank.rls @@ -0,0 +1 @@ +p(_:blank) . 
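The parser tests above exercise RuleParser's static entry points (parse, parseInto, parseFact, parseLiteral) together with the ParserConfiguration hooks. As a reading aid, here is a minimal self-contained sketch of how those pieces fit together; it relies only on calls that appear in the tests, and the example rule and strings are illustrative rather than taken from the test resources.

import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.parser.ParserConfiguration;
import org.semanticweb.rulewerk.parser.ParsingException;
import org.semanticweb.rulewerk.parser.RuleParser;

public class RuleParserSketch {
	public static void main(String[] args) throws ParsingException {
		// Parse a prefix declaration and two facts from a string.
		final KnowledgeBase kb = RuleParser
				.parse("@prefix ex: <http://example.org/> . ex:s(ex:c) . p(\"abc\") .");

		// Extend the same knowledge base with further statements.
		RuleParser.parseInto(kb, "q(?X) :- p(?X) .");

		for (final Fact fact : kb.getFacts()) {
			System.out.println(fact);
		}

		// A ParserConfiguration restricts or extends the accepted syntax,
		// e.g. rejecting named nulls as in parseFact_namedNullDisallowed_throws.
		final ParserConfiguration noNulls = new ParserConfiguration().disallowNamedNulls();
		try {
			RuleParser.parseFact("p(_:1) .", noNulls);
		} catch (final ParsingException e) {
			System.out.println("rejected: " + e.getMessage());
		}
	}
}

The same configuration object also accepts custom datatype handlers via registerDatatype, as testCustomDatatype above demonstrates with a mocked DatatypeConstantHandler.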
diff --git a/rulewerk-parser/src/test/resources/facts.rls b/rulewerk-parser/src/test/resources/facts.rls new file mode 100644 index 000000000..ce985f5a9 --- /dev/null +++ b/rulewerk-parser/src/test/resources/facts.rls @@ -0,0 +1,4 @@ +@prefix ex: . + +ex:s(ex:c) . +p("abc") . diff --git a/rulewerk-parser/src/test/resources/subdir/facts.rls b/rulewerk-parser/src/test/resources/subdir/facts.rls new file mode 100644 index 000000000..b7b5da5ef --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/facts.rls @@ -0,0 +1,4 @@ +@prefix ex: . + +ex:s(ex:e) . +q("xyz") . diff --git a/rulewerk-parser/src/test/resources/subdir/parent.rls b/rulewerk-parser/src/test/resources/subdir/parent.rls new file mode 100644 index 000000000..4abb16ff5 --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/parent.rls @@ -0,0 +1 @@ +@import "../facts.rls" . diff --git a/rulewerk-parser/src/test/resources/subdir/sibling.rls b/rulewerk-parser/src/test/resources/subdir/sibling.rls new file mode 100644 index 000000000..0d1d18bfb --- /dev/null +++ b/rulewerk-parser/src/test/resources/subdir/sibling.rls @@ -0,0 +1 @@ +@import "facts.rls" . diff --git a/rulewerk-rdf/LICENSE.txt b/rulewerk-rdf/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-rdf/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
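One point worth making explicit about the import tests and the .rls resources above: plain @import reads the given path as-is (the tests use paths relative to the module directory), while @import-relative additionally resolves relative IRIs in the imported file against the current @base, which is why the same file yields fact3 (http://example.org/p) instead of fact2 (plain p). A small sketch under those assumptions:

import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.parser.ParsingException;
import org.semanticweb.rulewerk.parser.RuleParser;

public class ImportSketch {
	public static void main(String[] args) throws ParsingException {
		// @import: relative predicate names in facts.rls stay relative ("p").
		// Assumes the rulewerk-parser module directory as working directory.
		final KnowledgeBase plain = RuleParser.parse("@import \"src/test/resources/facts.rls\" .");
		System.out.println(plain.getFacts());

		// @import-relative: the same file, but relative IRIs now resolve against
		// the current @base, so "p" becomes http://example.org/p (fact3 above).
		final KnowledgeBase based = RuleParser
				.parse("@base <http://example.org/> . @import-relative \"src/test/resources/facts.rls\" .");
		System.out.println(based.getFacts());
	}
}

Note also, from parse_import_renamesNamedNulls, that named nulls in an imported file are renamed apart from those of the importing file, while reuse of the same named null within one file (parse_reusedNamedNulls_identical) yields the same term.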
diff --git a/vlog4j-rdf/pom.xml b/rulewerk-rdf/pom.xml similarity index 67% rename from vlog4j-rdf/pom.xml rename to rulewerk-rdf/pom.xml index 3aae49455..960fa4946 100644 --- a/vlog4j-rdf/pom.xml +++ b/rulewerk-rdf/pom.xml @@ -2,28 +2,33 @@ - + 4.0.0 - + - org.semanticweb.vlog4j - vlog4j-parent - 0.0.1 + org.semanticweb.rulewerk + rulewerk-parent + 0.10.0-SNAPSHOT - vlog4j-rdf + rulewerk-rdf jar - VLog4j RDF Support + Rulewerk RDF Support Bindings and utilities for working with RDF data ${project.groupId} - vlog4j-core + rulewerk-core ${project.version} - + + ${project.groupId} + rulewerk-vlog + ${project.version} + test + org.openrdf.sesame @@ -41,7 +46,13 @@ org.openrdf.sesame sesame-rio-turtle ${openrdf.sesame.version} - test + + + + org.apache.commons + commons-collections4 + 4.2 + test diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java new file mode 100644 index 000000000..7bc936719 --- /dev/null +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfModelConverter.java @@ -0,0 +1,200 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +import org.openrdf.model.BNode; +import org.openrdf.model.Literal; +import org.openrdf.model.Model; +import org.openrdf.model.Namespace; +import org.openrdf.model.Statement; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Class for converting RDF {@link Model}s to {@link PositiveLiteral} sets. + * Converts each {@code } triple statement of the + * given {@code rdfModel} into an {@link PositiveLiteral} of the form + * {@code TRIPLE(subject, predicate, object)}. The ternary predicate used for + * all literals generated from RDF triples is + * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE_NAME}. Subject, predicate and + * object {@link Value}s are converted to corresponding {@link Term}s: + *
    + *
+ * <ul>
+ * <li>{@link URI}s are converted to {@link Constant}s with the escaped URI
+ * String as name.</li>
+ * <li>{@link Literal}s are converted to {@link Constant}s with names containing
+ * the canonical form of the literal label, the data type and the language.</li>
+ * <li>{@link BNode}s are converted to {@link NamedNull}s with the generated
+ * blank ID as name. {@link BNode}s have unique generated IDs in the context of
+ * a {@link Model}; blank nodes with the same name loaded from different models
+ * will have different IDs.</li>
+ * </ul>
+ * + * @author Irina Dragoste + * @author Markus Kroetzsch + * + */ +public final class RdfModelConverter { + + private static Logger LOGGER = LoggerFactory.getLogger(RdfModelConverter.class); + + /** + * The name of the ternary predicate of literals generated from RDF triples by + * default. + */ + public static final String RDF_TRIPLE_PREDICATE_NAME = "TRIPLE"; + + final RdfValueToTermConverter rdfValueToTermConverter; + final Predicate triplePredicate; + + /** + * Construct an object that does not skolemize blank nodes and that uses a + * ternary predicate named {@link RdfModelConverter#RDF_TRIPLE_PREDICATE_NAME} + * for storing triples. + */ + public RdfModelConverter() { + this(false, RDF_TRIPLE_PREDICATE_NAME); + } + + /** + * Constructor. If {@code triplePredicateName} is a string, then RDF triples + * will be represented as ternary facts with a predicate of that name. If it is + * {@code null}, then triples will be converted to binary facts where the + * predicate is the RDF predicate; moreover, triples with rdf:rype as predicate + * will be converted to unary facts. + * + * @param skolemize if true, blank nodes are translated to constants + * with generated IRIs; otherwise they are replanced + * by named nulls with generated ids + * @param triplePredicateName name of the ternary predicate that should be used + * to store RDF triples; or null to generate binary + * predicates from the predicates of RDF triples + */ + public RdfModelConverter(final boolean skolemize, final String triplePredicateName) { + this.rdfValueToTermConverter = new RdfValueToTermConverter(skolemize); + if (triplePredicateName != null) { + this.triplePredicate = Expressions.makePredicate(triplePredicateName, 3); + } else { + this.triplePredicate = null; + } + } + + /** + * Converts each {@code } triple statement of the + * given {@code rdfModel} into a {@link Fact} of the form + * {@code TRIPLE(subject, predicate, object)}. See + * {@link RdfModelConverter#RDF_TRIPLE_PREDICATE_NAME}, the ternary predicate + * name used for all literals generated from RDF triples. + * + * @param model a {@link Model} of an RDF document, containing triple statements + * that will be converter to facts. + * @return a set of facts corresponding to the statements of given + * {@code rdfModel}. + */ + public Set rdfModelToFacts(final Model model) { + return model.stream().map((statement) -> this.rdfStatementToFact(statement)).collect(Collectors.toSet()); + } + + /** + * Adds data and prefix declarations from a given RDF {@link Model} to a given + * {@link KnowledgeBase}. + * + * @param knowledgeBase the {@link KnowledgeBase} to add to + * @param model the {@link Model} with the RDF data + */ + public void addAll(final KnowledgeBase knowledgeBase, final Model model) { + this.addPrefixes(knowledgeBase, model); + this.addFacts(knowledgeBase, model); + } + + /** + * Adds the data from a given RDF {@link Model} as {@link Fact}s to the given + * {@link KnowledgeBase}. + * + * @param knowledgeBase the {@link KnowledgeBase} to add {@link Fact}s to + * @param model the {@link Model} with the RDF data + */ + public void addFacts(final KnowledgeBase knowledgeBase, final Model model) { + model.stream().forEach((statement) -> { + knowledgeBase.addStatement(this.rdfStatementToFact(statement)); + }); + } + + /** + * Adds the prefixes declared for a given RDF {@link Model} to the given + * {@link KnowledgeBase}. If a prefix cannot be added for some reason, it is + * ignored and a warning is logged. 
+ * + * @param knowledgeBase the {@link KnowledgeBase} to add prefix declarations to + * @param model the {@link Model} with the RDF data + */ + public void addPrefixes(final KnowledgeBase knowledgeBase, final Model model) { + for (final Namespace namespace : model.getNamespaces()) { + try { + knowledgeBase.getPrefixDeclarationRegistry().setPrefixIri(namespace.getPrefix() + ":", + namespace.getName()); + } catch (final PrefixDeclarationException e) { + LOGGER.warn("Failed to set prefix \"" + namespace.getPrefix() + "\" from RDF model: " + e.getMessage()); + } + } + } + + /** + * Converts an RDF statement (triple) to a Rulewerk {@link Fact}. + * + * @param statement + * @return + */ + Fact rdfStatementToFact(final Statement statement) { + final Term subject = this.rdfValueToTermConverter.convertValue(statement.getSubject()); + final Term object = this.rdfValueToTermConverter.convertValue(statement.getObject()); + + if (this.triplePredicate != null) { + final Term predicate = this.rdfValueToTermConverter.convertUri(statement.getPredicate()); + return Expressions.makeFact(this.triplePredicate, Arrays.asList(subject, predicate, object)); + } else { + if (PrefixDeclarationRegistry.RDF_TYPE.equals(statement.getPredicate().stringValue()) + && statement.getObject() instanceof URI) { + final Predicate classPredicate = this.rdfValueToTermConverter.convertUriToPredicate((URI) statement.getObject(), 1); + return Expressions.makeFact(classPredicate, Arrays.asList(subject)); + } else { + final Predicate factPredicate = this.rdfValueToTermConverter.convertUriToPredicate(statement.getPredicate(), 2); + return Expressions.makeFact(factPredicate, Arrays.asList(subject, object)); + } + } + } + +} diff --git a/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java new file mode 100644 index 000000000..c152e19c5 --- /dev/null +++ b/rulewerk-rdf/src/main/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverter.java @@ -0,0 +1,104 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.openrdf.model.BNode; +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.datatypes.XMLDatatypeUtil; +import org.openrdf.rio.ntriples.NTriplesUtil; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; +import org.semanticweb.rulewerk.core.model.implementation.TermFactory; + +/** + * Helper class to convert RDF ters to Rulewerk {@link Term} objects. 
+ * + * @author Markus Kroetzsch + * + */ +final class RdfValueToTermConverter { + + final boolean skolemize; + final Skolemization skolemization = new Skolemization(); + final TermFactory termFactory = new TermFactory(); + + /** + * Constructor. + * + * @param skolemize if true, blank nodes are translated to constants with + * generated IRIs; otherwise they are replanced by named nulls + * with generated ids + */ + public RdfValueToTermConverter(boolean skolemize) { + this.skolemize = skolemize; + } + + public Term convertValue(final Value value) { + if (value instanceof BNode) { + return convertBlankNode((BNode) value); + } else if (value instanceof Literal) { + return convertLiteral((Literal) value); + } else if (value instanceof URI) { + return convertUri((URI) value); + } else { + throw new RulewerkRuntimeException("Unknown value type: " + value.getClass()); + } + } + + public Term convertBlankNode(final BNode bNode) { + // Note: IDs are generated to be unique in every Model, so our renaming might be + // redundant. But we want a RenamedNamedNull here, and a consistent name format + // is nice too. + if (skolemize) { + return skolemization.getSkolemConstant(bNode.getID(), termFactory); + } else { + return skolemization.getRenamedNamedNull(bNode.getID()); + } + } + + public Term convertUri(final URI uri) { + final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); + return termFactory.makeAbstractConstant(escapedURIString); + } + + public Term convertLiteral(final Literal literal) { + final URI datatype = literal.getDatatype(); + if (datatype != null) { + return termFactory.makeDatatypeConstant(XMLDatatypeUtil.normalize(literal.getLabel(), datatype), + datatype.toString()); + } else if (literal.getLanguage() != null) { + return termFactory.makeLanguageStringConstant(literal.getLabel(), literal.getLanguage()); + } else { + return termFactory.makeDatatypeConstant(literal.getLabel(), PrefixDeclarationRegistry.XSD_STRING); + } + } + + public Predicate convertUriToPredicate(final URI uri, int arity) { + final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); + return termFactory.makePredicate(escapedURIString, arity); + } + +} diff --git a/rulewerk-rdf/src/test/data/input/collections.ttl b/rulewerk-rdf/src/test/data/input/collections.ttl new file mode 100644 index 000000000..850fe80c7 --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/collections.ttl @@ -0,0 +1,20 @@ +@base . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + +<1>
() . +<2> (1) . +<3> (<#1> <#2>) . + +# <1> _:1 . +# _:1 rdf:first rdf:nil . +# _:1 rdf:last rdf:nil . +# <2> _:2 . +# _:2 rdf:first 1 . +# _:2 rdf:last rdf:nil . +# <3> _:3 . +# _:3 rdf:first <#1> . +# _:3 rdf:last _:4 . +# _:4 rdf:first <#2> . +# _:4 rdf:last rdf:nil . diff --git a/rulewerk-rdf/src/test/data/input/escapedCharacters.ttl b/rulewerk-rdf/src/test/data/input/escapedCharacters.ttl new file mode 100644 index 000000000..227b3f57b --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/escapedCharacters.ttl @@ -0,0 +1,6 @@ +@base . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + +<1> "\t\b\n\r\f\"\'\\" . diff --git a/rulewerk-rdf/src/test/data/input/exampleFacts.ttl b/rulewerk-rdf/src/test/data/input/exampleFacts.ttl new file mode 100644 index 000000000..db479954c --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/exampleFacts.ttl @@ -0,0 +1,13 @@ +@base . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + + rdfs:label "Carl Benz" ; + ; + "1844-11-25"^^xsd:date ; + "car"@en ; + "自动车"@zh-hans . + rdfs:label "Karlsruhe"@en ; + rdfs:label "卡尔斯鲁厄"@zh-hans ; + 311919 . diff --git a/rulewerk-rdf/src/test/data/input/labelledBNodes.ttl b/rulewerk-rdf/src/test/data/input/labelledBNodes.ttl new file mode 100644 index 000000000..0f93c6838 --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/labelledBNodes.ttl @@ -0,0 +1,8 @@ +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + +_:b0 rdf:first 1 . +_:b0 rdf:rest _:b1 . +_:b1 rdf:first 2 . +_:b1 rdf:rest rdf:nil . diff --git a/rulewerk-rdf/src/test/data/input/languageTags.ttl b/rulewerk-rdf/src/test/data/input/languageTags.ttl new file mode 100644 index 000000000..eb54b014e --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/languageTags.ttl @@ -0,0 +1,7 @@ +@base . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + +<1> "This is a test."@en . +<1> "Das ist ein Test."@de . diff --git a/rulewerk-rdf/src/test/data/input/literalValues.ttl b/rulewerk-rdf/src/test/data/input/literalValues.ttl new file mode 100644 index 000000000..65dfa998b --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/literalValues.ttl @@ -0,0 +1,11 @@ +@base . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + +<1> "1"^^xsd:integer . +<2> "1.0"^^xsd:decimal . +<3> "1.0E1"^^xsd:double . +<4> "true"^^xsd:boolean . +<5> "false"^^xsd:boolean . +<6> "test string" . diff --git a/rulewerk-rdf/src/test/data/input/relativeURIs.ttl b/rulewerk-rdf/src/test/data/input/relativeURIs.ttl new file mode 100644 index 000000000..6cf08372a --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/relativeURIs.ttl @@ -0,0 +1,10 @@ +@base . +@prefix ex: . +@prefix exhash: . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . + + . +<2> <#1> . +ex:3 ex:a exhash:1 . diff --git a/rulewerk-rdf/src/test/data/input/test-turtle-type-weird.ttl b/rulewerk-rdf/src/test/data/input/test-turtle-type-weird.ttl new file mode 100644 index 000000000..75cae7e3e --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/test-turtle-type-weird.ttl @@ -0,0 +1,4 @@ +@prefix : . +@prefix rdf: . + +:a rdf:type "test"@de . diff --git a/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl b/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl new file mode 100644 index 000000000..53844257a --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/test-turtle-type.ttl @@ -0,0 +1,4 @@ +@prefix : . +@prefix rdf: . + +:a rdf:type :c . 
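With the converter classes and the Turtle test inputs above in place, a short usage sketch may help tie them together; it is illustrative only. Loading the Model via Sesame's Rio.parse is assumed here (the integration tests below use their own RdfTestUtils helpers), and the input path is one of the test files above.

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Set;

import org.openrdf.model.Model;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.Rio;
import org.semanticweb.rulewerk.core.model.api.Fact;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.rdf.RdfModelConverter;

public class RdfConversionSketch {
	public static void main(String[] args) throws Exception {
		try (InputStream in = new FileInputStream("rulewerk-rdf/src/test/data/input/test-turtle.ttl")) {
			// Rio.parse is assumed to be provided by the sesame-rio dependencies above.
			final Model model = Rio.parse(in, "", RDFFormat.TURTLE);

			// Default mode: ternary TRIPLE(subject, predicate, object) facts;
			// blank nodes become named nulls.
			final Set<Fact> triples = new RdfModelConverter().rdfModelToFacts(model);
			triples.forEach(System.out::println);

			// Alternative mode: skolemize blank nodes into constants and derive
			// binary/unary predicates from the RDF predicates (null predicate name).
			final RdfModelConverter binary = new RdfModelConverter(true, null);
			final KnowledgeBase kb = new KnowledgeBase();
			binary.addAll(kb, model); // adds both facts and prefix declarations
		}
	}
}

The two modes correspond to the two constructors documented in RdfModelConverter: the ternary TRIPLE encoding preserves the RDF graph shape, while the binary/unary encoding (triple predicate name null) turns rdf:type triples into unary facts over the class predicate.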
diff --git a/rulewerk-rdf/src/test/data/input/test-turtle.ttl b/rulewerk-rdf/src/test/data/input/test-turtle.ttl new file mode 100644 index 000000000..3fbe612de --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/test-turtle.ttl @@ -0,0 +1,3 @@ +@prefix : <http://example.org/> . + +:a :b :c . diff --git a/rulewerk-rdf/src/test/data/input/unlabelledBNodes.ttl b/rulewerk-rdf/src/test/data/input/unlabelledBNodes.ttl new file mode 100644 index 000000000..6d0970137 --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/unlabelledBNodes.ttl @@ -0,0 +1,7 @@ +@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> . +@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> . +@prefix xsd: <http://www.w3.org/2001/XMLSchema#> . + +[ rdf:first 1 ] rdf:rest [ + rdf:first 2; + rdf:rest rdf:nil ]. diff --git a/rulewerk-rdf/src/test/data/input/unnormalizedLiteralValues.ttl b/rulewerk-rdf/src/test/data/input/unnormalizedLiteralValues.ttl new file mode 100644 index 000000000..96247612b --- /dev/null +++ b/rulewerk-rdf/src/test/data/input/unnormalizedLiteralValues.ttl @@ -0,0 +1,12 @@ +@base <file:/> . +@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> . +@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> . +@prefix xsd: <http://www.w3.org/2001/XMLSchema#> . + +<1> <a> "-001"^^xsd:integer . +<2> <a> +1 . +<3> <a> "-01.00"^^xsd:decimal . +<4> <a> +1.0 . +<5> <a> "-01.10E01"^^xsd:double . +<6> <a> +1.1E1 . +<7> <a> true . diff --git a/vlog4j-core/src/test/data/output/outputXYX.csv b/rulewerk-rdf/src/test/data/output/.keep similarity index 100% rename from vlog4j-core/src/test/data/output/outputXYX.csv rename to rulewerk-rdf/src/test/data/output/.keep diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java new file mode 100644 index 000000000..823a1589f --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsConvertRdfFiles.java @@ -0,0 +1,227 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.semanticweb.rulewerk.rdf.RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_FIRST; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_NIL; +import static org.semanticweb.rulewerk.rdf.RdfTestUtils.RDF_REST; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.commons.collections4.CollectionUtils; +import org.junit.Test; +import org.openrdf.model.Model; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public class IntegrationTestsConvertRdfFiles { + + final RdfModelConverter rdfModelConverter = new RdfModelConverter(); + + // FIXME: The openrdf parser supports neither '\b' nor '\f' (from ASCII) and + // encodes such characters as "\u0008" and "\u000C", respectively (the + // corresponding Unicode hex codes). + + private final static Constant file1 = Expressions.makeAbstractConstant("file:/1"); + private final static Constant file2 = Expressions.makeAbstractConstant("file:/2"); + private final static Constant file3 = Expressions.makeAbstractConstant("file:/3"); + private final static Constant file4 = Expressions.makeAbstractConstant("file:/4"); + private final static Constant file5 = Expressions.makeAbstractConstant("file:/5"); + private final static Constant file6 = Expressions.makeAbstractConstant("file:/6"); + private final static Constant file7 = Expressions.makeAbstractConstant("file:/7"); + private final static Constant fileA = Expressions.makeAbstractConstant("file:/a"); + + private final static Constant booleanTrue = Expressions.makeDatatypeConstant("true", + "http://www.w3.org/2001/XMLSchema#boolean"); + private final static Constant booleanFalse = Expressions.makeDatatypeConstant("false", + "http://www.w3.org/2001/XMLSchema#boolean"); + + private final static Constant decimalOne = Expressions.makeDatatypeConstant("1.0", + "http://www.w3.org/2001/XMLSchema#decimal"); + private final static Constant decimalMinusOne = Expressions.makeDatatypeConstant("-1.0", + "http://www.w3.org/2001/XMLSchema#decimal"); + + private final static Constant integerOne = Expressions.makeDatatypeConstant("1", + "http://www.w3.org/2001/XMLSchema#integer"); + private final static Constant integerMinusOne = Expressions.makeDatatypeConstant("-1", + "http://www.w3.org/2001/XMLSchema#integer"); + + private final static Constant doubleOnePointZero = Expressions.makeDatatypeConstant("1.0E1", + "http://www.w3.org/2001/XMLSchema#double"); + private final static Constant doubleOnePointOne = Expressions.makeDatatypeConstant("1.1E1", + "http://www.w3.org/2001/XMLSchema#double"); + private final static Constant doubleMinusOnePointOne = Expressions.makeDatatypeConstant("-1.1E1", + "http://www.w3.org/2001/XMLSchema#double"); + + private static final Set<Fact> expectedNormalizedFacts = new HashSet<>( + Arrays.asList(Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME,
Arrays.asList(file1, fileA, integerMinusOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file2, fileA, integerOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file3, fileA, decimalMinusOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file4, fileA, decimalOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file5, fileA, doubleMinusOnePointOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file6, fileA, doubleOnePointOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file7, fileA, booleanTrue)))); + + private static final Set<Fact> expectedLiteralFacts = new HashSet<>(Arrays.asList( + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file1, fileA, integerOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file2, fileA, decimalOne)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file3, fileA, doubleOnePointZero)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file4, fileA, booleanTrue)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file5, fileA, booleanFalse)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file6, fileA, + Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))))); + + private final static Constant exampleA = Expressions.makeAbstractConstant("http://example.org/a"); + private final static Constant example1 = Expressions.makeAbstractConstant("http://example.org/1"); + private final static Constant example2 = Expressions.makeAbstractConstant("http://example.org/2"); + private final static Constant example3 = Expressions.makeAbstractConstant("http://example.org/3"); + private final static Constant exampleHash1 = Expressions.makeAbstractConstant("http://example.org/#1"); + + private static final Set<Fact> expectedRelativeUriFacts = new HashSet<>(Arrays.asList( + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(example1, exampleA, exampleHash1)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(example2, exampleA, exampleHash1)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(example3, exampleA, exampleHash1)))); + + private static final Set<Fact> expectedEscapedCharacterFacts = new HashSet<>( + Arrays.asList(Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file1, fileA, Expressions + .makeDatatypeConstant("\t\u0008\n\r\u000C\"'\\", "http://www.w3.org/2001/XMLSchema#string"))))); + + private static final Set<Fact> expectedLanguageTagFacts = new HashSet<>(Arrays.asList( + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, + Arrays.asList(file1, fileA, Expressions.makeLanguageStringConstant("This is a test.", "en"))), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, + Arrays.asList(file1, fileA, Expressions.makeLanguageStringConstant("Das ist ein Test.", "de"))))); + + @Test + public void testDataTypesNormalized() throws RDFHandlerException, RDFParseException, IOException { + final Model model = RdfTestUtils + .parseFile(new File(RdfTestUtils.INPUT_FOLDER + "unnormalizedLiteralValues.ttl"), RDFFormat.TURTLE); + final Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + assertEquals(expectedNormalizedFacts, facts); + } + + @Test + public void testLiteralValuesPreserved() throws RDFHandlerException, RDFParseException, IOException { + final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "literalValues.ttl"), + RDFFormat.TURTLE); + final Set<Fact> facts =
rdfModelConverter.rdfModelToFacts(model); + assertEquals(expectedLiteralFacts, facts); + } + + @Test + public void testRelativeURIsMadeAbsolute() throws RDFHandlerException, RDFParseException, IOException { + final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "relativeURIs.ttl"), + RDFFormat.TURTLE); + final Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + assertEquals(expectedRelativeUriFacts, facts); + } + + @Test + public void testEscapedCharactersExpanded() throws RDFHandlerException, RDFParseException, IOException { + final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "escapedCharacters.ttl"), + RDFFormat.TURTLE); + final Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + assertEquals(expectedEscapedCharacterFacts, facts); + } + + @Test + public void testLanguageTagsPreserved() throws RDFHandlerException, RDFParseException, IOException { + final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "languageTags.ttl"), + RDFFormat.TURTLE); + final Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + assertEquals(expectedLanguageTagFacts, facts); + } + + @Test + public void testCollectionsPreserved() throws RDFHandlerException, RDFParseException, IOException { + final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "collections.ttl"), + RDFFormat.TURTLE); + final Set<Fact> factsFromModel = rdfModelConverter.rdfModelToFacts(model); + + final Term blank1 = RdfTestUtils.getObjectOfFirstMatchedTriple(file2, fileA, factsFromModel); + final Term blank2 = RdfTestUtils.getObjectOfFirstMatchedTriple(file3, fileA, factsFromModel); + final Term blank3 = RdfTestUtils.getObjectOfFirstMatchedTriple(blank2, RDF_REST, factsFromModel); + + final Set<Fact> expectedSetFacts = new HashSet<>( + Arrays.asList(Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file1, fileA, RDF_NIL)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file2, fileA, blank1)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, + Arrays.asList(blank1, RDF_FIRST, + Expressions.makeDatatypeConstant("1", PrefixDeclarationRegistry.XSD_INTEGER))), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(blank1, RDF_REST, RDF_NIL)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(file3, fileA, blank2)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, + Arrays.asList(blank2, RDF_FIRST, Expressions.makeAbstractConstant("file:/#1"))), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(blank2, RDF_REST, blank3)), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, + Arrays.asList(blank3, RDF_FIRST, Expressions.makeAbstractConstant("file:/#2"))), + Expressions.makeFact(RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(blank3, RDF_REST, RDF_NIL)))); + + assertEquals(expectedSetFacts, factsFromModel); + } + + @Test + public void testNumberOfBlankNodesCorrect() throws RDFParseException, RDFHandlerException, IOException { + final File labelledFile = new File(RdfTestUtils.INPUT_FOLDER + "labelledBNodes.ttl"); + final File unlabelledFile = new File(RdfTestUtils.INPUT_FOLDER + "unlabelledBNodes.ttl"); + final Set<NamedNull> labelledBlanks = this.getBlanksFromTurtleFile(labelledFile); + final Set<NamedNull> unlabelledBlanks = this.getBlanksFromTurtleFile(unlabelledFile); + + assertEquals(2, labelledBlanks.size()); + assertEquals(2, unlabelledBlanks.size()); + } + + @Test + public void testBlankNodesWithSameLabelAreDifferentInDifferentModels() + throws RDFParseException, RDFHandlerException, IOException { + final
File file = new File(RdfTestUtils.INPUT_FOLDER + "labelledBNodes.ttl"); + final Set<NamedNull> blanks1 = this.getBlanksFromTurtleFile(file); + final Set<NamedNull> blanks2 = this.getBlanksFromTurtleFile(file); + + assertTrue(CollectionUtils.intersection(blanks1, blanks2).isEmpty()); + } + + private Set<NamedNull> getBlanksFromTurtleFile(final File file) + throws RDFParseException, RDFHandlerException, IOException { + final Model model = RdfTestUtils.parseFile(file, RDFFormat.TURTLE); + final Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + + final Set<NamedNull> blanks = new HashSet<>(); + facts.forEach(fact -> blanks.addAll(fact.getNamedNulls().collect(Collectors.toSet()))); + return blanks; + } + +} diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java new file mode 100644 index 000000000..4143669f4 --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/IntegrationTestsReasonOverRdfFactsinVLog.java @@ -0,0 +1,111 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.junit.Test; +import org.openrdf.model.Model; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class IntegrationTestsReasonOverRdfFactsinVLog { + + final RdfModelConverter rdfModelConverter = new RdfModelConverter(); + + private final Constant carlBenz = Expressions.makeAbstractConstant("https://example.org/Carl-Benz"); + private final Constant invention = Expressions.makeAbstractConstant("https://example.org/invention"); + private final Constant labelEn = Expressions.makeLanguageStringConstant("car", "en"); + private final Constant labelZh = Expressions.makeLanguageStringConstant("\u81EA\u52A8\u8F66", "zh-hans"); + + private final Set<List<Constant>> expectedQueryResultsInvention = new HashSet<>( + Arrays.asList(Arrays.asList(carlBenz, invention, labelEn), Arrays.asList(carlBenz, invention, labelZh))); + + private static final Variable subject = Expressions.makeUniversalVariable("s"); + private static final Variable predicate = Expressions.makeUniversalVariable("p"); + private static final Variable object = Expressions.makeUniversalVariable("o"); + + @Test + public void testCanLoadRdfFactsIntoReasoner() throws RDFParseException, RDFHandlerException, IOException { + final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), + RDFFormat.TURTLE); + final Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(facts); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + final PositiveLiteral universalQuery = Expressions.makePositiveLiteral( + RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME, Arrays.asList(subject, predicate, object)); + final Set<List<Term>> queryResults = this.getQueryResults(reasoner, universalQuery); + assertTrue(!queryResults.isEmpty()); + } + } + + @Test + public void testQueryAnsweringOverRdfFacts() throws RDFParseException, RDFHandlerException, IOException { + final Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "exampleFacts.ttl"), + RDFFormat.TURTLE); + final Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(facts); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + + final PositiveLiteral inventionQuery = Expressions + .makePositiveLiteral(RdfModelConverter.RDF_TRIPLE_PREDICATE_NAME, carlBenz, invention, object); + assertEquals(expectedQueryResultsInvention, this.getQueryResults(reasoner, inventionQuery)); + } + } + + private Set<List<Term>> getQueryResults(final Reasoner reasoner, final
PositiveLiteral query) { + final QueryResultIterator queryResultIterator = reasoner.answerQuery(query, true); + + final Set<List<Term>> queryResults = new HashSet<>(); + queryResultIterator.forEachRemaining(queryResult -> queryResults.add(queryResult.getTerms())); + queryResultIterator.close(); + return queryResults; + } + +} diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java new file mode 100644 index 000000000..0ab610723 --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfModelConverterTest.java @@ -0,0 +1,153 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import org.junit.Test; +import org.openrdf.model.Model; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; + +public class RdfModelConverterTest { + + @Test + public void addToKnowledgeBase_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + rdfModelConverter.addAll(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + assertEquals("http://example.org/", knowledgeBase.getPrefixIri(":")); + } + + @Test + public void getFactSet_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + + Predicate predicate = Expressions.makePredicate("TRIPLE", 3); + Term terma =
Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + Set<Fact> expected = new HashSet<>(); + expected.add(fact); + + Set<Fact> facts = rdfModelConverter.rdfModelToFacts(model); + + assertEquals(expected, facts); + } + + @Test + public void addFactsCustomTriplePredicate_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, "mytriple"); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("mytriple", 3); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termb = Expressions.makeAbstractConstant("http://example.org/b"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termb, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + + @Test + public void addFactsNoTriplePredicate_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("http://example.org/b", 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeAbstractConstant("http://example.org/c"); + Fact fact = Expressions.makeFact(predicate, terma, termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + + @Test + public void addFactsNoTriplePredicateType_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle-type.ttl"), + RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate("http://example.org/c", 1); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Fact fact = Expressions.makeFact(predicate, terma); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + + @Test + public void addFactsNoTriplePredicateTypeWeird_succeeds() + throws RDFParseException, RDFHandlerException, IOException, PrefixDeclarationException { + RdfModelConverter rdfModelConverter = new RdfModelConverter(true, null); + Model model = RdfTestUtils.parseFile(new File(RdfTestUtils.INPUT_FOLDER + "test-turtle-type-weird.ttl"), RDFFormat.TURTLE); + KnowledgeBase knowledgeBase = new KnowledgeBase(); + + Predicate predicate = Expressions.makePredicate(PrefixDeclarationRegistry.RDF_TYPE, 2); + Term terma = Expressions.makeAbstractConstant("http://example.org/a"); + Term termc = Expressions.makeLanguageStringConstant("test", "de"); + Fact fact = Expressions.makeFact(predicate, terma,
termc); + + rdfModelConverter.addFacts(knowledgeBase, model); + + assertEquals(Arrays.asList(fact), knowledgeBase.getFacts()); + } + +} diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java new file mode 100644 index 000000000..fa689c4ad --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfTestUtils.java @@ -0,0 +1,97 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.util.Set; + +import org.openrdf.model.Model; +import org.openrdf.model.impl.LinkedHashModel; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFHandlerException; +import org.openrdf.rio.RDFParseException; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.Rio; +import org.openrdf.rio.helpers.StatementCollector; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; + +public final class RdfTestUtils { + + static final String INPUT_FOLDER = "src/test/data/input/"; + static final String OUTPUT_FOLDER = "src/test/data/output/"; + + static final Constant RDF_FIRST = Expressions + .makeAbstractConstant("http://www.w3.org/1999/02/22-rdf-syntax-ns#first"); + static final Constant RDF_REST = Expressions + .makeAbstractConstant("http://www.w3.org/1999/02/22-rdf-syntax-ns#rest"); + static final Constant RDF_NIL = Expressions.makeAbstractConstant("http://www.w3.org/1999/02/22-rdf-syntax-ns#nil"); + + /* + * This is a utility class. Therefore, it is best practice to do the following: + * (1) Make the class final, (2) make its constructor private, (3) make all its + * fields and methods static. This prevents the class's instantiation and + * inheritance.
+ */ + private RdfTestUtils() { + + } + + static Model parseFile(final File file, final RDFFormat rdfFormat) + throws RDFParseException, RDFHandlerException, IOException { + final URI baseURI = file.toURI(); + final InputStream inputStream = new FileInputStream(file); + final RDFParser rdfParser = Rio.createParser(rdfFormat); + + final Model model = new LinkedHashModel(); + rdfParser.setRDFHandler(new StatementCollector(model)); + rdfParser.parse(inputStream, baseURI.toString()); + + return model; + } + + static Term getSubjectFromTriple(final PositiveLiteral triple) { + return triple.getArguments().get(0); + } + + static Term getPredicateFromTriple(final PositiveLiteral triple) { + return triple.getArguments().get(1); + } + + static Term getObjectFromTriple(final PositiveLiteral triple) { + return triple.getArguments().get(2); + } + + static Term getObjectOfFirstMatchedTriple(final Term subject, final Term predicate, final Set<Fact> facts) { + return facts.stream() + .filter(triple -> getSubjectFromTriple(triple).equals(subject) + && getPredicateFromTriple(triple).equals(predicate)) + .findFirst().map(triple -> getObjectFromTriple(triple)).get(); + } + +} diff --git a/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java new file mode 100644 index 000000000..29dab9abf --- /dev/null +++ b/rulewerk-rdf/src/test/java/org/semanticweb/rulewerk/rdf/RdfValueToTermConverterTest.java @@ -0,0 +1,136 @@ +package org.semanticweb.rulewerk.rdf; + +/*- + * #%L + * Rulewerk RDF Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.*; + +import org.junit.Test; +import org.mockito.Mockito; +import org.openrdf.model.BNode; +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.impl.BNodeImpl; +import org.openrdf.model.impl.LiteralImpl; +import org.openrdf.model.impl.URIImpl; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; + +public class RdfValueToTermConverterTest { + + @Test + public void convertUri_succeeds() { + URI uri = new URIImpl("http://example.org"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(uri); + + assertEquals(TermType.ABSTRACT_CONSTANT, term.getType()); + assertEquals("http://example.org", term.getName()); + } + + @Test + public void convertUriToPredicate_succeeds() { + URI uri = new URIImpl("http://example.org/mypred"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Predicate predicate = converter.convertUriToPredicate(uri, 2); + + assertEquals("http://example.org/mypred", predicate.getName()); + assertEquals(2, predicate.getArity()); + } + + @Test + public void convertLiteralDatatype_succeeds() { + Literal literal = new LiteralImpl("42", new URIImpl("http://example.org/integer")); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.DATATYPE_CONSTANT, term.getType()); + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + assertEquals("http://example.org/integer", datatypeConstant.getDatatype()); + assertEquals("42", datatypeConstant.getLexicalValue()); + } + + @Test + public void convertLiteralLanguage_succeeds() { + Literal literal = new LiteralImpl("Test", "de"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.LANGSTRING_CONSTANT, term.getType()); + LanguageStringConstant langStringConstant = (LanguageStringConstant) term; + assertEquals("Test", langStringConstant.getString()); + assertEquals("de", langStringConstant.getLanguageTag()); + } + + @Test + public void convertLiteralString_succeeds() { + Literal literal = new LiteralImpl("RDF 1.0 untyped"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(literal); + + assertEquals(TermType.DATATYPE_CONSTANT, term.getType()); + DatatypeConstant datatypeConstant = (DatatypeConstant) term; + assertEquals(PrefixDeclarationRegistry.XSD_STRING, datatypeConstant.getDatatype()); + assertEquals("RDF 1.0 untyped", datatypeConstant.getLexicalValue()); + } + + @Test + public void convertBNodeSkolemize_succeeds() { + BNode bnode = new BNodeImpl("myid"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + Term term = converter.convertValue(bnode); + + assertEquals(TermType.ABSTRACT_CONSTANT, term.getType()); + assertTrue(term.getName().startsWith(Skolemization.SKOLEM_IRI_PREFIX));
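+ // Only the prefix is asserted: the remainder of the skolem constant's name is freshly generated and not predictable.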
} + + @Test + public void convertBNode_succeeds() { + BNode bnode = new BNodeImpl("myid"); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(false); + Term term = converter.convertValue(bnode); + + assertEquals(TermType.NAMED_NULL, term.getType()); + assertNotEquals("myid", term.getName()); + } + + @Test(expected = RulewerkRuntimeException.class) + public void convertValueUnknownType_fails() { + Value value = Mockito.mock(Value.class); + + RdfValueToTermConverter converter = new RdfValueToTermConverter(true); + converter.convertValue(value); + } + +} diff --git a/rulewerk-vlog/LICENSE.txt b/rulewerk-vlog/LICENSE.txt new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/rulewerk-vlog/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner.
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/rulewerk-vlog/pom.xml b/rulewerk-vlog/pom.xml new file mode 100644 index 000000000..52dce801c --- /dev/null +++ b/rulewerk-vlog/pom.xml @@ -0,0 +1,94 @@ + + + 4.0.0 + + + org.semanticweb.rulewerk + rulewerk-parent + 0.10.0-SNAPSHOT + + + rulewerk-vlog + jar + + Rulewerk VLog Reasoner Support + Bindings for the VLog reasoner backend. + + + 1.3.7 + vlog-java + + + + + ${project.groupId} + rulewerk-core + ${project.version} + + + ${project.groupId} + rulewerk-parser + ${project.version} + + + + ${project.groupId} + ${karmaresearch.vlog.artifactId} + ${karmaresearch.vlog.version} + + + + + + development + + + + + + io.github.zlika + reproducible-build-maven-plugin + 0.16 + + + initialize + + strip-jar + + + ./lib/ + + + + + + + org.apache.maven.plugins + maven-install-plugin + 2.4 + + + initialize + + install-file + + + ${project.groupId} + ${karmaresearch.vlog.artifactId} + ${karmaresearch.vlog.version} + jar + ./lib/jvlog-local.jar + + + + + + + + + diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java similarity index 63% rename from vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java rename to rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java index 70fec7690..3fe63160a 100644 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverter.java +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverter.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * VLog4j Core Components + * Rulewerk VLog Reasoner Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,25 +20,26 @@ * #L% */ - - import java.util.Collection; import java.util.List; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; - +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; /** * Utility class with static methods for converting from VLog API model objects - * ({@code org.semanticweb.vlog4j.core.model}) to internal VLog model objects + * ({@code org.semanticweb.rulewerk.core.model}) to internal VLog model objects * ({@code karmaresearch.vlog}). - * + * * @author Irina Dragoste * */ @@ -64,24 +65,31 @@ static karmaresearch.vlog.Term[] toVLogTermArray(final List<Term> terms) { return vLogTerms; } - static String[][] toVLogFactTuples(final Collection<Atom> facts) { + static String[][] toVLogFactTuples(final Collection<Fact> facts) { final String[][] tuples = new String[facts.size()][]; int i = 0; - for (final Atom atom : facts) { - final String[] vLogFactTuple = ModelToVLogConverter.toVLogFactTuple(atom); + for (final Fact fact : facts) { + final String[] vLogFactTuple = ModelToVLogConverter.toVLogFactTuple(fact); tuples[i] = vLogFactTuple; i++; } return tuples; } - static String[] toVLogFactTuple(final Atom fact) { - final List<Term> terms = fact.getTerms(); + static String[] toVLogFactTuple(final Fact fact) { + final List<Term> terms = fact.getArguments(); final String[] vLogFactTuple = new String[terms.size()]; int i = 0; for (final Term term : terms) { - final String vLogTupleTerm = term.getName(); - vLogFactTuple[i] = vLogTupleTerm; + // No checks for type of term -- only constants allowed in facts! + if (term instanceof Constant) { + vLogFactTuple[i] = TermToVLogConverter.getVLogNameForConstant((Constant) term); + } else if (term instanceof NamedNull) { + vLogFactTuple[i] = TermToVLogConverter.getVLogNameForNamedNull((NamedNull) term); + } else { + throw new RulewerkRuntimeException("Terms in facts must be constants or named nulls. Encountered " + + term + " of type " + term.getType() + "."); + } i++; } return vLogFactTuple; @@ -89,9 +97,8 @@ static String[] toVLogFactTuple(final Atom fact) { /** * Internal String representation that uniquely identifies a {@link Predicate}. - * - * @param predicate - * a {@link Predicate} + * + * @param predicate a {@link Predicate} * @return String representation corresponding to given predicate name and * arity.
*/ @@ -100,17 +107,18 @@ static String toVLogPredicate(Predicate predicate) { return vLogPredicate; } - static karmaresearch.vlog.Atom toVLogAtom(final Atom atom) { - final karmaresearch.vlog.Term[] vLogTerms = toVLogTermArray(atom.getTerms()); - final String vLogPredicate = toVLogPredicate(atom.getPredicate()); - final karmaresearch.vlog.Atom vLogAtom = new karmaresearch.vlog.Atom(vLogPredicate, vLogTerms); + static karmaresearch.vlog.Atom toVLogAtom(final Literal literal) { + final karmaresearch.vlog.Term[] vLogTerms = toVLogTermArray(literal.getArguments()); + final String vLogPredicate = toVLogPredicate(literal.getPredicate()); + final karmaresearch.vlog.Atom vLogAtom = new karmaresearch.vlog.Atom(vLogPredicate, literal.isNegated(), + vLogTerms); return vLogAtom; } - static karmaresearch.vlog.Atom[] toVLogAtomArray(final Conjunction conjunction) { - final karmaresearch.vlog.Atom[] vLogAtoms = new karmaresearch.vlog.Atom[conjunction.getAtoms().size()]; + static karmaresearch.vlog.Atom[] toVLogAtomArray(final Conjunction<? extends Literal> conjunction) { + final karmaresearch.vlog.Atom[] vLogAtoms = new karmaresearch.vlog.Atom[conjunction.getLiterals().size()]; int i = 0; - for (final Atom atom : conjunction.getAtoms()) { + for (final Literal atom : conjunction.getLiterals()) { vLogAtoms[i] = toVLogAtom(atom); i++; } @@ -123,7 +131,7 @@ static karmaresearch.vlog.Rule toVLogRule(final Rule rule) { return new karmaresearch.vlog.Rule(vLogHead, vLogBody); } - static karmaresearch.vlog.Rule[] toVLogRuleArray(final List<Rule> rules) { + static karmaresearch.vlog.Rule[] toVLogRuleArray(final Collection<Rule> rules) { final karmaresearch.vlog.Rule[] vLogRules = new karmaresearch.vlog.Rule[rules.size()]; int i = 0; for (final Rule rule : rules) { diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java new file mode 100644 index 000000000..ee43f3068 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/TermToVLogConverter.java @@ -0,0 +1,152 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DatatypeConstant; +import org.semanticweb.rulewerk.core.model.api.ExistentialVariable; +import org.semanticweb.rulewerk.core.model.api.LanguageStringConstant; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.NamedNull; +import org.semanticweb.rulewerk.core.model.api.TermType; +import org.semanticweb.rulewerk.core.model.api.TermVisitor; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization; + +/** + * A visitor that converts {@link Term}s of different types to corresponding + * internal VLog model {@link karmaresearch.vlog.Term}s. + * + * @author Irina Dragoste + * + */ +class TermToVLogConverter implements TermVisitor<karmaresearch.vlog.Term> { + + static final Skolemization skolemization = new Skolemization(); + + /** + * Transforms an abstract constant to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(final AbstractConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); + } + + /** + * Transforms a datatype constant to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(final DatatypeConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, getVLogNameForConstant(term)); + } + + /** + * Transforms a language-tagged string constant to a + * {@link karmaresearch.vlog.Term} with the same name and type + * {@link karmaresearch.vlog.Term.TermType#CONSTANT}. + */ + @Override + public karmaresearch.vlog.Term visit(final LanguageStringConstant term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); + } + + /** + * Converts the given constant to the name of a constant in VLog. + * + * @param constant + * @return VLog constant string + */ + public static String getVLogNameForConstant(final Constant constant) { + if (constant.getType() == TermType.ABSTRACT_CONSTANT) { + return getVLogNameForIRI(constant.getName()); + } else if (constant.getType() == TermType.DATATYPE_CONSTANT) { + return ((DatatypeConstant) constant).getRdfLiteralString(false); + } else if (constant.getType() == TermType.LANGSTRING_CONSTANT) { + return constant.getName(); + } else { + throw new RulewerkRuntimeException("Unexpected term type: " + constant.getType()); + } + } + + /** + * Converts the given named null to the name of a (skolem) constant in VLog. + * + * @param namedNull + * @return VLog constant string + */ + public static String getVLogNameForNamedNull(final NamedNull namedNull) { + return skolemization.getSkolemConstantName(namedNull); + } + + /** + * Converts the string representation of a constant in Rulewerk directly to the + * name of a constant in VLog, without parsing it into a {@link Constant} first.
+ * + * @param rulewerkConstantName + * @return VLog constant string + */ + public static String getVLogNameForConstantName(final String rulewerkConstantName) { + if (rulewerkConstantName.startsWith("\"")) { // keep datatype literal strings unchanged + return rulewerkConstantName; + } else + return getVLogNameForIRI(rulewerkConstantName); + } + + private static String getVLogNameForIRI(final String rulewerkConstantName) { + if (rulewerkConstantName.contains(":")) { // enclose absolute IRIs with < > + return "<" + rulewerkConstantName + ">"; + } else { // keep relative IRIs unchanged + return rulewerkConstantName; + } + } + + /** + * Transforms a universal variable to a {@link karmaresearch.vlog.Term} with the + * same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. + */ + @Override + public karmaresearch.vlog.Term visit(final UniversalVariable term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); + } + + /** + * Transforms an existential variable to a {@link karmaresearch.vlog.Term} with + * the same name and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. + */ + @Override + public karmaresearch.vlog.Term visit(final ExistentialVariable term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "!" + term.getName()); + } + + /** + * Transforms a named null to a {@link karmaresearch.vlog.Term} with the same + * name and type {@link karmaresearch.vlog.Term.TermType#BLANK}. + */ + @Override + public karmaresearch.vlog.Term visit(final NamedNull term) { + return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); + } + +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java new file mode 100644 index 000000000..2e4f250df --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitor.java @@ -0,0 +1,92 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
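// Illustration (not part of the diff): how the package-private helper above maps
// Rulewerk constant names to VLog constant names; the inputs are hypothetical:
// TermToVLogConverter.getVLogNameForConstantName("http://example.org/a")
//   -> "<http://example.org/a>" (contains ':', treated as an absolute IRI)
// TermToVLogConverter.getVLogNameForConstantName("abc")
//   -> "abc" (relative IRI, kept unchanged)
// TermToVLogConverter.getVLogNameForConstantName("\"1\"^^<http://www.w3.org/2001/XMLSchema#int>")
//   -> unchanged (starts with '"', kept as a datatype literal string)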
+ * #L% + */ + +import java.io.IOException; +import java.nio.file.Paths; + +import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; +import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource; + +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; + +public class VLogDataSourceConfigurationVisitor implements DataSourceConfigurationVisitor { + private String configString = null; + + private static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; + private static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; + private final static String FILE_DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY"; + private final static String TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE = "Trident"; + private static final String SPARQL_DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL"; + + public String getConfigString() { + return configString; + } + + protected void setFileConfigString(FileDataSource dataSource) throws IOException { + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + + FILE_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + "EDB%1$d_param0=" + getDirCanonicalPath(dataSource) + "\n" + + "EDB%1$d_param1=" + getFileNameWithoutExtension(dataSource) + "\n"; + } + + String getDirCanonicalPath(FileDataSource dataSource) throws IOException { + return Paths.get(dataSource.getFile().getCanonicalPath()).getParent().toString(); + } + + String getFileNameWithoutExtension(FileDataSource dataSource) { + final String fileName = dataSource.getName(); + return fileName.substring(0, fileName.lastIndexOf(dataSource.getExtension())); + } + + @Override + public void visit(CsvFileDataSource dataSource) throws IOException { + setFileConfigString(dataSource); + } + + @Override + public void visit(RdfFileDataSource dataSource) throws IOException { + setFileConfigString(dataSource); + } + + @Override + public void visit(SparqlQueryResultDataSource dataSource) { + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" + + SPARQL_DATASOURCE_TYPE_CONFIG_VALUE + "\n" + "EDB%1$d_param0=" + dataSource.getEndpoint() + "\n" + + "EDB%1$d_param1=" + dataSource.getQueryVariables() + "\n" + "EDB%1$d_param2=" + + dataSource.getQueryBody() + "\n"; + } + + @Override + public void visit(TridentDataSource dataSource) { + this.configString = PREDICATE_NAME_CONFIG_LINE + DATASOURCE_TYPE_CONFIG_PARAM + "=" // + + TRIDENT_DATASOURCE_TYPE_CONFIG_VALUE + "\n" // + + "EDB%1$d_param0=" + dataSource.getName() + "\n"; + } + + @Override + public void visit(InMemoryDataSource dataSource) { + this.configString = null; + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java new file mode 100644 index 000000000..33e88a5d6 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogFastQueryResultIterator.java @@ -0,0 +1,235 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; + +import 
org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; + +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.VLog; + +/** + * Iterates through all answers to a query. An answer to a query is a + * {@link QueryResult}. Each query answer is distinct. + * + * @author Markus Kroetzsch + * + */ +public class VLogFastQueryResultIterator implements QueryResultIterator { + +// /** +// * Use of Java's LinkedHashMap for implementing a simple LRU cache that is used +// * here for mapping VLog ids to terms. +// * +// * @author Markus Kroetzsch +// * +// * @param <K> +// * @param <V> +// */ +// static class SimpleLruMap<K, V> extends LinkedHashMap<K, V> { +// private static final long serialVersionUID = 7151535464938775359L; +// private int maxCapacity; +// +// public SimpleLruMap(int initialCapacity, int maxCapacity) { +// super(initialCapacity, 0.75f, true); +// this.maxCapacity = maxCapacity; +// } +// +// @Override +// protected boolean removeEldestEntry(Map.Entry<K, V> eldest) { +// return size() >= this.maxCapacity; +// } +// } + + /** + * Simple cache for finding terms for VLog ids that is optimised for the case + * where ids are inserted in a mostly ordered fashion. An LRU strategy is highly + * ineffective for this as soon as the cache capacity is smaller than the number + * of repeatedly used terms, since cache entries are then always pushed out + * before being needed again. This implementation will at least cache a maximal + * initial fragment in such cases. It is also faster to write and requires less + * memory. + * + * @author Markus Kroetzsch + * + */ + static class OrderedTermCache { + final private HashMap<Long, Term> terms = new HashMap<>(); + final int maxCapacity; + private long maxId = -1; + + public OrderedTermCache(int capacity) { + this.maxCapacity = capacity; + } + + public Term get(long id) { + if (id > maxId) { + return null; + } else { + return terms.get(id); + } + } + + public void put(long id, Term term) { + if (terms.size() < maxCapacity) { + terms.put(id, term); + if (id > maxId) { + maxId = id; + } + } + } + } + + /** + * The internal result iterator of VLog, returning numeric ids only. + */ + private final karmaresearch.vlog.QueryResultIterator vLogQueryResultIterator; + /** + * The VLog instance. Used for resolving numeric ids to term names. + */ + private final VLog vLog; + /** + * VLog ids of the previous tuple, with the last id fixed to -1 (since it is + * never useful in caching).
+ */ + private long[] prevIds = null; + /** + * True if this is the first result that is returned. + */ + boolean firstResult = true; + /** + * Size of the tuples returned in this result. + */ + int resultSize = -1; + /** + * Previous tuple that was returned. + */ + Term[] prevTuple; + /** + * Cache mapping ids to terms. + */ + // final SimpleLruMap termCache; + final OrderedTermCache termCache; + + private final Correctness correctness; + + /** + * Create a new {@link VLogFastQueryResultIterator}. + * + * @param queryResultIterator + * @param materialisationState + * @param vLog + */ + public VLogFastQueryResultIterator(final karmaresearch.vlog.QueryResultIterator queryResultIterator, + final Correctness materialisationState, final VLog vLog) { + this.vLogQueryResultIterator = queryResultIterator; + this.correctness = materialisationState; + this.vLog = vLog; + // this.termCache = new SimpleLruMap(256, 64000); + this.termCache = new OrderedTermCache(130000); + } + + @Override + public boolean hasNext() { + return this.vLogQueryResultIterator.hasNext(); + } + + @Override + public QueryResult next() { + final long[] idTuple = vLogQueryResultIterator.next(); + + if (firstResult) { + resultSize = idTuple.length; + prevTuple = new Term[resultSize]; + prevIds = new long[resultSize]; + Arrays.fill(prevIds, -1); // (practically) impossible id + firstResult = false; + } + + if (resultSize == 1) { // Caching is pointless for unary queries + return new QueryResultImpl(Collections.singletonList(computeTerm(idTuple[0]))); + } + + // (Array.copyOf was slightly faster than System.arraycopy in tests) + final Term[] terms = Arrays.copyOf(prevTuple, resultSize); + int i = 0; + for (long id : idTuple) { + if (prevIds[i] != id) { + Term term = this.termCache.get(id); + if (term == null) { + term = computeTerm(id); + this.termCache.put(id, term); + } + terms[i] = term; + if (i < resultSize - 1) { + prevIds[i] = id; + } + } + i++; + } + + prevTuple = terms; + return new QueryResultImpl(Arrays.asList(terms)); + } + + /** + * Compute the {@link Term} for a given VLog id. + * + * @param id + * @return + */ + Term computeTerm(long id) { + try { + String s = vLog.getConstant(id); + if (s == null) { + // This string operation extracts the internal rule number (val >> 40), + // the internal variable number ((val >> 32) & 0377), and + // a counter (val & 0xffffffffL) + return new NamedNullImpl("null" + (id >> 40) + "_" + ((id >> 32) & 0377) + "_" + (id & 0xffffffffL)); + } else { + return VLogToModelConverter.toConstant(s); + } + } catch (NotStartedException e) { // Should never happen, we just did a query ... + throw new RulewerkRuntimeException(e); + } + } + + @Override + public void close() { + this.vLogQueryResultIterator.close(); + } + + @Override + public Correctness getCorrectness() { + return this.correctness; + } + +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java new file mode 100644 index 000000000..e406487f9 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogInMemoryDataSource.java @@ -0,0 +1,97 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
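// Illustration (not part of the diff): the OrderedTermCache above keeps a maximal
// initial fragment of the id space rather than evicting entries like an LRU. A sketch
// of its contract, with hypothetical terms t1, t2, t3:
// OrderedTermCache cache = new OrderedTermCache(2); // capacity 2
// cache.put(1L, t1); cache.put(2L, t2);             // cache is now full
// cache.put(3L, t3);                                // ignored: capacity reached
// cache.get(1L);                                    // -> t1 (cached)
// cache.get(3L);                                    // -> null (id beyond the largest cached id)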
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.util.Arrays; + +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.reasoner.implementation.DataSourceConfigurationVisitor; + +/** + * Implementation of {@link InMemoryDataSource} for the VLog backend. + */ +public class VLogInMemoryDataSource extends InMemoryDataSource { + String[][] data; + int nextEmptyTuple = 0; + + public VLogInMemoryDataSource(final int arity, final int initialCapacity) { + super(arity, initialCapacity); + this.data = new String[initialCapacity][arity]; + } + + /** + * Adds a fact to this data source. The number of constant names must agree with + * the arity of this data source. + * + * @param constantNames the string names of the constants in this fact + */ + public void addTuple(final String... constantNames) { + validateArity(constantNames); + + if (this.nextEmptyTuple == this.capacity) { + this.capacity = this.capacity * 2; + this.data = Arrays.copyOf(this.data, this.capacity); + } + this.data[this.nextEmptyTuple] = new String[this.arity]; + for (int i = 0; i < this.arity; i++) { + this.data[this.nextEmptyTuple][i] = TermToVLogConverter.getVLogNameForConstantName(constantNames[i]); + } + this.nextEmptyTuple++; + } + + /** + * Returns the data stored in this data source, in the format expected by the + * VLog reasoner backend. + * + * @return the data + */ + public String[][] getData() { + if (this.nextEmptyTuple == this.capacity) { + return this.data; + } else { + return Arrays.copyOf(this.data, this.nextEmptyTuple); + } + } + + @Override + public String toString() { + final StringBuilder sb = new StringBuilder( + "This InMemoryDataSource holds the following tuples of constant names, one tuple per line:"); + for (int i = 0; i < getData().length; i++) { + for (int j = 0; j < this.data[i].length; j++) { + sb.append(this.data[i][j] + " "); + } + sb.append("\n"); + } + return sb.toString(); + } + + @Override + public void accept(DataSourceConfigurationVisitor visitor) { + visitor.visit(this); + } + + @Override + public Fact getDeclarationFact() { + throw new UnsupportedOperationException("VLogInMemoryDataSource cannot be serialized."); + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java new file mode 100644 index 000000000..9f41cc7c9 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBase.java @@ -0,0 +1,310 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
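// Illustration (not part of the diff): typical use of the in-memory data source above;
// constant names are converted to VLog names on insertion, and the backing array
// doubles when full (values are hypothetical):
// VLogInMemoryDataSource source = new VLogInMemoryDataSource(2, 1); // arity 2, capacity 1
// source.addTuple("http://example.org/a", "http://example.org/b");  // stored as <...> IRIs
// source.addTuple("c", "d");                                        // triggers capacity doubling
// String[][] tuples = source.getData();                             // trimmed to the 2 added tuples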
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Formatter; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.StatementVisitor; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.ConjunctionImpl; +import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl; +import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl; +import org.semanticweb.rulewerk.core.model.implementation.RuleImpl; +import org.semanticweb.rulewerk.core.model.implementation.UniversalVariableImpl; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource; + +/** + * Class for organizing a Knowledge Base using vLog-specific data structures. + * + * @author Irina Dragoste + * + */ +public class VLogKnowledgeBase { + + private final Map edbPredicates = new HashMap<>(); + private final Map aliasesForEdbPredicates = new HashMap<>(); + + private final Set aliasedEdbPredicates = new HashSet<>(); + + private final Set idbPredicates = new HashSet<>(); + + private final Map> directEdbFacts = new HashMap<>(); + + private final Set rules = new HashSet<>(); + + /** + * Package-protected constructor, that organizes given {@code knowledgeBase} in + * vLog-specific data structures. 
+ * + * @param knowledgeBase + */ + VLogKnowledgeBase(final KnowledgeBase knowledgeBase) { + final LoadKbVisitor visitor = this.new LoadKbVisitor(); + visitor.clearIndexes(); + for (final Statement statement : knowledgeBase) { + statement.accept(visitor); + } + } + + boolean hasData() { + return !this.edbPredicates.isEmpty() || !this.aliasedEdbPredicates.isEmpty(); + } + + public boolean hasRules() { + return !this.rules.isEmpty(); + } + + Predicate getAlias(final Predicate predicate) { + if (this.edbPredicates.containsKey(predicate)) { + return predicate; + } else { + return this.aliasesForEdbPredicates.get(new LocalFactsDataSourceDeclaration(predicate)); + } + } + + String getVLogDataSourcesConfigurationString() { + final StringBuilder configStringBuilder = new StringBuilder(); + final Formatter formatter = new Formatter(configStringBuilder); + int dataSourceIndex = 0; + + for (final Entry e : this.edbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getValue().getDataSource(), e.getKey(), + dataSourceIndex, formatter); + } + + for (final Entry e : this.aliasesForEdbPredicates.entrySet()) { + dataSourceIndex = addDataSourceConfigurationString(e.getKey().getDataSource(), e.getValue(), + dataSourceIndex, formatter); + } + + formatter.close(); + return configStringBuilder.toString(); + } + + int addDataSourceConfigurationString(final DataSource dataSource, final Predicate predicate, + final int dataSourceIndex, final Formatter formatter) { + int newDataSourceIndex = dataSourceIndex; + + if (dataSource != null) { + if (dataSource instanceof ReasonerDataSource) { + final ReasonerDataSource reasonerDataSource = (ReasonerDataSource) dataSource; + final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor(); + try { + reasonerDataSource.accept(visitor); + } catch (IOException e) { + throw new RulewerkRuntimeException("Error while building VLog data source configuration", e); + } + final String configString = visitor.getConfigString(); + if (configString != null) { + formatter.format(configString, dataSourceIndex, ModelToVLogConverter.toVLogPredicate(predicate)); + newDataSourceIndex++; + } + } + } + + return newDataSourceIndex; + } + + Map getEdbPredicates() { + return this.edbPredicates; + } + + Map getAliasesForEdbPredicates() { + return this.aliasesForEdbPredicates; + } + + Map> getDirectEdbFacts() { + return this.directEdbFacts; + } + + Set getRules() { + return this.rules; + } + + /** + * Local visitor implementation for processing statements upon loading. Internal + * index structures are updated based on the statements that are detected. 
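// Illustration (not part of the diff): getVLogDataSourcesConfigurationString above
// instantiates the templates produced by VLogDataSourceConfigurationVisitor. For a
// hypothetical CsvFileDataSource registered at index 0, the resulting block looks like:
//   EDB0_predname=P   (where P is the name produced by ModelToVLogConverter.toVLogPredicate)
//   EDB0_type=INMEMORY
//   EDB0_param0=/canonical/path/to/the/data/directory
//   EDB0_param1=fileNameWithoutExtension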
+ * + * @author Markus Kroetzsch + */ + + class LoadKbVisitor implements StatementVisitor { + + public void clearIndexes() { + VLogKnowledgeBase.this.edbPredicates.clear(); + VLogKnowledgeBase.this.idbPredicates.clear(); + VLogKnowledgeBase.this.aliasedEdbPredicates.clear(); + VLogKnowledgeBase.this.aliasesForEdbPredicates.clear(); + VLogKnowledgeBase.this.directEdbFacts.clear(); + VLogKnowledgeBase.this.rules.clear(); + } + + @Override + public Void visit(final Fact fact) { + final Predicate predicate = fact.getPredicate(); + registerEdbDeclaration(new LocalFactsDataSourceDeclaration(predicate)); + if (!VLogKnowledgeBase.this.directEdbFacts.containsKey(predicate)) { + final List facts = new ArrayList<>(); + facts.add(fact); + VLogKnowledgeBase.this.directEdbFacts.put(predicate, facts); + } else { + VLogKnowledgeBase.this.directEdbFacts.get(predicate).add(fact); + } + return null; + } + + @Override + public Void visit(final Rule statement) { + VLogKnowledgeBase.this.rules.add(statement); + for (final PositiveLiteral positiveLiteral : statement.getHead()) { + final Predicate predicate = positiveLiteral.getPredicate(); + if (!VLogKnowledgeBase.this.idbPredicates.contains(predicate)) { + if (VLogKnowledgeBase.this.edbPredicates.containsKey(predicate)) { + addEdbAlias(VLogKnowledgeBase.this.edbPredicates.get(predicate)); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } + VLogKnowledgeBase.this.idbPredicates.add(predicate); + } + } + return null; + } + + @Override + public Void visit(final DataSourceDeclaration statement) { + registerEdbDeclaration(statement); + return null; + } + + void registerEdbDeclaration(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + if (VLogKnowledgeBase.this.idbPredicates.contains(predicate) + || VLogKnowledgeBase.this.aliasedEdbPredicates.contains(predicate)) { + if (!VLogKnowledgeBase.this.aliasesForEdbPredicates.containsKey(dataSourceDeclaration)) { + addEdbAlias(dataSourceDeclaration); + } + } else { + final DataSourceDeclaration currentMainDeclaration = VLogKnowledgeBase.this.edbPredicates + .get(predicate); + if (currentMainDeclaration == null) { + VLogKnowledgeBase.this.edbPredicates.put(predicate, dataSourceDeclaration); + } else if (!currentMainDeclaration.equals(dataSourceDeclaration)) { + addEdbAlias(currentMainDeclaration); + addEdbAlias(dataSourceDeclaration); + VLogKnowledgeBase.this.edbPredicates.remove(predicate); + } // else: predicate already known to have local facts (only) + } + } + + void addEdbAlias(final DataSourceDeclaration dataSourceDeclaration) { + final Predicate predicate = dataSourceDeclaration.getPredicate(); + Predicate aliasPredicate; + if (dataSourceDeclaration instanceof LocalFactsDataSourceDeclaration) { + aliasPredicate = new PredicateImpl(predicate.getName() + "##FACT", predicate.getArity()); + } else { + aliasPredicate = new PredicateImpl(predicate.getName() + "##" + dataSourceDeclaration.hashCode(), + predicate.getArity()); + } + VLogKnowledgeBase.this.aliasesForEdbPredicates.put(dataSourceDeclaration, aliasPredicate); + VLogKnowledgeBase.this.aliasedEdbPredicates.add(predicate); + + final List terms = new ArrayList<>(); + for (int i = 1; i <= predicate.getArity(); i++) { + terms.add(new UniversalVariableImpl("X" + i)); + } + final Literal body = new PositiveLiteralImpl(aliasPredicate, terms); + final PositiveLiteral head = new PositiveLiteralImpl(predicate, terms); + final Rule rule = new RuleImpl(new ConjunctionImpl<>(Arrays.asList(head)), + 
new ConjunctionImpl<>(Arrays.asList(body))); + VLogKnowledgeBase.this.rules.add(rule); + } + + } + + /** + * Dummy data source declaration for predicates for which we have explicit local + * facts in the input. + * + * @author Markus Kroetzsch + * + */ + class LocalFactsDataSourceDeclaration implements DataSourceDeclaration { + + final Predicate predicate; + + public LocalFactsDataSourceDeclaration(Predicate predicate) { + this.predicate = predicate; + } + + @Override + public T accept(StatementVisitor statementVisitor) { + return statementVisitor.visit(this); + } + + @Override + public Predicate getPredicate() { + return this.predicate; + } + + @Override + public DataSource getDataSource() { + return null; + } + + @Override + public int hashCode() { + return this.predicate.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + final LocalFactsDataSourceDeclaration other = (LocalFactsDataSourceDeclaration) obj; + return this.predicate.equals(other.predicate); + } + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java new file mode 100644 index 000000000..d5047dc55 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasoner.java @@ -0,0 +1,764 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
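// Illustration (not part of the diff): the effect of addEdbAlias above. For a knowledge
// base containing a fact p(c) and a rule with p in its head, p is both EDB and IDB, so
// loading introduces an alias predicate and an auxiliary import rule, roughly:
//   p##FACT(c)             (the local fact is loaded under the alias)
//   p(X1) :- p##FACT(X1)   (auxiliary rule importing the EDB facts into p)
// This renaming is also why querying such a predicate before reasoning can miss its EDB
// facts, as noted in the TODO comment in VLogReasoner.load().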
+ * #L% + */ + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.commons.lang3.Validate; +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Literal; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Statement; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.AcyclicityNotion; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.CyclicityResult; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; +import org.semanticweb.rulewerk.core.reasoner.implementation.EmptyQueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryAnswerCountImpl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import karmaresearch.vlog.AlreadyStartedException; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.CyclicCheckResult; + +/** + * Reasoner implementation using the VLog backend. + * + * + * + * @author Markus Kroetzsch + * + */ +public class VLogReasoner implements Reasoner { + private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); + + final KnowledgeBase knowledgeBase; + final VLog vLog = new VLog(); + + private ReasonerState reasonerState = ReasonerState.KB_NOT_LOADED; + private Correctness correctness = Correctness.SOUND_BUT_INCOMPLETE; + + private LogLevel internalLogLevel = LogLevel.WARNING; + private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; + private Integer timeoutAfterSeconds; + private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; + + /** + * Holds the state of the reasoning result. Has value {@code true} if reasoning + * has completed, {@code false} if it has been interrupted. 
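// Illustration (not part of the diff): the chase variant, timeout, and rule rewriting
// shown in the fields above are configured before reasoning, e.g.:
// reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE);             // default: RESTRICTED_CHASE
// reasoner.setReasoningTimeout(60);                          // seconds; null disables the timeout
// reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); // the default strategy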
+ */ + private boolean reasoningCompleted; + + public VLogReasoner(final KnowledgeBase knowledgeBase) { + super(); + this.knowledgeBase = knowledgeBase; + this.knowledgeBase.addListener(this); + + this.setLogLevel(this.internalLogLevel); + } + + @Override + public KnowledgeBase getKnowledgeBase() { + return this.knowledgeBase; + } + + @Override + public void setAlgorithm(final Algorithm algorithm) { + Validate.notNull(algorithm, "Algorithm cannot be null!"); + this.validateNotClosed(); + this.algorithm = algorithm; + } + + @Override + public Algorithm getAlgorithm() { + return this.algorithm; + } + + @Override + public void setReasoningTimeout(final Integer seconds) { + this.validateNotClosed(); + if (seconds != null) { + Validate.isTrue(seconds > 0, "Only strictly positive timeout period allowed!", seconds); + } + this.timeoutAfterSeconds = seconds; + } + + @Override + public Integer getReasoningTimeout() { + return this.timeoutAfterSeconds; + } + + @Override + public void setRuleRewriteStrategy(final RuleRewriteStrategy ruleRewritingStrategy) { + this.validateNotClosed(); + Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); + this.ruleRewriteStrategy = ruleRewritingStrategy; + } + + @Override + public RuleRewriteStrategy getRuleRewriteStrategy() { + return this.ruleRewriteStrategy; + } + + @Override + public Correctness getCorrectness() { + return this.correctness; + } + + /* + * TODO Due to automatic predicate renaming, it can happen that an EDB predicate + * cannot be queried after loading unless reasoning has already been invoked + * (since the auxiliary rule that imports the EDB facts to the "real" predicate + * must be used). This issue could be weakened by rewriting queries to + * (single-source) EDB predicates internally when in such a state. + */ + void load() throws IOException { + this.validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + this.loadKnowledgeBase(); + break; + case KB_LOADED: + case MATERIALISED: + // do nothing, all KB is already loaded + break; + case KB_CHANGED: + this.resetReasoner(); + this.loadKnowledgeBase(); + default: + break; + } + } + + void loadKnowledgeBase() throws IOException { + LOGGER.info("Started loading knowledge base ..."); + + final VLogKnowledgeBase vLogKB = new VLogKnowledgeBase(this.knowledgeBase); + + if (!vLogKB.hasData()) { + LOGGER.warn("No data statements (facts or datasource declarations) have been provided."); + } + + // 1. vLog is initialized by loading VLog data sources + this.loadVLogDataSources(vLogKB); + + // 2. in-memory data is loaded + this.loadInMemoryDataSources(vLogKB); + this.validateDataSourcePredicateArities(vLogKB); + + this.loadFacts(vLogKB); + + // 3. rules are loaded + this.loadRules(vLogKB); + + this.reasonerState = ReasonerState.KB_LOADED; + + // if there are no rules, then materialisation state is complete + this.correctness = !vLogKB.hasRules() ? 
Correctness.SOUND_AND_COMPLETE : Correctness.SOUND_BUT_INCOMPLETE; + + LOGGER.info("Finished loading knowledge base."); + } + + void loadVLogDataSources(final VLogKnowledgeBase vLogKB) throws IOException { + try { + this.vLog.start(vLogKB.getVLogDataSourcesConfigurationString(), false); + } catch (final AlreadyStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); + } catch (final EDBConfigurationException e) { + throw new RulewerkRuntimeException("Invalid data sources configuration.", e); + } + } + + void loadInMemoryDataSources(final VLogKnowledgeBase vLogKB) { + vLogKB.getEdbPredicates().forEach((k, v) -> this.loadInMemoryDataSource(v.getDataSource(), k)); + + vLogKB.getAliasesForEdbPredicates().forEach((k, v) -> this.loadInMemoryDataSource(k.getDataSource(), v)); + } + + void loadInMemoryDataSource(final DataSource dataSource, final Predicate predicate) { + if (dataSource instanceof VLogInMemoryDataSource) { + + final VLogInMemoryDataSource inMemoryDataSource = (VLogInMemoryDataSource) dataSource; + try { + this.load(predicate, inMemoryDataSource); + } catch (final EDBConfigurationException e) { + throw new RulewerkRuntimeException("Invalid data sources configuration!", e); + } + } + } + + void load(final Predicate predicate, final VLogInMemoryDataSource inMemoryDataSource) + throws EDBConfigurationException { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(predicate); + + this.vLog.addData(vLogPredicateName, inMemoryDataSource.getData()); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : inMemoryDataSource.getData()) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } + + /** + * Checks if the loaded external data sources do in fact contain data of the + * correct arity. + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArities(final VLogKnowledgeBase vLogKB) throws IncompatiblePredicateArityException { + + vLogKB.getEdbPredicates().forEach((k, v) -> this.validateDataSourcePredicateArity(k, v.getDataSource())); + + vLogKB.getAliasesForEdbPredicates() + .forEach((k, v) -> this.validateDataSourcePredicateArity(v, k.getDataSource())); + } + + /** + * Checks if the loaded external data for a given source does in fact contain + * data of the correct arity for the given predicate. + * + * @param predicate the predicate for which data is loaded + * @param dataSource the data source used + * + * @throws IncompatiblePredicateArityException to indicate a problem + * (non-checked exception) + */ + void validateDataSourcePredicateArity(final Predicate predicate, final DataSource dataSource) + throws IncompatiblePredicateArityException { + if (dataSource == null) { + return; + } + try { + final int dataSourcePredicateArity = this.vLog + .getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); + if (dataSourcePredicateArity == -1) { + LOGGER.warn("Data source {} for predicate {} is empty! ", dataSource, predicate);
", dataSource, predicate); + } else if (predicate.getArity() != dataSourcePredicateArity) { + throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, dataSource); + } + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); + } + } + + void loadFacts(final VLogKnowledgeBase vLogKB) { + final Map> directEdbFacts = vLogKB.getDirectEdbFacts(); + + directEdbFacts.forEach((k, v) -> { + try { + final String vLogPredicateName = ModelToVLogConverter.toVLogPredicate(vLogKB.getAlias(k)); + final String[][] vLogPredicateTuples = ModelToVLogConverter.toVLogFactTuples(v); + + this.vLog.addData(vLogPredicateName, vLogPredicateTuples); + + if (LOGGER.isDebugEnabled()) { + for (final String[] tuple : vLogPredicateTuples) { + LOGGER.debug("Loaded direct fact {}{}.", vLogPredicateName, Arrays.toString(tuple)); + } + } + } catch (final EDBConfigurationException e) { + throw new RulewerkRuntimeException("Invalid data sources configuration!", e); + } + + }); + } + + void loadRules(final VLogKnowledgeBase vLogKB) { + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(vLogKB.getRules()); + final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter + .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); + try { + this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); + if (LOGGER.isDebugEnabled()) { + for (final karmaresearch.vlog.Rule rule : vLogRuleArray) { + LOGGER.debug("Loaded rule {}.", rule.toString()); + } + } + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); + } + } + + @Override + public boolean reason() throws IOException { + this.validateNotClosed(); + + switch (this.reasonerState) { + case KB_NOT_LOADED: + this.load(); + this.runChase(); + break; + case KB_LOADED: + this.runChase(); + break; + case KB_CHANGED: + this.resetReasoner(); + this.load(); + this.runChase(); + break; + case MATERIALISED: + this.runChase(); + break; + default: + break; + } + + return this.reasoningCompleted; + } + + private void runChase() { + LOGGER.info("Started materialisation of inferences ..."); + this.reasonerState = ReasonerState.MATERIALISED; + + final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; + try { + if (this.timeoutAfterSeconds == null) { + this.vLog.materialize(skolemChase); + this.reasoningCompleted = true; + } else { + this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); + } + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); + } catch (final MaterializationException e) { + throw new RulewerkRuntimeException("VLog encounterd an error during materialization: " + e.getMessage(), e); + } + + if (this.reasoningCompleted) { + this.correctness = Correctness.SOUND_AND_COMPLETE; + LOGGER.info("Completed materialisation of inferences."); + } else { + this.correctness = Correctness.SOUND_BUT_INCOMPLETE; + LOGGER.info("Stopped materialisation of inferences (possibly incomplete)."); + } + } + + @Override + public QueryResultIterator answerQuery(final PositiveLiteral query, final boolean includeNulls) { + this.validateBeforeQuerying(query); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + final karmaresearch.vlog.QueryResultIterator queryResultIterator; + + try { + final int predicateId = 
this.vLog.getPredicateId(vLogAtom.getPredicate()); + final long[] terms = this.extractTerms(vLogAtom.getTerms()); + queryResultIterator = this.vLog.query(predicateId, terms, true, filterBlanks); + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + return this.createEmptyResultIterator(query); + } + + this.logWarningOnCorrectness(this.correctness); + return new VLogFastQueryResultIterator(queryResultIterator, this.correctness, this.vLog); + } + + private QueryResultIterator createEmptyResultIterator(final PositiveLiteral query) { + final Correctness answerCorrectness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(answerCorrectness); + return new EmptyQueryResultIterator(answerCorrectness); + } + + private Correctness getCorrectnessUnknownPredicate(final PositiveLiteral query) { + final Correctness answerCorrectness; + if (this.reasonerState == ReasonerState.MATERIALISED) { + this.warnUnknownPredicate(query); + answerCorrectness = Correctness.SOUND_AND_COMPLETE; + } else { + answerCorrectness = Correctness.SOUND_BUT_INCOMPLETE; + } + return answerCorrectness; + } + + private void warnUnknownPredicate(final PositiveLiteral query) { + LOGGER.warn("Query uses predicate " + query.getPredicate() + + " that does not occur in the materialised knowledge base. Answer must be empty!"); + } + + /** + * Utility method copied from {@link karmaresearch.vlog.VLog}. + * + * @FIXME This should be provided by VLog and made visible to us rather than + * being copied here. + * @param terms + * @return + * @throws NotStartedException + */ + private long[] extractTerms(final karmaresearch.vlog.Term[] terms) throws NotStartedException { + final ArrayList variables = new ArrayList<>(); + final long[] longTerms = new long[terms.length]; + for (int i = 0; i < terms.length; i++) { + if (terms[i].getTermType() == karmaresearch.vlog.Term.TermType.VARIABLE) { + boolean found = false; + for (int j = 0; j < variables.size(); j++) { + if (variables.get(j).equals(terms[i].getName())) { + found = true; + longTerms[i] = -j - 1; + break; + } + } + if (!found) { + variables.add(terms[i].getName()); + longTerms[i] = -variables.size(); + } + } else { + longTerms[i] = this.vLog.getOrAddConstantId(terms[i].getName()); + } + } + return longTerms; + } + + @Override + public QueryAnswerCount countQueryAnswers(final PositiveLiteral query, final boolean includeNulls) { + this.validateBeforeQuerying(query); + + final boolean filterBlanks = !includeNulls; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + + long result; + try { + result = this.vLog.querySize(vLogAtom, true, filterBlanks); + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e) { + return this.createEmptyResultCount(query); + } + this.logWarningOnCorrectness(this.correctness); + return new QueryAnswerCountImpl(this.correctness, result); + } + + private QueryAnswerCount createEmptyResultCount(final PositiveLiteral query) { + final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(correctness); + return new QueryAnswerCountImpl(correctness, 0); + } + + @Override + public Correctness exportQueryAnswersToCsv(final PositiveLiteral query, final String csvFilePath, + final boolean includeBlanks) throws IOException { + 
this.validateBeforeQuerying(query); + + Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); + Validate.isTrue(csvFilePath.endsWith(".csv"), "Expected .csv extension for file [%s]!", csvFilePath); + + final boolean filterBlanks = !includeBlanks; + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(query); + try { + this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state!", e); + } catch (final NonExistingPredicateException e1) { + final Correctness correctness = this.getCorrectnessUnknownPredicate(query); + this.logWarningOnCorrectness(correctness); + return correctness; + } + this.logWarningOnCorrectness(this.correctness); + return this.correctness; + } + + private void validateBeforeQuerying(final PositiveLiteral query) { + this.validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Querying is not allowed before Reasoner#reason() was first called!"); + } + Validate.notNull(query, "Query atom must not be null!"); + } + + @Override + public Correctness forEachInference(final InferenceAction action) throws IOException { + this.validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + throw new ReasonerStateException(this.reasonerState, + "Obtaining inferences is not allowed before reasoner is loaded!"); + } + final Set<Predicate> toBeQueriedHeadPredicates = this.getKnowledgeBasePredicates(); + + for (final Predicate predicate : toBeQueriedHeadPredicates) { + final PositiveLiteral queryAtom = this.getQueryAtom(predicate); + final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); + try (final TermQueryResultIterator answers = this.vLog.query(vLogAtom, true, false)) { + while (answers.hasNext()) { + final karmaresearch.vlog.Term[] vlogTerms = answers.next(); + final List<Term> termList = VLogToModelConverter.toTermList(vlogTerms); + action.accept(predicate, termList); + } + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException("Inconsistent reasoner state.", e); + } catch (final NonExistingPredicateException e1) { + throw new RulewerkRuntimeException("Inconsistent knowledge base state.", e1); + } + } + + this.logWarningOnCorrectness(this.correctness); + return this.correctness; + } + + private void logWarningOnCorrectness(final Correctness correctness) { + if (correctness != Correctness.SOUND_AND_COMPLETE) { + LOGGER.warn("Query answers may be {} with respect to the current Knowledge Base!", this.correctness); + } + } + + @Override + public void resetReasoner() { + this.validateNotClosed(); + this.reasonerState = ReasonerState.KB_NOT_LOADED; + this.vLog.stop(); + LOGGER.info("Reasoner has been reset. 
All inferences computed during reasoning have been discarded."); + } + + @Override + public void close() { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.info("Reasoner is already closed."); + } else { + this.reasonerState = ReasonerState.CLOSED; + this.knowledgeBase.deleteListener(this); + this.vLog.stop(); + LOGGER.info("Reasoner closed."); + } + } + + @Override + public void setLogLevel(final LogLevel logLevel) { + this.validateNotClosed(); + Validate.notNull(logLevel, "Log level cannot be null!"); + this.internalLogLevel = logLevel; + this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); + } + + @Override + public LogLevel getLogLevel() { + return this.internalLogLevel; + } + + @Override + public void setLogFile(final String filePath) { + this.validateNotClosed(); + this.vLog.setLogFile(filePath); + } + + @Override + public boolean isJA() { + return this.checkAcyclicity(AcyclicityNotion.JA); + } + + @Override + public boolean isRJA() { + return this.checkAcyclicity(AcyclicityNotion.RJA); + } + + @Override + public boolean isMFA() { + return this.checkAcyclicity(AcyclicityNotion.MFA); + } + + @Override + public boolean isRMFA() { + return this.checkAcyclicity(AcyclicityNotion.RMFA); + } + + @Override + public boolean isMFC() { + this.validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + this.load(); + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 + throw new RulewerkRuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic("MFC"); + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.CYCLIC); + } + + @Override + public CyclicityResult checkForCycles() { + final boolean acyclic = this.isJA() || this.isRJA() || this.isMFA() || this.isRMFA(); + if (acyclic) { + return CyclicityResult.ACYCLIC; + } else { + final boolean cyclic = this.isMFC(); + if (cyclic) { + return CyclicityResult.CYCLIC; + } + return CyclicityResult.UNDETERMINED; + } + } + + @Override + public void onStatementsAdded(final List statementsAdded) { + // TODO more elaborate materialisation state handling + + this.updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementsAdded(statementsAdded); + this.updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementAdded(final Statement statementAdded) { + // TODO more elaborate materialisation state handling + + this.updateReasonerToKnowledgeBaseChanged(); + + // updateCorrectnessOnStatementAdded(statementAdded); + this.updateCorrectnessOnStatementsAdded(); + } + + @Override + public void onStatementRemoved(final Statement statementRemoved) { + this.updateReasonerToKnowledgeBaseChanged(); + this.updateCorrectnessOnStatementsRemoved(); + } + + @Override + public void onStatementsRemoved(final List statementsRemoved) { + this.updateReasonerToKnowledgeBaseChanged(); + this.updateCorrectnessOnStatementsRemoved(); + } + + Set getKnowledgeBasePredicates() { + final Set toBeQueriedHeadPredicates = new HashSet<>(); + for (final Rule rule : this.knowledgeBase.getRules()) { + for (final Literal literal : rule.getHead()) { + toBeQueriedHeadPredicates.add(literal.getPredicate()); + } + } + for (final DataSourceDeclaration dataSourceDeclaration : this.knowledgeBase.getDataSourceDeclarations()) { + 
toBeQueriedHeadPredicates.add(dataSourceDeclaration.getPredicate()); + } + for (final Fact fact : this.knowledgeBase.getFacts()) { + toBeQueriedHeadPredicates.add(fact.getPredicate()); + } + return toBeQueriedHeadPredicates; + } + + private PositiveLiteral getQueryAtom(final Predicate predicate) { + final List toBeGroundedVariables = new ArrayList<>(predicate.getArity()); + for (int i = 0; i < predicate.getArity(); i++) { + toBeGroundedVariables.add(Expressions.makeUniversalVariable("X" + i)); + } + return Expressions.makePositiveLiteral(predicate, toBeGroundedVariables); + } + + private boolean checkAcyclicity(final AcyclicityNotion acyclNotion) { + this.validateNotClosed(); + if (this.reasonerState == ReasonerState.KB_NOT_LOADED) { + try { + this.load(); + } catch (final IOException e) { // FIXME: quick fix for https://github.com/knowsys/rulewerk/issues/128 + throw new RulewerkRuntimeException(e); + } + } + + CyclicCheckResult checkCyclic; + try { + checkCyclic = this.vLog.checkCyclic(acyclNotion.name()); + } catch (final NotStartedException e) { + throw new RulewerkRuntimeException(e.getMessage(), e); // should be impossible + } + return checkCyclic.equals(CyclicCheckResult.NON_CYCLIC); + } + + private void updateReasonerToKnowledgeBaseChanged() { + if (this.reasonerState.equals(ReasonerState.KB_LOADED) + || this.reasonerState.equals(ReasonerState.MATERIALISED)) { + + this.reasonerState = ReasonerState.KB_CHANGED; + } + } + + private void updateCorrectnessOnStatementsAdded() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + private void updateCorrectnessOnStatementsRemoved() { + if (this.reasonerState == ReasonerState.KB_CHANGED) { + // TODO refine + this.correctness = Correctness.INCORRECT; + } + } + + /** + * Check if reasoner is closed and throw an exception if it is. + * + * @throws ReasonerStateException + */ + void validateNotClosed() throws ReasonerStateException { + if (this.reasonerState == ReasonerState.CLOSED) { + LOGGER.error("Invalid operation requested on a closed reasoner object!"); + throw new ReasonerStateException(this.reasonerState, "Operation not allowed after closing reasoner!"); + } + } + + ReasonerState getReasonerState() { + return this.reasonerState; + } + + void setReasonerState(final ReasonerState reasonerState) { + this.reasonerState = reasonerState; + } +} diff --git a/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java new file mode 100644 index 000000000..0824c0d73 --- /dev/null +++ b/rulewerk-vlog/src/main/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverter.java @@ -0,0 +1,139 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
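// Illustration (not part of the diff): a minimal end-to-end use of the reasoner above,
// sketched from the public API in this file. KnowledgeBase, addStatement, and the
// Expressions factory methods makeFact and makeAbstractConstant come from rulewerk-core
// and are assumed here.
// static void exampleUsage() throws java.io.IOException {
//     KnowledgeBase kb = new KnowledgeBase();
//     kb.addStatement(Expressions.makeFact("p", Expressions.makeAbstractConstant("c")));
//     try (Reasoner reasoner = new VLogReasoner(kb)) {
//         reasoner.reason(); // loads the knowledge base and materialises; false on timeout
//         PositiveLiteral query = Expressions.makePositiveLiteral("p",
//                 Expressions.makeUniversalVariable("X"));
//         try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
//             answers.forEachRemaining(answer -> System.out.println(answer.getTerms()));
//         }
//     }
// }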
+ * #L% + */ + +import java.util.ArrayList; +import java.util.List; + +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; +import org.semanticweb.rulewerk.core.reasoner.implementation.QueryResultImpl; + +/** + * Utility class with static methods for converting from the VLog internal model + * ({@code karmaresearch.vlog} objects) to Rulewerk model API + * ({@code org.semanticweb.rulewerk.core.model.api}) objects. + * + * @author Irina Dragoste + * + */ +class VLogToModelConverter { + + /** + * Converts internal VLog query results (represented as arrays of + * {@link karmaresearch.vlog.Term}s) into Rulewerk model API QueryResults. + * + * @param vLogQueryResult an array of terms that represent an answer to a query. + * @return a QueryResult containing the corresponding {@code vLogQueryResult} as + * a List of {@link Term}s. + */ + static QueryResult toQueryResult(karmaresearch.vlog.Term[] vLogQueryResult) { + return new QueryResultImpl(toTermList(vLogQueryResult)); + } + + /** + * Converts an array of internal VLog terms ({@link karmaresearch.vlog.Term}) + * into the corresponding list of Rulewerk model API {@link Term}s. + * + * @param vLogTerms input terms array, to be converted to a list of + * corresponding {@link Term}s. + * @return list of {@link Term}s, where each element corresponds to the element + * in given {@code vLogTerms} at the same position. + */ + static List<Term> toTermList(karmaresearch.vlog.Term[] vLogTerms) { + final List<Term> terms = new ArrayList<>(vLogTerms.length); + for (final karmaresearch.vlog.Term vLogTerm : vLogTerms) { + terms.add(toTerm(vLogTerm)); + } + return terms; + } + + /** + * Converts an internal VLog term ({@link karmaresearch.vlog.Term}) to a + * Rulewerk model API {@link Term} of the same type and name. + * + * @param vLogTerm term to be converted + * @return a {@link Term} with the same name as the given {@code vLogTerm} and + * of the corresponding type. + */ + static Term toTerm(karmaresearch.vlog.Term vLogTerm) { + final String name = vLogTerm.getName(); + switch (vLogTerm.getTermType()) { + case CONSTANT: + return toConstant(name); + case BLANK: + return new NamedNullImpl(name); + case VARIABLE: + throw new IllegalArgumentException( + "VLog variables cannot be converted without knowing if they are universally or existentially quantified."); + default: + throw new IllegalArgumentException("Unexpected VLog term type: " + vLogTerm.getTermType()); + } + } + + /** + * Creates a {@link Constant} from the given VLog constant name.
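// Illustration (not part of the diff): constant-name formats recognised by toConstant
// below, with hypothetical inputs:
//   "<http://example.org/a>"  -> AbstractConstantImpl("http://example.org/a"), <> stripped
//   "\"1\"^^<http://www.w3.org/2001/XMLSchema#int>" -> DatatypeConstantImpl("1", "http://www.w3.org/2001/XMLSchema#int")
//   "\"hello\"@en"            -> LanguageStringConstantImpl("hello", "en")
//   "abc"                     -> AbstractConstantImpl("abc"), a relative IRI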
+ *
+ * @param vLogConstantName the string name used by VLog
+ * @return {@link Constant} object
+ */
+ static Constant toConstant(String vLogConstantName) {
+ final Constant constant;
+ if (vLogConstantName.charAt(0) == '<' && vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') {
+ // strip <> off of IRIs
+ constant = new AbstractConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1));
+ } else if (vLogConstantName.charAt(0) == '"') {
+ if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '>') {
+ final int startTypeIdx = vLogConstantName.lastIndexOf('<', vLogConstantName.length() - 2);
+ final String datatype = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length() - 1);
+ final String lexicalValue = vLogConstantName.substring(1, startTypeIdx - 3);
+ constant = new DatatypeConstantImpl(lexicalValue, datatype);
+ } else {
+ final int startTypeIdx = vLogConstantName.lastIndexOf('@', vLogConstantName.length() - 2);
+ if (startTypeIdx > -1) {
+ final String languageTag = vLogConstantName.substring(startTypeIdx + 1, vLogConstantName.length());
+ final String string = vLogConstantName.substring(1, startTypeIdx - 1);
+ constant = new LanguageStringConstantImpl(string, languageTag);
+ } else if (vLogConstantName.charAt(vLogConstantName.length() - 1) == '"'
+ && vLogConstantName.length() > 1) {
+ // This is already an unexpected case: untyped string constants should not
+ // occur. But if they do, this is our best guess on how to interpret them.
+ constant = new DatatypeConstantImpl(vLogConstantName.substring(1, vLogConstantName.length() - 1),
+ PrefixDeclarationRegistry.XSD_STRING);
+ } else {
+ throw new RulewerkRuntimeException("VLog returned a constant name '" + vLogConstantName
+ + "' that Rulewerk cannot make sense of.");
+ }
+ }
+ } else {
+ constant = new AbstractConstantImpl(vLogConstantName);
+ }
+ return constant;
+ }
+
+}
diff --git a/vlog4j-core/src/test/data/input/binaryFacts.csv b/rulewerk-vlog/src/test/data/input/binaryFacts.csv
similarity index 100%
rename from vlog4j-core/src/test/data/input/binaryFacts.csv
rename to rulewerk-vlog/src/test/data/input/binaryFacts.csv
diff --git a/vlog4j-core/src/test/data/input/constantD.csv b/rulewerk-vlog/src/test/data/input/constantD.csv
similarity index 100%
rename from vlog4j-core/src/test/data/input/constantD.csv
rename to rulewerk-vlog/src/test/data/input/constantD.csv
diff --git a/rulewerk-vlog/src/test/data/input/empty.csv b/rulewerk-vlog/src/test/data/input/empty.csv
new file mode 100644
index 000000000..e69de29bb
diff --git a/rulewerk-vlog/src/test/data/input/invalidFormatNtFacts.nt b/rulewerk-vlog/src/test/data/input/invalidFormatNtFacts.nt
new file mode 100644
index 000000000..86e6232df
--- /dev/null
+++ b/rulewerk-vlog/src/test/data/input/invalidFormatNtFacts.nt
@@ -0,0 +1,2 @@
+

. + . diff --git a/rulewerk-vlog/src/test/data/input/ternaryFacts.nt b/rulewerk-vlog/src/test/data/input/ternaryFacts.nt new file mode 100644 index 000000000..427f86fb9 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/ternaryFacts.nt @@ -0,0 +1,2 @@ + . + "test string" . \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/ternaryFactsZipped.nt.gz b/rulewerk-vlog/src/test/data/input/ternaryFactsZipped.nt.gz new file mode 100644 index 000000000..749fda951 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/ternaryFactsZipped.nt.gz differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats new file mode 100644 index 000000000..4fecafaf2 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/kbstats differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 new file mode 100644 index 000000000..d6db588e8 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx new file mode 100644 index 000000000..e80ed9ccb Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p0/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 new file mode 100644 index 000000000..a903574af Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx new file mode 100644 index 000000000..ed5f2a0b2 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p1/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 new file mode 100644 index 000000000..6bebb85a7 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx new file mode 100644 index 000000000..587e420f8 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p2/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 new file mode 100644 index 000000000..d825e1ad7 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx new file mode 100644 index 000000000..e80ed9ccb Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p3/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 new file mode 100644 index 
000000000..15294a501 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx new file mode 100644 index 000000000..ed5f2a0b2 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p4/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 new file mode 100644 index 000000000..938838043 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx new file mode 100644 index 000000000..587e420f8 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/p5/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 new file mode 100644 index 000000000..15b24cdc3 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx new file mode 100644 index 000000000..6fce09e5e Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree new file mode 100644 index 000000000..c9426b2a5 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/_sample/tree/tree differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 new file mode 100644 index 000000000..52c0cfce6 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx new file mode 100644 index 000000000..1e2cec818 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb new file mode 100644 index 000000000..4c0edb8f3 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx new file mode 100644 index 000000000..8e4251486 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/sb.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/tree new file mode 100644 index 000000000..c9426b2a5 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/dict/0/tree differ diff --git 
a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/0 new file mode 100644 index 000000000..b2f1913e6 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/idx new file mode 100644 index 000000000..373449e87 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree new file mode 100644 index 000000000..c9426b2a5 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/invdict/0/tree differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats new file mode 100644 index 000000000..1ee9a0536 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/kbstats differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 new file mode 100644 index 000000000..36efe2295 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx new file mode 100644 index 000000000..e80ed9ccb Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p0/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 new file mode 100644 index 000000000..9c47accf3 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx new file mode 100644 index 000000000..28fe59636 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p1/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 new file mode 100644 index 000000000..fe6a93a9d --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx new file mode 100644 index 000000000..b5b5e052c Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p2/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 new file mode 100644 index 000000000..51ab423f0 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx new file mode 100644 index 000000000..e80ed9ccb Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p3/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 
b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 new file mode 100644 index 000000000..01cbafe4d Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx new file mode 100644 index 000000000..28fe59636 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p4/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 new file mode 100644 index 000000000..7926fc053 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0 @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx new file mode 100644 index 000000000..b5b5e052c Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/p5/0.idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 new file mode 100644 index 000000000..0635f5470 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/0 differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx new file mode 100644 index 000000000..f8cf365b4 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/idx differ diff --git a/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree new file mode 100644 index 000000000..c9426b2a5 Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/tridentTernaryFacts/tree/tree differ diff --git a/vlog4j-core/src/test/data/input/unaryFacts.csv b/rulewerk-vlog/src/test/data/input/unaryFacts.csv similarity index 100% rename from vlog4j-core/src/test/data/input/unaryFacts.csv rename to rulewerk-vlog/src/test/data/input/unaryFacts.csv diff --git a/rulewerk-vlog/src/test/data/input/unaryFactsCD.csv b/rulewerk-vlog/src/test/data/input/unaryFactsCD.csv new file mode 100644 index 000000000..18ebd8521 --- /dev/null +++ b/rulewerk-vlog/src/test/data/input/unaryFactsCD.csv @@ -0,0 +1,2 @@ +c +d diff --git a/rulewerk-vlog/src/test/data/input/unaryFactsZipped.csv.gz b/rulewerk-vlog/src/test/data/input/unaryFactsZipped.csv.gz new file mode 100644 index 000000000..1222c03fa Binary files /dev/null and b/rulewerk-vlog/src/test/data/input/unaryFactsZipped.csv.gz differ diff --git a/rulewerk-vlog/src/test/data/output/.keep b/rulewerk-vlog/src/test/data/output/.keep new file mode 100644 index 000000000..e69de29bb diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java new file mode 100644 index 000000000..5b329f9ba --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AddDataSourceTest.java @@ -0,0 +1,270 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; + +public class AddDataSourceTest { + + private static final String CSV_FILE_c1_c2_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFacts.csv"; + + private static final String CSV_FILE_c_d_PATH = FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"; + + private final Set> csvFile_c1_c2_Content = new HashSet<>( + Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c1")), + Arrays.asList(Expressions.makeAbstractConstant("c2")))); + + private final Set> csvFile_c_d_Content = new HashSet<>( + Arrays.asList(Arrays.asList(Expressions.makeAbstractConstant("c")), + Arrays.asList(Expressions.makeAbstractConstant("d"))));; + + @Test + public void testAddDataSourceExistentDataForDifferentPredicates() throws IOException { + final Predicate predicateParity1 = Expressions.makePredicate("p", 1); + final Constant constantA = Expressions.makeAbstractConstant("a"); + final Fact factPredicatePArity2 = Expressions.makeFact("p", Arrays.asList(constantA, constantA)); + final Fact factPredicateQArity1 = Expressions.makeFact("q", Arrays.asList(constantA)); + final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(factPredicatePArity2); + kb.addStatement(factPredicateQArity1); + kb.addStatement(new DataSourceDeclarationImpl(predicateLArity1, dataSource)); + kb.addStatement(new DataSourceDeclarationImpl(predicateParity1, dataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateLArity1, Expressions.makeUniversalVariable("x")), false)) { + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + } + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateParity1, Expressions.makeUniversalVariable("x")), false)) { + 
assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + } + + } + } + + @Test + public void testAddDataSourceBeforeLoading() throws IOException { + final Predicate predicateP = Expressions.makePredicate("p", 1); + final Predicate predicateQ = Expressions.makePredicate("q", 1); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); + + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); + reasoner.load(); + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + } + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + } + + } + } + + @Test + public void testAddDataSourceAfterLoading() throws IOException { + final Predicate predicateP = Expressions.makePredicate("p", 1); + final Predicate predicateQ = Expressions.makePredicate("q", 1); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); + + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); + + reasoner.load(); + + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); + + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); + } + + // there is no fact for predicate Q loaded in the reasoner + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) { + assertFalse(queryResult.hasNext()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, queryResult.getCorrectness()); + } + } + } + + @Test + public void testAddDataSourceAfterReasoning() throws IOException { + final Predicate predicateP = Expressions.makePredicate("p", 1); + final Predicate predicateQ = Expressions.makePredicate("q", 1); + final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH); + + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + + kb.addStatement(new DataSourceDeclarationImpl(predicateP, dataSource)); + + reasoner.reason(); + + kb.addStatement(new DataSourceDeclarationImpl(predicateQ, dataSource)); + + try (final QueryResultIterator queryResult = reasoner.answerQuery( + Expressions.makePositiveLiteral(predicateP, Expressions.makeUniversalVariable("x")), true)) { + assertEquals(this.csvFile_c1_c2_Content, QueryResultsUtils.collectQueryResults(queryResult)); + 
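// added note: the knowledge base changed after materialisation, so the
+ // reasoner's answers may no longer reflect it and are flagged INCORRECT
+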
assertEquals(Correctness.INCORRECT, queryResult.getCorrectness());
+ }
+ // there is no fact for predicate Q loaded in the reasoner
+ try (final QueryResultIterator queryResult = reasoner.answerQuery(
+ Expressions.makePositiveLiteral(predicateQ, Expressions.makeUniversalVariable("x")), true)) {
+ assertFalse(queryResult.hasNext());
+ assertEquals(Correctness.SOUND_BUT_INCOMPLETE, queryResult.getCorrectness());
+ }
+ }
+ }
+
+ // FIXME decide how to handle datasources with multiple predicates
+ @Ignore
+ @Test
+ public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws IOException {
+ final Predicate predicate = Expressions.makePredicate("p", 1);
+ final DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH);
+ final DataSource dataSource2 = new CsvFileDataSource(CSV_FILE_c_d_PATH);
+
+ final KnowledgeBase kb = new KnowledgeBase();
+ kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1));
+ kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource2));
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+ try (QueryResultIterator queryResult = reasoner.answerQuery(
+ Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) {
+ System.out.println(QueryResultsUtils.collectQueryResults(queryResult));
+ }
+ }
+ }
+
+ // FIXME decide how to handle datasources with multiple predicates
+ @Ignore
+ @Test
+ public void testAddDataSourceNoFactsForPredicate() throws IOException {
+ final Predicate predicate = Expressions.makePredicate("p", 1);
+ final DataSource dataSource = new CsvFileDataSource(CSV_FILE_c1_c2_PATH);
+ final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1),
+ Arrays.asList(Expressions.makeAbstractConstant("a")));
+
+ final KnowledgeBase kb = new KnowledgeBase();
+ kb.addStatement(fact);
+ kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource));
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+ try (QueryResultIterator queryResult = reasoner.answerQuery(
+ Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) {
+ QueryResultsUtils.collectQueryResults(queryResult);
+ }
+ }
+ }
+
+ @Test
+ public void testAddMultipleDataSourcesForPredicateAfterReasoning() throws IOException {
+ final Predicate predicate = Expressions.makePredicate("p", 1);
+ final DataSource dataSource1 = new CsvFileDataSource(CSV_FILE_c1_c2_PATH);
+ final DataSource dataSource2 = new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv");
+
+ final KnowledgeBase kb = new KnowledgeBase();
+ kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource1));
+ kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource2));
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.reason();
+ try (QueryResultIterator queryResult = reasoner.answerQuery(
+ Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) {
+ final Set<List<Term>> expectedAnswers = new HashSet<>(this.csvFile_c1_c2_Content);
+ expectedAnswers.addAll(this.csvFile_c_d_Content);
+
+ assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult));
+ assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness());
+
+ }
+ }
+ }
+
+ @Test
+ public void testAddDataSourceAndFactsForPredicateAfterReasoning() throws IOException {
+ final Predicate predicate = Expressions.makePredicate("p", 1);
+ final DataSource dataSource = new
CsvFileDataSource(CSV_FILE_c1_c2_PATH);
+ final Fact fact = Expressions.makeFact(Expressions.makePredicate("p", 1),
+ Arrays.asList(Expressions.makeAbstractConstant("a")));
+
+ final KnowledgeBase kb = new KnowledgeBase();
+ kb.addStatement(fact);
+ kb.addStatement(new DataSourceDeclarationImpl(predicate, dataSource));
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.reason();
+ try (QueryResultIterator queryResult = reasoner.answerQuery(
+ Expressions.makePositiveLiteral(predicate, Expressions.makeUniversalVariable("x")), true)) {
+ final Set<List<Term>> expectedAnswers = new HashSet<>(this.csvFile_c1_c2_Content);
+ expectedAnswers.add(Arrays.asList(Expressions.makeAbstractConstant("a")));
+
+ assertEquals(expectedAnswers, QueryResultsUtils.collectQueryResults(queryResult));
+ assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness());
+ }
+ }
+ }
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java
new file mode 100644
index 000000000..30d844c3c
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/AnswerQueryTest.java
@@ -0,0 +1,341 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertTrue;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.mockito.internal.util.collections.Sets;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.QueryResult;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.TermType;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.Algorithm;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy;
+
+public class AnswerQueryTest {
+
+ @Test
+ public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOException {
+ final String predicate = "p";
+ final Constant constantC = Expressions.makeAbstractConstant("c");
+ final Constant constantD = Expressions.makeAbstractConstant("d");
+ final Variable x = Expressions.makeUniversalVariable("X");
+ final Variable y = Expressions.makeUniversalVariable("Y");
+ final Variable z = Expressions.makeUniversalVariable("Z");
+ final Fact fact = Expressions.makeFact(predicate, Arrays.asList(constantC, constantC, constantD));
+
+ final boolean includeBlanks = false;
+ final Set<List<Term>> factCCD = Collections.singleton(Arrays.asList(constantC, constantC, constantD));
+
+ final KnowledgeBase kb = new KnowledgeBase();
+ kb.addStatement(fact);
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+
+ final PositiveLiteral queryAtomXYZ = Expressions.makePositiveLiteral(predicate, x, y, z);
+ try (final QueryResultIterator queryResultIteratorXYZ = reasoner.answerQuery(queryAtomXYZ, includeBlanks)) {
+ final Set<List<Term>> queryResultsXYZ = QueryResultsUtils.collectQueryResults(queryResultIteratorXYZ);
+ assertEquals(factCCD, queryResultsXYZ);
+ }
+
+ final PositiveLiteral queryAtomXXZ = Expressions.makePositiveLiteral(predicate, x, x, z);
+ try (final QueryResultIterator queryResultIteratorXXZ = reasoner.answerQuery(queryAtomXXZ, includeBlanks)) {
+ final Set<List<Term>> queryResultsXXZ = QueryResultsUtils.collectQueryResults(queryResultIteratorXXZ);
+ assertEquals(factCCD, queryResultsXXZ);
+ }
+
+ final PositiveLiteral queryAtomXXX = Expressions.makePositiveLiteral(predicate, x, x, x);
+ try (final QueryResultIterator queryResultIteratorXXX = reasoner.answerQuery(queryAtomXXX, includeBlanks)) {
+ assertFalse(queryResultIteratorXXX.hasNext());
+ }
+
+ final PositiveLiteral queryAtomXYX = Expressions.makePositiveLiteral(predicate, x, y, x);
+ try (final QueryResultIterator queryResultIteratorXYX = reasoner.answerQuery(queryAtomXYX, includeBlanks)) {
+
+ assertFalse(queryResultIteratorXYX.hasNext());
+ }
+ }
+ }
+
+ @Test
+ public void testIDBQuerySameBlankSubstitutesSameVariableName() throws IOException {
+ final String predicate = "p";
+ final Variable x = Expressions.makeUniversalVariable("X");
+ final Variable y = Expressions.makeExistentialVariable("Y");
+ final Variable z = Expressions.makeExistentialVariable("Z");
+ final PositiveLiteral pYY =
Expressions.makePositiveLiteral(predicate, y, y);
+ final PositiveLiteral pYZ = Expressions.makePositiveLiteral(predicate, y, z);
+ final Rule pX__pYY_pYZ = Expressions.makeRule(Expressions.makePositiveConjunction(pYY, pYZ),
+ Expressions.makeConjunction(Expressions.makePositiveLiteral(predicate, x)));
+ assertEquals(Sets.newSet(y, z), pX__pYY_pYZ.getExistentialVariables().collect(Collectors.toSet()));
+
+ final KnowledgeBase kb = new KnowledgeBase();
+
+ kb.addStatements(pX__pYY_pYZ);
+ kb.addStatement(Expressions.makeFact(predicate, Arrays.asList(Expressions.makeAbstractConstant("c"))));
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE);
+ reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES);
+ reasoner.load();
+ reasoner.reason();
+
+ // expected p(_:b1, _:b1), p(_:b1, _:b2)
+ try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(pYZ, true)) {
+ final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator);
+ assertTrue(queryResults.size() == 2);
+ final ArrayList<List<Term>> queryResultsArray = new ArrayList<>(queryResults);
+ assertEquals(queryResultsArray.get(0).get(0), queryResultsArray.get(1).get(0)); // y
+ assertNotEquals(queryResultsArray.get(0).get(1), queryResultsArray.get(1).get(1)); // y, z
+ }
+
+ // expected p(_:b1, _:b1)
+ try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(pYY, true)) {
+ final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator);
+ assertTrue(queryResults.size() == 1);
+ final ArrayList<List<Term>> queryResultsArray = new ArrayList<>(queryResults);
+ assertEquals(queryResultsArray.get(0).get(0), queryResultsArray.get(0).get(1)); // y
+ }
+ }
+ }
+
+ @Test
+ public void testIDBQuerySameIndividualSubstitutesSameVariableName() throws IOException {
+ final String predicate = "p";
+ final Variable x = Expressions.makeUniversalVariable("X");
+ final Variable y = Expressions.makeUniversalVariable("Y");
+ final Variable z = Expressions.makeExistentialVariable("Z");
+ final Variable t = Expressions.makeExistentialVariable("T");
+ final PositiveLiteral pXYYZZT = Expressions.makePositiveLiteral(predicate, x, y, y, z, z, t);
+ final Rule pXY__pXYYZZT = Expressions.makeRule(pXYYZZT, Expressions.makePositiveLiteral(predicate, x, y));
+ assertEquals(Sets.newSet(z, t), pXY__pXYYZZT.getExistentialVariables().collect(Collectors.toSet()));
+ final Constant constantC = Expressions.makeAbstractConstant("c");
+ final Constant constantD = Expressions.makeAbstractConstant("d");
+
+ final Fact factPcd = Expressions.makeFact(predicate, Arrays.asList(constantC, constantD));
+
+ final KnowledgeBase kb = new KnowledgeBase();
+
+ kb.addStatements(pXY__pXYYZZT, factPcd);
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+ reasoner.reason();
+
+ final PositiveLiteral queryAtomXYYZZT = pXYYZZT;
+ try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYZZT, true)) {
+ assertTrue(queryResultIterator.hasNext());
+ final List<Term> queryResultTerms = queryResultIterator.next().getTerms();
+ assertEquals(6, queryResultTerms.size());
+
+ assertEquals(constantC, queryResultTerms.get(0)); // x
+ assertEquals(constantD, queryResultTerms.get(1)); // y
+ assertEquals(constantD, queryResultTerms.get(2)); // y
+
+ final Term blankForZ = queryResultTerms.get(3); // z
+ assertEquals(TermType.NAMED_NULL, blankForZ.getType());
+ assertEquals(blankForZ, queryResultTerms.get(4)); // z
+
+ final Term
blankForT = queryResultTerms.get(5); // t + assertEquals(TermType.NAMED_NULL, blankForT.getType()); + + assertNotEquals(queryResultTerms.get(4), blankForT); // z, t + + assertFalse(queryResultIterator.hasNext()); + } + + // x and y do not have the same constant substitution + final PositiveLiteral queryAtomXXYZZT = Expressions.makePositiveLiteral(predicate, x, x, y, z, z, t); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXXYZZT, true)) { + assertFalse(queryResultIterator.hasNext()); + } + // z and t do not have the same blank substitution + final PositiveLiteral queryAtomXYYZZZ = Expressions.makePositiveLiteral(predicate, x, y, y, z, z, z); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYZZZ, true)) { + assertFalse(queryResultIterator.hasNext()); + } + // universal and existential variables do not have the same substitution + // y and z do not have the same constant substitution + final PositiveLiteral queryAtomXYYYZT = Expressions.makePositiveLiteral(predicate, x, y, y, y, z, t); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYYZT, true)) { + assertFalse(queryResultIterator.hasNext()); + } + + // y and t do not have the same constant substitution + final PositiveLiteral queryAtomXYYZZY = Expressions.makePositiveLiteral(predicate, x, y, y, z, z, y); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYZZY, true)) { + assertFalse(queryResultIterator.hasNext()); + } + + } + } + + @Test + public void queryResultWithBlanks() throws IOException { + final Variable vx = Expressions.makeUniversalVariable("x"); + final Variable vy = Expressions.makeExistentialVariable("y"); + // P(x) -> Q(y) + final Rule existentialRule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vy), + Expressions.makePositiveLiteral("p", vx)); + assertEquals(Sets.newSet(vy), existentialRule.getExistentialVariables().collect(Collectors.toSet())); + final Constant constantC = Expressions.makeAbstractConstant("c"); + final Fact fact = Expressions.makeFact("p", Arrays.asList(constantC)); + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("q", Expressions.makeUniversalVariable("?x")); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(existentialRule, fact); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.reason(); + + try (final QueryResultIterator queryResultIteratorIncludeBlanks = reasoner.answerQuery(queryAtom, true)) { + assertTrue(queryResultIteratorIncludeBlanks.hasNext()); + final QueryResult queryResult = queryResultIteratorIncludeBlanks.next(); + assertTrue(queryResult.getTerms().size() == 1); + final Term queryResultTerm = queryResult.getTerms().get(0); + assertEquals(TermType.NAMED_NULL, queryResultTerm.getType()); + assertFalse(queryResultIteratorIncludeBlanks.hasNext()); + } + + try (final QueryResultIterator queryResultIteratorExcludeBlanks = reasoner.answerQuery(queryAtom, false)) { + assertFalse(queryResultIteratorExcludeBlanks.hasNext()); + } + } + } + + @Test + public void queryEmptyKnowledgeBaseBeforeReasoning() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P", + Expressions.makeUniversalVariable("?x")); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true)) 
{
+ final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator);
+ assertEquals(Collections.EMPTY_SET, queryResults);
+ }
+ }
+ }
+
+ @Test
+ public void queryEmptyKnowledgeBaseAfterReasoning() throws IOException {
+ final KnowledgeBase kb = new KnowledgeBase();
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+
+ reasoner.reason();
+
+ final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P",
+ Expressions.makeUniversalVariable("?x"));
+ try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true)) {
+ final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator);
+ assertEquals(Collections.EMPTY_SET, queryResults);
+ }
+ }
+ }
+
+ @Test
+ public void queryEmptyRules() throws IOException {
+ final KnowledgeBase kb = new KnowledgeBase();
+ final Fact fact = Expressions.makeFact("P", Arrays.asList(Expressions.makeAbstractConstant("c")));
+ kb.addStatement(fact);
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+
+ final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("P",
+ Expressions.makeUniversalVariable("?x"));
+
+ reasoner.reason();
+
+ try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true)) {
+ final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator);
+ final Set<List<Term>> expectedQueryResults = Collections
+ .singleton(Arrays.asList(Expressions.makeAbstractConstant("c")));
+ assertEquals(expectedQueryResults, queryResults);
+ }
+ }
+ }
+
+ @Test
+ public void queryEmptyFacts() throws IOException {
+ final Variable vx = Expressions.makeUniversalVariable("x");
+ final Rule rule = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx),
+ Expressions.makePositiveLiteral("p", vx));
+
+ final KnowledgeBase kb = new KnowledgeBase();
+ kb.addStatement(rule);
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+
+ final PositiveLiteral queryAtom1 = Expressions.makePositiveLiteral("p",
+ Expressions.makeUniversalVariable("?x"));
+ try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom1, true)) {
+ Assert.assertFalse(queryResultIterator.hasNext());
+ }
+
+ final PositiveLiteral queryAtom2 = Expressions.makePositiveLiteral("q",
+ Expressions.makeUniversalVariable("?x"));
+ try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom2, true)) {
+ Assert.assertFalse(queryResultIterator.hasNext());
+ }
+
+ reasoner.reason();
+
+ try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom1, true)) {
+ assertFalse(queryResultIteratorAfterReason.hasNext());
+ }
+
+ try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom2, true)) {
+ assertFalse(queryResultIteratorAfterReason.hasNext());
+ }
+ }
+ }
+
+}
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java
similarity index 71%
rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvTest.java
rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java
index b2bb43fc5..3619ce9c8 100644
---
a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/ExportQueryResultToCsvTest.java
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ExportQueryResultToCsvFileTest.java
@@ -1,10 +1,10 @@
-package org.semanticweb.vlog4j.core.reasoner.vlog;
+package org.semanticweb.rulewerk.reasoner.vlog;
/*-
* #%L
- * VLog4j Core Components
+ * Rulewerk VLog Reasoner Support
* %%
- * Copyright (C) 2018 VLog4j Developers
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -27,41 +27,40 @@
import java.util.List;
import org.junit.Test;
-import org.semanticweb.vlog4j.core.reasoner.CsvFileUtils;
import karmaresearch.vlog.Atom;
import karmaresearch.vlog.EDBConfigurationException;
+import karmaresearch.vlog.NonExistingPredicateException;
import karmaresearch.vlog.NotStartedException;
import karmaresearch.vlog.VLog;
-public class ExportQueryResultToCsvTest {
- private static final String CSV_EXPORT_FOLDER = "src/test/data/output/";
+public class ExportQueryResultToCsvFileTest {
@Test
- public void testExportUnaryPredicateFacts() throws EDBConfigurationException, NotStartedException, IOException {
+ public void testExportUnaryPredicateFacts() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException, IOException {
final String[][] argsAMatrix = { { "c1" }, { "c2" } };
final List<List<String>> expectedQueryResult = Arrays.asList(Arrays.asList("c1"), Arrays.asList("c2"));
final VLog vLog = new VLog();
vLog.addData("p", argsAMatrix);
- final String csvFilePath = CSV_EXPORT_FOLDER + "unaryFacts.csv";
+ final String csvFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "unaryFacts.csv";
vLog.writeQueryResultsToCsv(new Atom("p", VLogExpressions.makeVariable("x")), csvFilePath);
- final List<List<String>> queryResult = CsvFileUtils.getCSVContent(csvFilePath);
+ final List<List<String>> queryResult = FileDataSourceTestUtils.getCSVContent(csvFilePath);
assertEquals(expectedQueryResult, queryResult);
}
@Test
- public void testExportBinaryPredicateFacts() throws EDBConfigurationException, NotStartedException, IOException {
+ public void testExportBinaryPredicateFacts() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException, IOException {
final String[][] argsAMatrix = { { "c1", "c2" }, { "c3", "c4" } };
final List<List<String>> expectedQueryResult = Arrays.asList(Arrays.asList("c1", "c2"), Arrays.asList("c3", "c4"));
final VLog vLog = new VLog();
vLog.addData("p", argsAMatrix);
- final String csvFilePath = CSV_EXPORT_FOLDER + "binaryFacts.csv";
+ final String csvFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "binaryFacts.csv";
vLog.writeQueryResultsToCsv(new Atom("p", VLogExpressions.makeVariable("x"), VLogExpressions.makeVariable("y")), csvFilePath);
- final List<List<String>> queryResult = CsvFileUtils.getCSVContent(csvFilePath);
+ final List<List<String>> queryResult = FileDataSourceTestUtils.getCSVContent(csvFilePath);
assertEquals(expectedQueryResult, queryResult);
}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java
new file mode 100644
index 000000000..73128188c
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/FileDataSourceTestUtils.java
@@ -0,0 +1,127 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ *
Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVParser;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.reasoner.Algorithm;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource;
+
+/**
+ * Utility class for reading from and writing to data source files.
+ *
+ * @author Christian Lewe
+ * @author Irina Dragoste
+ *
+ */
+public final class FileDataSourceTestUtils {
+
+ public static final String INPUT_FOLDER = "src/test/data/input/";
+ public static final String OUTPUT_FOLDER = "src/test/data/output/";
+
+ public static final String unzippedUnaryCsvFileRoot = "unaryFacts";
+ public static final String zippedUnaryCsvFileRoot = "unaryFactsZipped";
+ public static final String unzippedNtFileRoot = "ternaryFacts";
+ public static final String zippedNtFileRoot = "ternaryFactsZipped";
+ public static final String binaryCsvFileNameRoot = "binaryFacts";
+ public static final String invalidFormatNtFileNameRoot = "invalidFormatNtFacts";
+
+ /*
+ * This is a utility class. Therefore, it is best practice to do the following:
+ * (1) Make the class final, (2) make its constructor private, (3) make all its
+ * fields and methods static. This prevents the class's instantiation and
+ * inheritance.
+ */
+ private FileDataSourceTestUtils() {
+
+ }
+
+ /**
+ * Collects the content of given {@code csvFile} into a List of lines, where
+ * each line is represented as a List of String entries.
+ *
+ * @param csvFile file to be read
+ * @return content of given {@code csvFile} as a List of lines, where each line
+ *         is represented as a List of String entries.
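+ *         For illustration (added example): a CSV file with the two lines
+ *         {@code c1} and {@code c2} yields {@code [[c1], [c2]]}.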
+ * @throws IOException if an I/O error occurs regarding given {@code csvFile}
+ */
+ public static List<List<String>> getCSVContent(final String csvFile) throws IOException {
+ final List<List<String>> content = new ArrayList<>();
+
+ // close the reader and parser even if parsing fails
+ try (final Reader in = new FileReader(csvFile); final CSVParser parser = CSVFormat.DEFAULT.parse(in)) {
+ parser.forEach(csvRecord -> {
+ final List<String> line = new ArrayList<>();
+ csvRecord.forEach(line::add);
+ content.add(line);
+ });
+ }
+ return content;
+ }
+
+ public static void testConstructor(final FileDataSource fileDataSource, final String expectedFileName) throws IOException {
+ assertEquals(expectedFileName, fileDataSource.getName());
+ }
+
+ public static void testLoadEmptyFile(final Predicate predicate, final PositiveLiteral queryAtom,
+ final FileDataSource emptyFileDataSource) throws IOException {
+
+ final KnowledgeBase kb = new KnowledgeBase();
+ kb.addStatement(new DataSourceDeclarationImpl(predicate, emptyFileDataSource));
+
+ try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.load();
+ reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE);
+ reasoner.reason();
+ testNoFactsOverPredicate(reasoner, queryAtom);
+
+ reasoner.resetReasoner();
+ reasoner.load();
+ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE);
+ reasoner.reason();
+ testNoFactsOverPredicate(reasoner, queryAtom);
+ }
+ }
+
+ public static void testNoFactsOverPredicate(final Reasoner reasoner, final PositiveLiteral queryAtom) {
+ try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, true)) {
+ assertFalse(answerQuery.hasNext());
+ }
+ try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, false)) {
+ assertFalse(answerQuery.hasNext());
+ }
+ }
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java
new file mode 100644
index 000000000..87f5eb910
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/GeneratedAnonymousIndividualsTest.java
@@ -0,0 +1,162 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import org.junit.Test; +import org.mockito.internal.util.collections.Sets; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy; + +public class GeneratedAnonymousIndividualsTest { + + private static final String includeBlanksFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "include_blanks.csv"; + private static final String excludeBlanksFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "exclude_blanks.csv"; + + private static final Variable vx = Expressions.makeUniversalVariable("x"); + private static final Variable vy = Expressions.makeExistentialVariable("y"); + private static final Variable vz = Expressions.makeExistentialVariable("z"); + private static final String p = "p"; + + // rule: P(?x) -> P(?x,!y), P(?x,!z) + private static final Rule existentialRule = Expressions.makeRule( + Expressions.makePositiveConjunction(Expressions.makePositiveLiteral(p, vx, vy), + Expressions.makePositiveLiteral(p, vx, vz)), + Expressions.makeConjunction(Expressions.makePositiveLiteral(p, vx))); + + private static KnowledgeBase kb = new KnowledgeBase(); + // fact: P(c) + private static final Constant constantC = Expressions.makeAbstractConstant("c"); + private static final Fact fact = Expressions.makeFact(p, Arrays.asList(constantC)); + + // query: P(?x,?y) ? 
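+ // added note: with the fact P(c), the chase is expected to derive
+ // P(c, _:b1) and P(c, _:b2) for two distinct nulls; see
+ // checkTwoDistinctBlanksGenerated below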
+ final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(p, Expressions.makeUniversalVariable("?x"),
+ Expressions.makeUniversalVariable("?y"));
+
+ static {
+ // y,z existential variables that can introduce blanks (anonymous individuals)
+ assertEquals(Sets.newSet(vy, vz), existentialRule.getExistentialVariables().collect(Collectors.toSet()));
+
+ kb.addStatements(existentialRule, fact);
+ }
+
+ @Test
+ public void testBlanksSkolemChaseNoRuleRewrite() throws IOException {
+
+ try (final Reasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE);
+ assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy());
+
+ reasoner.reason();
+ reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true);
+
+ checkTwoDistinctBlanksGenerated(reasoner);
+
+ }
+ }
+
+ @Test
+ public void testBlanksSkolemChaseSplitHeadPieces() throws IOException {
+
+ try (final Reasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE);
+ // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes:
+ // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) -> P(?x,?y)}, {P(?x,?y,?z) ->
+ // P(?x,?z)} }
+ reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES);
+
+ reasoner.reason();
+ reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true);
+
+ checkTwoDistinctBlanksGenerated(reasoner);
+
+ }
+ }
+
+ @Test
+ public void testBlanksRestrictedChaseNoRuleRewrite() throws IOException {
+
+ try (final Reasoner reasoner = new VLogReasoner(kb)) {
+ reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE);
+ assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy());
+
+ reasoner.reason();
+ reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true);
+
+ checkTwoDistinctBlanksGenerated(reasoner);
+
+ }
+ }
+
+ @Test
+ public void testBlanksRestrictedChaseSplitHeadPieces() throws IOException {
+
+ try (final Reasoner reasoner = new VLogReasoner(kb)) {
+
+ reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE);
+ // the rule {P(?x) -> P(?x,!y), P(?x,!z)} after split becomes:
+ // { {P(?x) -> P(?x,!y,!z)}, {P(?x,?y,?z) -> P(?x,?y)}, {P(?x,?y,?z) ->
+ // P(?x,?z)} }
+ reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES);
+
+ reasoner.reason();
+ reasoner.exportQueryAnswersToCsv(this.queryAtom, includeBlanksFilePath, true);
+
+ checkTwoDistinctBlanksGenerated(reasoner);
+ }
+ }
+
+ private void checkTwoDistinctBlanksGenerated(final Reasoner reasoner) throws IOException {
+ // expected facts: P(c, _:b1), P(c, _:b2)
+ final List<List<String>> csvContentIncludeBlanks = FileDataSourceTestUtils.getCSVContent(includeBlanksFilePath);
+ assertTrue(csvContentIncludeBlanks.size() == 2);
+ for (final List<String> queryResult : csvContentIncludeBlanks) {
+ assertTrue(queryResult.size() == 2);
+ assertEquals(queryResult.get(0), "c");
+ }
+ final String blank1 = csvContentIncludeBlanks.get(0).get(1);
+ final String blank2 = csvContentIncludeBlanks.get(1).get(1);
+ assertNotEquals(blank1, blank2);
+ assertNotEquals("c", blank1);
+ assertNotEquals("c", blank2);
+
+ reasoner.exportQueryAnswersToCsv(this.queryAtom, excludeBlanksFilePath, false);
+ final List<List<String>> csvContentExcludeBlanks = FileDataSourceTestUtils.getCSVContent(excludeBlanksFilePath);
+ assertTrue(csvContentExcludeBlanks.isEmpty());
+ }
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java
new file
index 000000000..5497489b4
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LargeAritiesTest.java
@@ -0,0 +1,139 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+import static org.junit.Assert.assertArrayEquals;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Test;
+
+import karmaresearch.vlog.Atom;
+import karmaresearch.vlog.EDBConfigurationException;
+import karmaresearch.vlog.NonExistingPredicateException;
+import karmaresearch.vlog.NotStartedException;
+import karmaresearch.vlog.Rule;
+import karmaresearch.vlog.Term;
+import karmaresearch.vlog.TermQueryResultIterator;
+import karmaresearch.vlog.VLog;
+import karmaresearch.vlog.VLog.RuleRewriteStrategy;
+
+/**
+ * Tests that reasoning and querying with predicates of large arities is
+ * allowed.
+ *
+ * @author Irina Dragoste
+ *
+ */
+public class LargeAritiesTest {
+
+	final static int PREDICATE_ARITY_LIMIT = 255;
+	final static int VARIABLES_PER_RULE_LIMIT = 255;
+
+	@Test
+	public void testLargeNumberOfVariablesPerRule() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException {
+		testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void testNumberOfVariablesPerRuleExceedsLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException {
+		testNumberOfVariablesPerRule(VARIABLES_PER_RULE_LIMIT + 1);
+	}
+
+	@Test
+	public void testLargePredicateArities() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException {
+		testPredicateArity(PREDICATE_ARITY_LIMIT);
+	}
+
+	@Test(expected = IllegalArgumentException.class)
+	public void testPredicateAritiesExceedLimit() throws NotStartedException, EDBConfigurationException, NonExistingPredicateException {
+		testPredicateArity(PREDICATE_ARITY_LIMIT + 1);
+	}
+
+	private void testNumberOfVariablesPerRule(int variablesPerRuleLimit)
+			throws EDBConfigurationException, NotStartedException, NonExistingPredicateException {
+		final VLog vLog = new VLog();
+
+		final String[][] pFactArguments = { { "c" } };
+
+		final List<Atom> body = new ArrayList<>();
+		for (int i = 1; i <= variablesPerRuleLimit; i++) {
+			final String predicateName = "P" + i;
+			// Pi(xi)
+			body.add(VLogExpressions.makeAtom(predicateName, VLogExpressions.makeVariable("x" + i)));
+			// Pi(c)
+			vLog.addData(predicateName, pFactArguments);
+		}
+		final Atom head = VLogExpressions.makeAtom("q", VLogExpressions.makeVariable("x1"));
+
+		// q(x1) :- P1(x1),...,Pn(xn)
+		final Rule rule = VLogExpressions.makeRule(head, body.toArray(new Atom[body.size()]));
+
+		vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE);
+		vLog.materialize(true);
+		try (final TermQueryResultIterator queryResultIterator = vLog.query(head, true, false)) {
+			assertTrue(queryResultIterator.hasNext());
+			final Term[] queryResult = queryResultIterator.next();
+			assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, queryResult);
+
+			assertFalse(queryResultIterator.hasNext());
+		}
+		vLog.stop();
+	}
+
+	private void testPredicateArity(final int predicateArityLimit)
+			throws EDBConfigurationException, NotStartedException, NonExistingPredicateException {
+		final List<String> constants = new ArrayList<>();
+		for (int i = 0; i < predicateArityLimit; i++) {
+			constants.add("c" + i);
+		}
+		final String[][] pFactArguments = { constants.toArray(new String[predicateArityLimit]) };
+
+		final List<Term> variables = new ArrayList<>();
+		for (int i = 0; i < predicateArityLimit; i++) {
+			variables.add(VLogExpressions.makeVariable("x" + i));
+		}
+
+		final Term[] terms = variables.toArray(new Term[variables.size()]);
+		final Rule rule = VLogExpressions.makeRule(VLogExpressions.makeAtom("q", terms),
+				VLogExpressions.makeAtom("p", terms));
+		final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", terms);
+
+		final VLog vLog = new VLog();
+		vLog.addData("p", pFactArguments);
+
+		vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE);
+		vLog.materialize(true);
+		try (final TermQueryResultIterator queryResultIterator = vLog.query(queryAtomQPredicate, true, false)) {
+			assertTrue(queryResultIterator.hasNext());
+			final Term[] queryResult = queryResultIterator.next();
+			assertTrue(queryResult.length == predicateArityLimit);
+
+			assertFalse(queryResultIterator.hasNext());
+		}
+		vLog.stop();
+	}
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java
new file mode 100644
index 000000000..57691fc99
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/LoggingTest.java
@@ -0,0 +1,216 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.Arrays; + +import org.junit.BeforeClass; +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.LogLevel; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; + +public class LoggingTest { + + public static final String LOGS_DIRECTORY = "src/test/data/logs/"; + + private static final Variable vx = Expressions.makeUniversalVariable("x"); + // p(?x) -> q(?x) + private static final PositiveLiteral ruleHeadQx = Expressions.makePositiveLiteral("q", vx); + private static final PositiveLiteral ruleBodyPx = Expressions.makePositiveLiteral("p", vx); + private static final Rule rule = Expressions.makeRule(ruleHeadQx, ruleBodyPx); + + private static final Constant constantC = Expressions.makeAbstractConstant("c"); + private static final Fact factPc = Expressions.makeFact("p", Arrays.asList(constantC)); + + private static final KnowledgeBase kb = new KnowledgeBase(); + + static { + kb.addStatements(rule, factPc); + } + + @BeforeClass + public static void emptyLogDirectory() { + + final File logsDir = new File(LOGS_DIRECTORY); + + if (!logsDir.exists()) { + logsDir.mkdir(); + } + + final File[] listFiles = logsDir.listFiles(); + for (final File file : listFiles) { + file.delete(); + } + } + + // TODO remaining tests: change log file + // TODO remaining tests: test that the log level and the log files can be set + // any time + + @Test + public void testSetLogFileNull() throws IOException { + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(null); + reasoner.setLogLevel(LogLevel.INFO); + + reasoner.reason(); + } + // TODO test that logging is redirected to system output + } + + @Test + public void testSetLogFileInexistent() throws IOException { + final String inexistentFilePath = LOGS_DIRECTORY + "a/b"; + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(inexistentFilePath); + assertFalse(new File(inexistentFilePath).exists()); + reasoner.setLogLevel(LogLevel.INFO); + + reasoner.reason(); + } + // TODO test that logging is redirected to system output + assertFalse(new File(inexistentFilePath).exists()); + } + + @Test(expected = NullPointerException.class) + public void testSetLogLevelNull() { + try (final Reasoner instance = new VLogReasoner(new KnowledgeBase())) { + instance.setLogLevel(null); + } + } + + @Test + public void testSetLogFileAppendsToFile() throws IOException { + final String logFilePath = LOGS_DIRECTORY + "-testSetLogFileAppendsToFile.log"; + assertFalse(new File(logFilePath).exists()); + int countLinesBeforeReset = 0; + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogLevel(LogLevel.INFO); + reasoner.setLogFile(logFilePath); + reasoner.reason(); + + countLinesBeforeReset = readFile(logFilePath); + assertTrue(countLinesBeforeReset > 0); + + 
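+			// Note (assumption, consistent with the assertions around this call):
+			// resetReasoner() discards the materialisation but keeps the configured
+			// log file, so reasoning again should append to it rather than truncate.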
reasoner.resetReasoner(); + reasoner.reason(); + } + final int countLinesAfterReset = readFile(logFilePath); + // the logger appends to the same file after reset + assertTrue(countLinesAfterReset > countLinesBeforeReset); + + } + + @Test + public void testLogLevelInfo() throws IOException { + final String logFilePath = LOGS_DIRECTORY + "-testLogLevelInfo.log"; + assertFalse(new File(logFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + + reasoner.setLogLevel(LogLevel.INFO); + reasoner.setLogFile(logFilePath); + reasoner.reason(); + reasoner.setLogLevel(LogLevel.INFO); + } + final int countLinesReasonLogLevelInfo = readFile(logFilePath); + assertTrue(countLinesReasonLogLevelInfo > 0); + + } + + @Test + public void testLogLevelDebug() throws IOException { + final String logFilePath = LOGS_DIRECTORY + "-testLogLevelDebug.log"; + assertFalse(new File(logFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + + reasoner.setLogLevel(LogLevel.DEBUG); + reasoner.setLogFile(logFilePath); + reasoner.reason(); + reasoner.setLogLevel(LogLevel.DEBUG); + reasoner.close(); + } + final int countLinesReasonLogLevelDebug = readFile(logFilePath); + assertTrue(countLinesReasonLogLevelDebug > 0); + + } + + @Test + public void testLogLevelDefault() throws IOException { + final String defaultLogFilePath = LOGS_DIRECTORY + "-testLogLevelDefault.log"; + assertFalse(new File(defaultLogFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(defaultLogFilePath); + + reasoner.reason(); + reasoner.close(); + } + final int countLinesReasonLogLevelDefault = readFile(defaultLogFilePath); + + final String warningLogFilePath = LOGS_DIRECTORY + "-testLogLevelDefault2.log"; + assertFalse(new File(warningLogFilePath).exists()); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setLogFile(warningLogFilePath); + reasoner.setLogLevel(LogLevel.WARNING); + reasoner.reason(); + reasoner.close(); + } + final int countLinesReasonLogLevelWarning = readFile(warningLogFilePath); + + assertEquals(countLinesReasonLogLevelDefault, countLinesReasonLogLevelWarning); + } + + private int readFile(final String logFilePath) throws IOException, FileNotFoundException { + int countLines = 0; + assertTrue(new File(logFilePath).exists()); + try (BufferedReader br = new BufferedReader(new FileReader(logFilePath))) { + String sCurrentLine; + while ((sCurrentLine = br.readLine()) != null) { + assertFalse(sCurrentLine.isEmpty()); + countLines++; + } + } + + return countLines; + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java new file mode 100644 index 000000000..c3a2f3c99 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ModelToVLogConverterTest.java @@ -0,0 +1,320 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.UUID;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.NamedNull;
+import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.StatementVisitor;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl;
+import org.semanticweb.rulewerk.core.model.implementation.PositiveLiteralImpl;
+import org.semanticweb.rulewerk.core.model.implementation.RenamedNamedNull;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.RuleRewriteStrategy;
+import org.semanticweb.rulewerk.core.reasoner.implementation.Skolemization;
+
+public class ModelToVLogConverterTest {
+
+	@Test
+	public void testToVLogTermVariable() {
+		final Variable variable = Expressions.makeUniversalVariable("var");
+		final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.VARIABLE, "var");
+
+		final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(variable);
+
+		assertEquals(expectedVLogTerm, vLogTerm);
+	}
+
+	@Test
+	public void testToVLogTermAbstractConstant() {
+		final Constant constant = Expressions.makeAbstractConstant("const");
+		final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.CONSTANT, "const");
+
+		final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant);
+
+		assertEquals(expectedVLogTerm, vLogTerm);
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName()));
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant));
+	}
+
+	@Test
+	public void testToVLogTermAbstractConstantIri() {
+		final Constant constant = Expressions.makeAbstractConstant("http://example.org");
+		final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.CONSTANT, "<http://example.org>");
+
+		final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant);
+
+		assertEquals(expectedVLogTerm, vLogTerm);
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName()));
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant));
+	}
+
+	@Test
+	public void testToVLogTermDatatypeConstant() {
+		final Constant constant = Expressions.makeDatatypeConstant("c", "http://example.org");
+		final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"^^<http://example.org>");
+
+		final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant);
+
+		assertEquals(expectedVLogTerm, vLogTerm);
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName()));
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant));
+	}
+
+	@Test
+	public void testToVLogTermLanguageStringConstant() {
+		final Constant constant = Expressions.makeLanguageStringConstant("c", "en");
+		final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.CONSTANT, "\"c\"@en");
+
+		final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant);
+
+		assertEquals(expectedVLogTerm, vLogTerm);
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstantName(constant.getName()));
+		assertEquals(expectedVLogTerm.getName(), TermToVLogConverter.getVLogNameForConstant(constant));
+	}
+
+	@Test
+	public void testToVLogTermBlank() {
+		final NamedNull blank = new NamedNullImpl("blank");
+		final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.BLANK, "blank");
+
+		final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(blank);
+
+		assertEquals(expectedVLogTerm, vLogTerm);
+	}
+
+	@Test
+	public void testToVLogTermBlankSkolemization() {
+		final Skolemization skolemization = new Skolemization();
+		final NamedNull blank = new NamedNullImpl("blank");
+
+		final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank);
+
+		assertNotEquals("blank", vLogSkolemConstant);
+		// generated ids differ by Skolemization instance, but should have the same
+		// length:
+		assertEquals(skolemization.getSkolemConstantName(blank).length(), vLogSkolemConstant.length());
+	}
+
+	@Test
+	public void testToVLogTermBlankRenamedSkolemization() {
+		final Skolemization skolemization = new Skolemization();
+		final UUID uuid = UUID.randomUUID();
+		final NamedNull blank = new RenamedNamedNull(uuid);
+
+		final String vLogSkolemConstant = TermToVLogConverter.getVLogNameForNamedNull(blank);
+
+		assertEquals(skolemization.getSkolemConstantName(blank), vLogSkolemConstant);
+	}
+
+	@Test
+	public void testToVLogTermArray() {
+		final Variable vx = Expressions.makeUniversalVariable("x");
+		final Variable vxToo = Expressions.makeUniversalVariable("x");
+		final Variable vy = Expressions.makeUniversalVariable("y");
+		final Constant cx = Expressions.makeAbstractConstant("x");
+		final NamedNull bx = new NamedNullImpl("x");
+		final List<Term> terms = Arrays.asList(vx, cx, vxToo, bx, vy);
+
+		final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.VARIABLE, "x");
+		final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.VARIABLE, "y");
+		final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term(
+				karmaresearch.vlog.Term.TermType.CONSTANT, "x");
+		final karmaresearch.vlog.Term expectedBx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK,
+				"x");
+		final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx,
+				expectedVy };
+
+		final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms);
+		assertArrayEquals(expectedTermArray, vLogTermArray);
+	}
+
+	@Test
+	public void testToVLogTermArrayEmpty() {
+		final List<Term> terms = new ArrayList<>();
+		final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms);
+
+		assertNotNull(vLogTermArray);
+		assertEquals(0, vLogTermArray.length);
+	}
+
+	@Test
+	public void testToVLogFactTuples() {
+		final Constant c1 = Expressions.makeAbstractConstant("1");
+		final Constant c2 = Expressions.makeAbstractConstant("2");
+		final Constant c3 = Expressions.makeAbstractConstant("3");
+		final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(c1));
+		final Fact atom2 = Expressions.makeFact("p2", Arrays.asList(c2, c3));
+
+		final String[][] vLogTuples = ModelToVLogConverter.toVLogFactTuples(Arrays.asList(atom1, atom2));
+
+		final String[][] expectedTuples = { { "1" }, { "2", "3" } };
+		assertArrayEquals(expectedTuples, vLogTuples);
+	}
+
+	@Test
+	public void testToVLogFactTupleNulls() {
+		final Skolemization skolemization = new Skolemization();
+		final UUID uuid = UUID.randomUUID();
+		final NamedNull n = new RenamedNamedNull(uuid);
+		final Fact atom1 = Expressions.makeFact("p1", Arrays.asList(n));
+
+		final String[] expectedTuple = { skolemization.getSkolemConstantName(n) };
+
+		final String[] actualTuple = ModelToVLogConverter.toVLogFactTuple(atom1);
+
+		assertArrayEquals(expectedTuple, actualTuple);
+	}
+
+	@Test(expected = RulewerkRuntimeException.class)
+	public void testToVLogFactTupleUnsupported() {
+		// We need a fact that accepts exception-causing terms in the first place:
+		class NonValidatingFact extends PositiveLiteralImpl implements Fact {
+
+			public NonValidatingFact(Predicate predicate, List<Term> terms) {
+				super(predicate, terms);
+			}
+
+			@Override
+			public <T> T accept(StatementVisitor<T> statementVisitor) {
+				return statementVisitor.visit(this);
+			}
+
+		}
+
+		final Variable x = Expressions.makeUniversalVariable("X");
+		final Fact atom1 = new NonValidatingFact(Expressions.makePredicate("p1", 1), Arrays.asList(x));
+
+		ModelToVLogConverter.toVLogFactTuple(atom1);
+	}
+
+	@Test
+	public void testToVLogPredicate() {
+		final Predicate predicate = Expressions.makePredicate("pred", 1);
+		final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate);
+		assertEquals("pred-1", vLogPredicate);
+	}
+
+	@Test
+	public void testToVLogAtom() {
+		final Constant c = Expressions.makeAbstractConstant("c");
+		final Variable x = Expressions.makeUniversalVariable("x");
+		final NamedNull b = new NamedNullImpl("_:b");
+		final PositiveLiteral atom = Expressions.makePositiveLiteral("pred", c, x, b);
+
+		final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT,
+				"c");
+		final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE,
+				"x");
+		final karmaresearch.vlog.Term expectedB = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK,
+				"_:b");
+
+		final String expectedPredicateName = "pred" + ModelToVLogConverter.PREDICATE_ARITY_SUFFIX_SEPARATOR + 3;
+		final karmaresearch.vlog.Term[] expectedTerms = { expectedC, expectedX, expectedB };
+		final karmaresearch.vlog.Atom expectedAtom = new karmaresearch.vlog.Atom(expectedPredicateName, expectedTerms);
+
+		final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(atom);
+		assertEquals(expectedAtom, vLogAtom);
+	}
+
+	@Test
+	public void testToVLogRuleArray() {
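+		// Sketch of the assumed conversion, matching the expectations built below:
+		// rule2, q2(?x,!v) :- q1(?x,?w,?z), q(?y,?w), q(?x,?y,?z), maps to VLog
+		// predicates named "q2-2", "q1-3", "q-2" and "q-3" (name-arity), and the
+		// existential variable v becomes the VLog variable "!v".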
final Variable x = Expressions.makeUniversalVariable("x"); + final Variable y = Expressions.makeUniversalVariable("y"); + final Variable z = Expressions.makeUniversalVariable("z"); + final Variable w = Expressions.makeUniversalVariable("w"); + final Variable v = Expressions.makeExistentialVariable("v"); + final PositiveLiteral atomP1X = Expressions.makePositiveLiteral("p1", x); + final PositiveLiteral atomP2XY = Expressions.makePositiveLiteral("p2", x, y); + final PositiveLiteral atomP3YZ = Expressions.makePositiveLiteral("p3", y, z); + final Rule rule1 = Expressions.makeRule(atomP1X, atomP2XY, atomP3YZ); + final PositiveLiteral atomQXYZ = Expressions.makePositiveLiteral("q", x, y, z); + final PositiveLiteral atomQYW = Expressions.makePositiveLiteral("q", y, w); + final PositiveLiteral atomQ1XWZ = Expressions.makePositiveLiteral("q1", x, w, z); + final PositiveLiteral atomQ2XV = Expressions.makePositiveLiteral("q2", x, v); + final Rule rule2 = Expressions.makeRule(atomQ2XV, atomQ1XWZ, atomQYW, atomQXYZ); + + final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "x"); + final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "y"); + final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "z"); + final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "w"); + final karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "!v"); + final karmaresearch.vlog.Atom expAtomP1X = new karmaresearch.vlog.Atom("p1-1", expX); + final karmaresearch.vlog.Atom expAtomP2XY = new karmaresearch.vlog.Atom("p2-2", expX, expY); + final karmaresearch.vlog.Atom expAtomP3YZ = new karmaresearch.vlog.Atom("p3-2", expY, expZ); + final karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomP1X }, + new karmaresearch.vlog.Atom[] { expAtomP2XY, expAtomP3YZ }); + final karmaresearch.vlog.Atom expAtomQXYZ = new karmaresearch.vlog.Atom("q-3", expX, expY, expZ); + final karmaresearch.vlog.Atom expAtomQYW = new karmaresearch.vlog.Atom("q-2", expY, expW); + final karmaresearch.vlog.Atom expAtomQ1XWZ = new karmaresearch.vlog.Atom("q1-3", expX, expW, expZ); + final karmaresearch.vlog.Atom expAtomQ2XV = new karmaresearch.vlog.Atom("q2-2", expX, expV); + final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule( + new karmaresearch.vlog.Atom[] { expAtomQ2XV }, + new karmaresearch.vlog.Atom[] { expAtomQ1XWZ, expAtomQYW, expAtomQXYZ }); + + final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter + .toVLogRuleArray(Arrays.asList(rule1, rule2)); + final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, + expectedRule2 }; + assertArrayEquals(expectedRuleArray, vLogRuleArray); + } + + @Test + public void testVLogRuleRewritingStrategy() { + assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, + ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); + assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.AGGRESSIVE, + ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES)); + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java new file 
mode 100644
index 000000000..7b2d519f6
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnswerCountTest.java
@@ -0,0 +1,314 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Conjunction;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+
+public class QueryAnswerCountTest {
+
+	private static final Predicate predP = Expressions.makePredicate("P", 1);
+	private static final Predicate predQ = Expressions.makePredicate("Q", 1);
+	private static final Predicate predR = Expressions.makePredicate("R", 2);
+	private static final Variable x = Expressions.makeUniversalVariable("x");
+	private static final Variable y = Expressions.makeExistentialVariable("y");
+	private static final Constant c = Expressions.makeAbstractConstant("c");
+	private static final Constant d = Expressions.makeAbstractConstant("d");
+	private static final Constant e = Expressions.makeAbstractConstant("e");
+	private static final Constant f = Expressions.makeAbstractConstant("f");
+
+	private static final PositiveLiteral Px = Expressions.makePositiveLiteral(predP, x);
+	private static final PositiveLiteral Qx = Expressions.makePositiveLiteral(predQ, x);
+	private static final PositiveLiteral Qy = Expressions.makePositiveLiteral(predQ, y);
+	private static final PositiveLiteral Rxx = Expressions.makePositiveLiteral(predR, x, x);
+	private static final PositiveLiteral Rxy = Expressions.makePositiveLiteral(predR, x, y);
+	private static final PositiveLiteral Ryy = Expressions.makePositiveLiteral(predR, y, y);
+
+	private static final Conjunction<PositiveLiteral> conRxyQy = Expressions.makePositiveConjunction(Rxy, Qy);
+	private static final Conjunction<PositiveLiteral> conRxxRxyRyy = Expressions.makePositiveConjunction(Rxx, Rxy, Ryy);
+	private static final Conjunction<Literal> conPx = Expressions.makeConjunction(Px);
+
+	private static final Rule QxPx = Expressions.makeRule(Qx, Px);
+	private static final Rule RxyQyPx = Expressions.makeRule(conRxyQy, conPx);
+	private static final Rule RxxRxyRyyPx = Expressions.makeRule(conRxxRxyRyy, conPx);
+
+	private static final Fact factPc = Expressions.makeFact(predP, c);
+	private static final Fact factPd = Expressions.makeFact(predP, d);
+	private static final Fact factPe =
Expressions.makeFact(predP, e); + + private static final Fact factQc = Expressions.makeFact(predQ, c); + private static final Fact factQd = Expressions.makeFact(predQ, d); + private static final Fact factQe = Expressions.makeFact(predQ, e); + private static final Fact factQf = Expressions.makeFact(predQ, f); + + private static final PositiveLiteral Rdy = Expressions.makePositiveLiteral(predR, d, y); + private static final PositiveLiteral Rey = Expressions.makePositiveLiteral(predR, e, y); + private static final PositiveLiteral Rxd = Expressions.makePositiveLiteral(predR, x, d); + private static final PositiveLiteral Rxe = Expressions.makePositiveLiteral(predR, x, e); + + @Test + public void noFactsnoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); + } + } + + @Test + public void noFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); + } + } + + @Test + public void noFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); + } + } + + @Test + public void pFactsNoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(factQe, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQc, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factQd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(factPe, true).getCount()); + + } + } + + @Test + public void pFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + 
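+			// The boolean passed to countQueryAnswers presumably toggles whether
+			// answers containing blanks (named nulls) are counted; this universal
+			// rule introduces no blanks, so both settings should agree below.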
assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPc, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPd, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQc, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQd, true).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPc, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factPd, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQc, false).getCount()); + assertEquals(1, reasoner.countQueryAnswers(factQd, false).getCount()); + + } + } + + @Test + public void pFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + } + } + + @Test + public void qFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + } + } + + @Test + public void qFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(0, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + } + } + + @Test + 
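+	// With facts P(c), P(d), Q(e), Q(f) and rule Q(?x) :- P(?x), materialisation
+	// should add Q(c) and Q(d): P keeps 2 answers, Q grows to 4, and R stays empty.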
public void pFactsQFactsUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + } + } + + @Test + public void pFactsQFactsExistentialRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getCount()); + } + } + + @Test + public void pFactsQFactsExistentialAndUniversalRule() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, factQe, factQf, QxPx, RxyQyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.countQueryAnswers(Px).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + assertEquals(6, reasoner.countQueryAnswers(Qx).getCount()); + assertEquals(6, reasoner.countQueryAnswers(Qx, true).getCount()); + assertEquals(4, reasoner.countQueryAnswers(Qx, false).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxy, false).getCount()); + + assertEquals(1, reasoner.countQueryAnswers(Rdy, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rey, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, true).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, true).getCount()); + + assertEquals(0, reasoner.countQueryAnswers(Rdy, false).getCount()); + assertEquals(0, 
reasoner.countQueryAnswers(Rey, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxd, false).getCount()); + assertEquals(0, reasoner.countQueryAnswers(Rxe, false).getCount()); + } + } + + @Test + public void pFactsLiteralWithSameVariables() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd, RxxRxyRyyPx); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + assertEquals(2, reasoner.countQueryAnswers(Px, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Px, false).getCount()); + + assertEquals(4, reasoner.countQueryAnswers(Rxx, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxx, false).getCount()); + + assertEquals(6, reasoner.countQueryAnswers(Rxy, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Rxy, false).getCount()); + + assertEquals(4, reasoner.countQueryAnswers(Ryy, true).getCount()); + assertEquals(2, reasoner.countQueryAnswers(Ryy, false).getCount()); + + } + } +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java new file mode 100644 index 000000000..9f66c9b0a --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryAnsweringCorrectnessTest.java @@ -0,0 +1,789 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryAnswerCount; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; + +public class QueryAnsweringCorrectnessTest { + + private static final Predicate predP = Expressions.makePredicate("predP", 1); + private static final Predicate predQ = Expressions.makePredicate("predQ", 1); + private static final Variable x = Expressions.makeUniversalVariable("x"); + private static final Constant c = Expressions.makeAbstractConstant("c"); + private static final Constant d = Expressions.makeAbstractConstant("d"); + private static final Constant g = Expressions.makeAbstractConstant("g"); + private static final Constant h = Expressions.makeAbstractConstant("h"); + private static final Constant e = Expressions.makeAbstractConstant("e"); + private static final Constant f = Expressions.makeAbstractConstant("f"); + + private static final PositiveLiteral ruleHeadQx = Expressions.makePositiveLiteral(predQ, x); + private static final PositiveLiteral ruleBodyPx = Expressions.makePositiveLiteral(predP, x); + + private static final Rule ruleQxPx = Expressions.makeRule(ruleHeadQx, ruleBodyPx); + + private static final Fact factPc = Expressions.makeFact(predP, c); + private static final Fact factPd = Expressions.makeFact(predP, d); + + private static final Fact factQg = Expressions.makeFact(predQ, g); + private static final Fact factQh = Expressions.makeFact(predQ, h); + + private static final InMemoryDataSource datasource = new VLogInMemoryDataSource(1, 2); + + { + datasource.addTuple("e"); + datasource.addTuple("f"); + } + + @Test + public void testCorrectnessKBChanges() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + + reasoner.reason(); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(ruleQxPx); + + // there are no facts for Q-1 predicate + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + reasoner.reason(); + + 
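+			// Expected pattern, repeated below for each kind of change (facts, data
+			// sources, rules): a KB change degrades answers to SOUND_BUT_INCOMPLETE or
+			// INCORRECT, and the next reason() call restores SOUND_AND_COMPLETE.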
// there are no facts for Q-1 predicate + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(factQg); + reasoner.reason(); + + final Set> expectedAnswers_g = new HashSet<>(Arrays.asList(Collections.singletonList(g))); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(factQh); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + reasoner.reason(); + + final Set> expectedAnswers_g_h = new HashSet<>( + Arrays.asList(Collections.singletonList(g), Collections.singletonList(h))); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g_h, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatements(factPc, factPd); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g_h, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + reasoner.reason(); + + final Set> expectedAnswers_g_h_c_d = new HashSet<>(Arrays.asList(Collections.singletonList(g), + Collections.singletonList(h), Collections.singletonList(c), Collections.singletonList(d))); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g_h_c_d, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + final Set> expectedAnswers_c_d = new HashSet<>( + Arrays.asList(Collections.singletonList(c), Collections.singletonList(d))); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(new DataSourceDeclarationImpl(predP, datasource)); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g_h_c_d, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + reasoner.reason(); + + final Set> 
expectedAnswers_g_h_c_d_e_f = new HashSet<>(Arrays.asList( + Collections.singletonList(g), Collections.singletonList(h), Collections.singletonList(c), + Collections.singletonList(d), Collections.singletonList(e), Collections.singletonList(f))); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g_h_c_d_e_f, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + final Set> expectedAnswers_c_d_e_f = new HashSet<>(Arrays.asList(Collections.singletonList(c), + Collections.singletonList(d), Collections.singletonList(e), Collections.singletonList(f))); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d_e_f, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(new DataSourceDeclarationImpl(predQ, datasource)); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g_h_c_d_e_f, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d_e_f, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + reasoner.reason(); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_g_h_c_d_e_f, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d_e_f, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(ruleQxPx); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d_e_f, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testCorrectnessKBChangesNoRules() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + + reasoner.reason(); + + // there are no facts for P-1 predicate + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(factPc); + reasoner.reason(); + + final Set> expectedAnswers_c = new HashSet<>(Arrays.asList(Collections.singletonList(c))); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = 
QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(factPd); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + reasoner.reason(); + + final Set> expectedAnswers_c_d = new HashSet<>( + Arrays.asList(Collections.singletonList(c), Collections.singletonList(d))); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatements(factPc, factPd); + kb.addStatement(factPc); + kb.addStatement(factPd); + kb.addStatements(factPc, factPd, factPc, factPd); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(new DataSourceDeclarationImpl(predP, datasource)); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + + reasoner.reason(); + + final Set> expectedAnswers_c_d_e_f = new HashSet<>(Arrays.asList(Collections.singletonList(c), + Collections.singletonList(d), Collections.singletonList(e), Collections.singletonList(f))); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d_e_f, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(new DataSourceDeclarationImpl(predP, datasource)); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d_e_f, queryAnswers); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(ruleQxPx); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator); + assertEquals(expectedAnswers_c_d_e_f, queryAnswers); + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + + } + + @Test + public void testCorrectnessKBChangesReset() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + + reasoner.reason(); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + kb.addStatement(factPc); + 
+			reasoner.reason();
+
+			final Set<List<Term>> expectedAnswers_c = new HashSet<>(Arrays.asList(Collections.singletonList(c)));
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) {
+				assertFalse(resultIterator.hasNext());
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) {
+				final Set<List<Term>> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator);
+				assertEquals(expectedAnswers_c, queryAnswers);
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+
+			kb.addStatement(ruleQxPx);
+			reasoner.reason();
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) {
+				final Set<List<Term>> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator);
+				assertEquals(expectedAnswers_c, queryAnswers);
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) {
+				final Set<List<Term>> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator);
+				assertEquals(expectedAnswers_c, queryAnswers);
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+
+			reasoner.resetReasoner();
+			reasoner.reason();
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) {
+				final Set<List<Term>> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator);
+				assertEquals(expectedAnswers_c, queryAnswers);
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) {
+				final Set<List<Term>> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator);
+				assertEquals(expectedAnswers_c, queryAnswers);
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+
+		}
+	}
+
+	@Test
+	public void testCorrectnessNoKBChanges() throws IOException {
+
+		final KnowledgeBase kb = new KnowledgeBase();
+		try (VLogReasoner reasoner = new VLogReasoner(kb)) {
+
+			kb.addStatement(factPc);
+			reasoner.reason();
+
+			final Set<List<Term>> expectedAnswers_c = new HashSet<>(Arrays.asList(Collections.singletonList(c)));
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) {
+				final Set<List<Term>> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator);
+				assertEquals(expectedAnswers_c, queryAnswers);
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+
+			kb.addStatements(factPc, factPc);
+			kb.addStatement(factPc);
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) {
+				final Set<List<Term>> queryAnswers = QueryResultsUtils.collectQueryResults(resultIterator);
+				assertEquals(expectedAnswers_c, queryAnswers);
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+		}
+	}
+
+	@Test
+	public void answerQuery_PredicateNotLoaded_Materialized() throws IOException {
+		KnowledgeBase kb = new KnowledgeBase();
+
+		try (VLogReasoner reasoner = new VLogReasoner(kb)) {
+			kb.addStatements(factPc);
+			reasoner.reason();
+
+			try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) {
+				assertFalse(resultIterator.hasNext());
+				assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness());
+			}
+		}
+	}
+
+	@Test
+	public void answerQuery_PredicateNotLoaded_KbChanged()
throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + assertFalse(resultIterator.hasNext()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void countQueryAnswers_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + final QueryAnswerCount resultIterator = reasoner.countQueryAnswers(ruleHeadQx); + assertEquals(0, resultIterator.getCount()); + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + + @Test + public void countQueryAnswers_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + final QueryAnswerCount resultIterator = reasoner.countQueryAnswers(ruleBodyPx); + assertEquals(0, resultIterator.getCount()); + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + + } + + @Test + public void exportQueryAnswersToCsv_PredicateNotLoaded_Materialized() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.addStatements(factPc); + reasoner.reason(); + + // TODO mock file or something + String csvFilePath = ".csv"; + Correctness correctness = reasoner.exportQueryAnswersToCsv(ruleHeadQx, csvFilePath, true); + + assertEquals(Correctness.SOUND_AND_COMPLETE, correctness); + } + + } + + @Test + public void exportQueryAnswersToCsv_PredicateNotLoaded_KbChanged() throws IOException { + KnowledgeBase kb = new KnowledgeBase(); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.addStatements(factPc); + + // TODO mock file or something + String csvFilePath = ".csv"; + Correctness correctness = reasoner.exportQueryAnswersToCsv(ruleBodyPx, csvFilePath, true); + + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, correctness); + } + } + + @Test + public void testMaterialisationIncomplete() throws IOException { + final Variable y = Expressions.makeUniversalVariable("y"); + final Variable z = Expressions.makeExistentialVariable("z"); + + final Predicate predR = Expressions.makePredicate("predR", 2); + + final PositiveLiteral ruleBody_R_x_y = Expressions.makePositiveLiteral(predR, x, y); + final PositiveLiteral ruleHead_R_y_z = Expressions.makePositiveLiteral(predR, y, z); + // R(?x, ?y) -> R(?y, !z) + final Rule rule = Expressions.makeRule(ruleHead_R_y_z, ruleBody_R_x_y); + + final Fact fact_R_c_d = Expressions.makeFact(predR, c, d); + final Fact fact_R_d_e = Expressions.makeFact(predR, d, e); + final Fact fact_R_e_c = Expressions.makeFact(predR, e, c); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc); + kb.addStatements(fact_R_c_d, fact_R_d_e, fact_R_e_c); + kb.addStatements(rule); + + try (VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); + reasoner.setReasoningTimeout(1); + assertFalse(reasoner.reason()); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBody_R_x_y, true)) { + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + + 
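+			// a longer timeout still cannot complete the infinite Skolem chase, so reasoning stays incomplete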
reasoner.setReasoningTimeout(2); + assertFalse(reasoner.reason()); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBody_R_x_y, true)) { + assertEquals(Correctness.SOUND_BUT_INCOMPLETE, resultIterator.getCorrectness()); + } + + reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + reasoner.setReasoningTimeout(null); + assertTrue(reasoner.reason()); + + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBody_R_x_y, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + + } + } + + @Test(expected = ReasonerStateException.class) + public void testStatementRemovalBeforeLoad() { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatement(ruleQxPx); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + } + } + } + + @Test(expected = ReasonerStateException.class) + public void testStatementsListRemovalBeforeLoad() { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(Arrays.asList(ruleQxPx, factPd)); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + } + } + } + + @Test(expected = ReasonerStateException.class) + public void testStatementsArrayRemovalBeforeLoad() { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(ruleQxPx, factPd); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + } + } + } + + @Test + public void testStatementRemovalAfterLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + kb.removeStatement(ruleQxPx); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsListRemovalAfterLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + kb.removeStatements(Arrays.asList(ruleQxPx, factPd)); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsArrayRemovalAfterLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + kb.removeStatements(ruleQxPx, factPd); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementRemovalBeforeReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatement(ruleQxPx); + reasoner.reason(); + try (final QueryResultIterator resultIterator = 
reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsListRemovalBeforeReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(Arrays.asList(ruleQxPx, factPd)); + reasoner.reason(); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsArrayRemovalBeforeReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(ruleQxPx, factPd); + reasoner.reason(); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementRemovalAfterReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatement(ruleQxPx); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsListRemovalAfterReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(Arrays.asList(factPc, ruleQxPx)); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsArrayRemovalAfterReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(factPc, ruleQxPx); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementNotRemovedAfterReason() throws IOException { + final Fact newFact = Expressions.makeFact("newPred", c); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatement(newFact); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsListNotRemovedAfterReason() throws IOException { + final Fact newFact = Expressions.makeFact("newPred", c); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(Arrays.asList(newFact)); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + 
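+				// newFact was never part of the knowledge base, so removing it does not invalidate the materialisation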
assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testStatementsArrayNotRemovedAfterReason() throws IOException { + final Fact newFact = Expressions.makeFact("newPred", c); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(newFact, newFact); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testRemoveAndAddStatements() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(ruleQxPx); + kb.addStatement(factPd); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testRemoveAndAddSameStatementOnlyFacts() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(factPc); + kb.addStatement(factPc); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + + @Test + public void testRemoveAndAddStatementsOnlyFacts() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(factPc, factPd); + kb.addStatement(factPc); + try (final QueryResultIterator resultIterator = reasoner.answerQuery(ruleBodyPx, true)) { + assertEquals(Correctness.INCORRECT, resultIterator.getCorrectness()); + } + } + } + +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java similarity index 65% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java index 237f0ec2d..32af07014 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultsUtils.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/QueryResultsUtils.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * VLog4j Core Components + * Rulewerk VLog Reasoner Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,31 +26,38 @@
 import java.util.List;
 import java.util.Set;
 
-import org.semanticweb.vlog4j.core.model.api.QueryResult;
-import org.semanticweb.vlog4j.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.QueryResult;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
 
 /**
  * Utility class with static methods for collecting the results of a query for
  * testing purposes.
- * 
+ *
  * @author Irina Dragoste
  *
  */
 final class QueryResultsUtils {
+	/*
+	 * This is a utility class. Therefore, it is best practice to do the following:
+	 * (1) Make the class final, (2) make its constructor private, (3) make all its
+	 * fields and methods static. This prevents the class's instantiation and
+	 * inheritance.
+	 */
 
 	private QueryResultsUtils() {
+
 	}
 
 	/**
 	 * Iterates through all the results and collects their term lists in a Set.
 	 * Asserts that there are no duplicate results. Closes the iterator after
 	 * collecting the last result.
-	 * 
-	 * @param queryResultIterator
-	 *            iterator for all {@link QueryResult}s of a query.
+	 *
+	 * @param queryResultIterator iterator for all {@link QueryResult}s of a query.
 	 * @return a set of all query result terms ({@link QueryResult#getTerms()}).
 	 */
-	static Set<List<Term>> collectQueryResults(QueryResultIterator queryResultIterator) {
+	static Set<List<Term>> collectQueryResults(final QueryResultIterator queryResultIterator) {
 		final Set<List<Term>> results = new HashSet<>();
 		queryResultIterator.forEachRemaining(queryResult -> {
 			final boolean isUnique = results.add(queryResult.getTerms());
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java
new file mode 100644
index 000000000..5ec62cfbb
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/ReasonerTimeoutTest.java
@@ -0,0 +1,182 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.core.reasoner.Algorithm;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+
+/**
+ * Test case ensuring {@link Reasoner#setReasoningTimeout(Integer)} works as
+ * expected and terminates reasoning after the given {@link #timeout}. Reasoning
+ * may overrun the timeout by up to one second, to account for the setup and
+ * tear-down of reasoning resources.
+ *
+ * @author Adrian Bielefeldt
+ *
+ */
+public class ReasonerTimeoutTest {
+
+	/**
+	 * The timeout after which reasoning should be completed, in seconds.
+	 */
+	private static int timeout = 1;
+
+	/**
+	 * A list of facts to be used in multiple test runs.
+	 */
+	private static List<Fact> facts = new ArrayList<>();
+	/**
+	 * A list of rules to be used in multiple test runs.
+	 */
+	private static List<Rule> rules = new ArrayList<>();
+
+	private Reasoner reasoner;
+
+	private final static KnowledgeBase kb = new KnowledgeBase();
+
+	/**
+	 * The global timeout after which each test is forcibly terminated: five times
+	 * the reasoning timeout, leaving room for setup and tear-down.
+	 */
+	@org.junit.Rule
+	public Timeout globalTimeout = Timeout.seconds(timeout * 5);
+
+	private final static Predicate infinite_EDB = Expressions.makePredicate("infinite_EDB", 2);
+	private final static Predicate infinite_IDB = Expressions.makePredicate("infinite_IDB", 2);
+	private final static Variable x = Expressions.makeUniversalVariable("x");
+	private final static Variable y = Expressions.makeUniversalVariable("y");
+	private final static Variable z = Expressions.makeExistentialVariable("z");
+
+	private final static PositiveLiteral infinite_IDB_xy = Expressions.makePositiveLiteral(infinite_IDB, x, y);
+	private final static PositiveLiteral infinite_EDB_xy = Expressions.makePositiveLiteral(infinite_EDB, x, y);
+	private final static PositiveLiteral infinite_IDB_yz = Expressions.makePositiveLiteral(infinite_IDB, y, z);
+
+	private final static Rule infinite_rule = Expressions.makeRule(infinite_IDB_yz, infinite_IDB_xy);
+
+	/**
+	 * This method provides the {@link #facts} and {@link #rules} to be used in all
+	 * test runs. To test if the timeout works as expected, a small set of facts and
+	 * rules is used that results in an infinite chase.
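+	 * The second rule derives a fresh existential successor from every
+	 * infinite_IDB fact, so the chase cannot terminate on its own.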
Facts: infinite_EDB(A, B) + * Rules: infinite_IDB(?x, ?y) :- infinite_EDB(?x, ?y) infinite_IDB(?y, ?z) :- + * infinite_IDB(?x, ?y) + */ + @BeforeClass + public static void setUpBeforeClass() { + + facts.add(Expressions.makeFact(infinite_EDB, + Arrays.asList(Expressions.makeAbstractConstant("A"), Expressions.makeAbstractConstant("B")))); + + final Rule import_rule = Expressions.makeRule(infinite_IDB_xy, infinite_EDB_xy); + rules.add(import_rule); + + rules.add(infinite_rule); + + kb.addStatements(rules); + kb.addStatements(facts); + } + + @Before + public void setUp() { + this.reasoner = new VLogReasoner(kb); + } + + @Test(expected = IllegalArgumentException.class) + public void testSetReasoningTimeout() { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.setReasoningTimeout(-3); + } + } + + @Test + public void skolem() throws IOException { + this.reasoner.setReasoningTimeout(timeout); + this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); + + assertFalse(this.reasoner.reason()); + } + + @Test + public void restricted() throws IOException { + this.reasoner.setReasoningTimeout(timeout); + this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + + assertFalse(this.reasoner.reason()); + } + + @Test + public void skolemAfterLoad() throws IOException { + this.reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); + + this.reasoner.setReasoningTimeout(timeout); + + assertFalse(this.reasoner.reason()); + } + + @Test + public void restrictedAfterLoad() throws IOException { + this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + + this.reasoner.setReasoningTimeout(timeout); + + assertFalse(this.reasoner.reason()); + } + + @Test + public void resetReasoningTimeoutToNull() throws IOException { + this.reasoner.setReasoningTimeout(timeout); + + this.reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); + assertFalse(this.reasoner.reason()); + + this.reasoner.resetReasoner(); + + final PositiveLiteral blocking_IDB_yx = Expressions.makePositiveLiteral(infinite_IDB, y, x); + final Rule blockingRule = Expressions.makeRule(blocking_IDB_yx, infinite_IDB_xy); + kb.addStatement(blockingRule); + + this.reasoner.setReasoningTimeout(null); + assertTrue(this.reasoner.reason()); + } + + @After + public void tearDown() { + this.reasoner.close(); + } +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java new file mode 100644 index 000000000..dcd9243f4 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/StratifiedNegationTest.java @@ -0,0 +1,185 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +import karmaresearch.vlog.Atom; +import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.MaterializationException; +import karmaresearch.vlog.NonExistingPredicateException; +import karmaresearch.vlog.NotStartedException; +import karmaresearch.vlog.Rule; +import karmaresearch.vlog.Term; +import karmaresearch.vlog.TermQueryResultIterator; +import karmaresearch.vlog.VLog; +import karmaresearch.vlog.VLog.LogLevel; +import karmaresearch.vlog.VLog.RuleRewriteStrategy; + +public class StratifiedNegationTest { + + /** + * P(x), Not(Q(x)) -> R(x) Q - EDB. + * + * @throws EDBConfigurationException + * @throws NotStartedException + */ + @Test + public void testSimpleInputNegation() + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { + final Term varX = VLogExpressions.makeVariable("x"); + + // P(x), Not(Q(x)) -> R(x) . + final Atom isR = VLogExpressions.makeAtom("R", varX); + + final Atom isP = VLogExpressions.makeAtom("P", varX); + final Atom isNotQ = VLogExpressions.makeNegatedAtom("Q", varX); + final Rule rule = VLogExpressions.makeRule(isR, isP, isNotQ); + + final VLog vLog = new VLog(); + vLog.setLogLevel(LogLevel.DEBUG); + + // P(c) . + final String[][] factTermsForP = { { "c" } }; + vLog.addData("P", factTermsForP); + + // Q(d) . => Q is an EDB predicate. + final String[][] factTermsForQ = { { "d" } }; + vLog.addData("Q", factTermsForQ); + + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + System.out.println(rule); + + try (final TermQueryResultIterator queryResult = vLog.query(isP, true, false);) { + assertTrue(queryResult.hasNext()); + final Term[] next = queryResult.next(); + assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, next); + } + + final Atom isQ = VLogExpressions.makeAtom("Q", varX); + try (final TermQueryResultIterator queryResult = vLog.query(isQ, true, false);) { + assertTrue(queryResult.hasNext()); + final Term[] next = queryResult.next(); + assertArrayEquals(new Term[] { VLogExpressions.makeConstant("d") }, next); + } + + try (final TermQueryResultIterator queryResult = vLog.query(isR, true, false);) { + assertFalse(queryResult.hasNext()); + } + + vLog.materialize(false); + + try (final TermQueryResultIterator queryResult = vLog.query(isR, true, false);) { + assertTrue(queryResult.hasNext()); + final Term[] next = queryResult.next(); + assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, next); + assertFalse(queryResult.hasNext()); + } + } + + /** + * P(x), Not(Q(x)) -> R(x)
+ * R-IDB. + * + * @throws EDBConfigurationException + * @throws NotStartedException + */ + @Test + public void testStratifiedNegationOnIDB() + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { + final Term varX = VLogExpressions.makeVariable("x"); + + final Atom isP = VLogExpressions.makeAtom("P", varX); + final Atom isNotQ = VLogExpressions.makeNegatedAtom("Q", varX); + final Atom isR = VLogExpressions.makeAtom("R", varX); + + // P(x), Not(Q(x)) -> R(x) . + final Rule rule = VLogExpressions.makeRule(isR, isP, isNotQ); + System.out.println(rule); + + final VLog vLog = new VLog(); + final String[][] factTerms = { { "c" } }; + + // P(c) . + vLog.addData("P", factTerms); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + + try (final TermQueryResultIterator queryResult = vLog.query(isR, true, false);) { + assertFalse(queryResult.hasNext()); + } + + try (final TermQueryResultIterator queryResult = vLog.query(VLogExpressions.makeAtom("Q", varX), true, + false);) { + assertFalse(queryResult.hasNext()); + } + + try (final TermQueryResultIterator queryResult = vLog.query(isP, true, false);) { + assertTrue(queryResult.hasNext()); + } + vLog.materialize(true); + + try (final TermQueryResultIterator queryResult = vLog.query(isR, true, false);) { + assertTrue(queryResult.hasNext()); + final Term[] next = queryResult.next(); + assertArrayEquals(new Term[] { VLogExpressions.makeConstant("c") }, next); + assertFalse(queryResult.hasNext()); + } + } + + /** + * P(x), Not(Q(x)) -> Q(x)
+ * Q - IDB. + * + * @throws EDBConfigurationException + * @throws NotStartedException + */ + @Test(expected = MaterializationException.class) + public void testNegationOnIDBUnstratifiable() + throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { + final Term varX = VLogExpressions.makeVariable("x"); + final String predP = "P"; + final String predQ = "Q"; + + final Atom isQ = VLogExpressions.makeAtom(predQ, varX); + final Atom isP = VLogExpressions.makeAtom(predP, varX); + final Atom isNotQ = VLogExpressions.makeNegatedAtom(predQ, varX); + + // P(x), Not(Q(x)) -> Q(x) . + final Rule rule = VLogExpressions.makeRule(isQ, isP, isNotQ); + + final VLog vLog = new VLog(); + final String[][] factTerms = { { "c" } }; + + // P(c) . + vLog.addData(predP, factTerms); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + + try (final TermQueryResultIterator queryResult = vLog.query(isQ, true, false);) { + assertFalse(queryResult.hasNext()); + } + vLog.materialize(true); + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java new file mode 100644 index 000000000..1f6ee36ef --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromCsvFileTest.java @@ -0,0 +1,128 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource;
+
+import karmaresearch.vlog.AlreadyStartedException;
+import karmaresearch.vlog.Atom;
+import karmaresearch.vlog.EDBConfigurationException;
+import karmaresearch.vlog.NonExistingPredicateException;
+import karmaresearch.vlog.NotStartedException;
+import karmaresearch.vlog.Term;
+import karmaresearch.vlog.TermQueryResultIterator;
+import karmaresearch.vlog.VLog;
+
+public class VLogDataFromCsvFileTest {
+
+	private static final String unzippedUnaryPredicateName1 = "p";
+	private static final String unzippedUnaryPredicateName2 = "q";
+	private static final String zippedUnaryPredicateName1 = "p_z";
+	private static final String zippedUnaryPredicateName2 = "q_z";
+	private static final String emptyUnaryPredicateName = "empty";
+
+	private static final List<List<Term>> expectedUnaryQueryResult = Arrays.asList(
+			Arrays.asList(VLogExpressions.makeConstant("c1")), Arrays.asList(VLogExpressions.makeConstant("c2")));
+
+	private static List<List<Term>> getUnaryQueryResults(final VLog vLog, final String predicateName)
+			throws NotStartedException, NonExistingPredicateException {
+		final TermQueryResultIterator queryResultsPIterator = vLog
+				.query(new Atom(predicateName, VLogExpressions.makeVariable("x")));
+		final List<List<Term>> queryResults = new ArrayList<>(
+				VLogQueryResultUtils.collectResults(queryResultsPIterator));
+		return queryResults;
+	}
+
+	@Test
+	public void testLoadDataFomCsvString() throws AlreadyStartedException, EDBConfigurationException, IOException,
+			NotStartedException, NonExistingPredicateException {
+
+		final String vLogDataSourcesConfigurationString = this.generateVLogDataSourceConfig();
+
+		final VLog vLog = new VLog();
+		vLog.start(vLogDataSourcesConfigurationString, false);
+
+		final List<List<Term>> queryResult1 = getUnaryQueryResults(vLog, unzippedUnaryPredicateName1 + "-1");
+		final List<List<Term>> queryResultZipped1 = getUnaryQueryResults(vLog, zippedUnaryPredicateName1 + "-1");
+		assertEquals(expectedUnaryQueryResult, queryResult1);
+		assertEquals(queryResult1, queryResultZipped1);
+
+		final List<List<Term>> queryResult2 = getUnaryQueryResults(vLog, unzippedUnaryPredicateName2 + "-1");
+		final List<List<Term>> queryResultZipped2 = getUnaryQueryResults(vLog, zippedUnaryPredicateName2 + "-1");
+		assertEquals(expectedUnaryQueryResult, queryResult2);
+		assertEquals(queryResult2, queryResultZipped2);
+
+		vLog.stop();
+	}
+
+	private String generateVLogDataSourceConfig() throws IOException {
+		final ReasonerDataSource unzippedCSV = new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER,
+				FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv").getPath());
+		final DataSourceDeclaration unaryUnzippedCSV1 = new DataSourceDeclarationImpl(
+				new PredicateImpl(unzippedUnaryPredicateName1, 1), unzippedCSV);
+		final DataSourceDeclaration unaryUnzippedCSV2 = new DataSourceDeclarationImpl(
+				new PredicateImpl(unzippedUnaryPredicateName2, 1), unzippedCSV);
+
+		final
ReasonerDataSource zippedCSV = new CsvFileDataSource(new File(FileDataSourceTestUtils.INPUT_FOLDER, + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz").getPath()); + final DataSourceDeclaration unaryZippedCSV1 = new DataSourceDeclarationImpl( + new PredicateImpl(zippedUnaryPredicateName1, 1), zippedCSV); + final DataSourceDeclaration unaryZippedCSV2 = new DataSourceDeclarationImpl( + new PredicateImpl(zippedUnaryPredicateName2, 1), zippedCSV); + + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + knowledgeBase.addStatements(unaryUnzippedCSV1, unaryUnzippedCSV2, unaryZippedCSV1, unaryZippedCSV2); + final VLogKnowledgeBase vLogKnowledgeBase = new VLogKnowledgeBase(knowledgeBase); + + final String vLogDataSourcesConfigurationString = vLogKnowledgeBase.getVLogDataSourcesConfigurationString(); + return vLogDataSourcesConfigurationString; + } + + @Test(expected = NonExistingPredicateException.class) + public void testLoadDataFomCsvStringNonExistingPredicate() throws AlreadyStartedException, + EDBConfigurationException, IOException, NotStartedException, NonExistingPredicateException { + + final String vLogDataSourcesConfigurationString = this.generateVLogDataSourceConfig(); + + final VLog vLog = new VLog(); + try { + vLog.start(vLogDataSourcesConfigurationString, false); + getUnaryQueryResults(vLog, emptyUnaryPredicateName); + } finally { + vLog.stop(); + } + } + +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java similarity index 77% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java index 02b5411e9..080030601 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromMemoryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromMemoryTest.java @@ -1,258 +1,248 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L%
- */
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang3.StringUtils;
-import org.junit.Assert;
-import org.junit.Test;
-
-import karmaresearch.vlog.AlreadyStartedException;
-import karmaresearch.vlog.EDBConfigurationException;
-import karmaresearch.vlog.NotStartedException;
-import karmaresearch.vlog.Rule;
-import karmaresearch.vlog.Term;
-import karmaresearch.vlog.TermQueryResultIterator;
-import karmaresearch.vlog.VLog;
-import karmaresearch.vlog.VLog.RuleRewriteStrategy;
-
-/**
- * Tests VLog functionality when data (facts) is loaded exclusively from memory.
- *
- * @author Irina.Dragoste
- *
- */
-public class VLogDataFromMemoryTest {
-
-	@Test
-	public void testVLogSimpleInference()
-			throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException {
-
-		final String[][] argsAMatrix = { { "a" }, { "b" } };
-		final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x");
-		final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX);
-		final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX);
-		final Rule rule = VLogExpressions.makeRule(atomBx, atomAx);
-		// tuples: [[a], [b]]
-		final Set<List<Term>> tuples = new HashSet<>();
-		tuples.add(Arrays.asList(VLogExpressions.makeConstant("a")));
-		tuples.add(Arrays.asList(VLogExpressions.makeConstant("b")));
-
-		// Start VLog
-		final VLog vLog = new VLog();
-		vLog.addData("A", argsAMatrix); // Assert A(a), A(b)
-		vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE);
-
-		// Querying A(?X) before materialize
-		final TermQueryResultIterator queryResultIteratorAx1 = vLog.query(atomAx);
-		final Set<List<Term>> queryAxResults1 = VLogQueryResultUtils.collectResults(queryResultIteratorAx1);
-		assertEquals(tuples, queryAxResults1);
-
-		// Querying B(?X) before materialize
-		final TermQueryResultIterator queryResultIteratorBx1 = vLog.query(atomBx);
-		assertFalse(queryResultIteratorBx1.hasNext());
-		queryResultIteratorBx1.close();
-
-		vLog.materialize(true);
-
-		// Querying B(?X) after materialize
-		final TermQueryResultIterator queryResultIteratorBx2 = vLog.query(atomBx);
-		final Set<List<Term>> queryResultsBx = VLogQueryResultUtils.collectResults(queryResultIteratorBx2);
-		assertEquals(tuples, queryResultsBx);
-
-		final TermQueryResultIterator queryResultIteratorAx2 = vLog.query(atomAx);
-		final Set<List<Term>> queryAxResults2 = VLogQueryResultUtils.collectResults(queryResultIteratorAx2);
-		assertEquals(tuples, queryAxResults2);
-
-		vLog.stop();
-	}
-
-	@Test
-	public void testBooleanQueryTrueIncludeConstantsFalse()
-			throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException {
-		// Creating rules and facts
-		final String[][] argsAMatrix = { { "a", "a" } };
-		final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X");
-		final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y");
-		final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY);
-		final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY);
-		final Rule rule = VLogExpressions.makeRule(atomBx, atomAx);
-
-		// Start VLog
-		final VLog vLog = new VLog();
-		vLog.addData("A", argsAMatrix);
-		vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE);
-		vLog.materialize(true);
-
-		// Querying B(a)
-
final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); - final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); - - final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); - assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); - final Term[] expectedQueryResult = { constantA, constantA }; - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); - assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); - defaultIteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorNoConstantsNoBlanks = vLog.query(booleanQueryAtomBa, false, false); - assertTrue(iteratorNoConstantsNoBlanks.hasNext()); - assertTrue(iteratorNoConstantsNoBlanks.next().length == 0); - iteratorNoConstantsNoBlanks.close(); - - final TermQueryResultIterator iteratorNoConstantsWithBlanks = vLog.query(booleanQueryAtomBa, false, true); - assertTrue(iteratorNoConstantsWithBlanks.hasNext()); - Assert.assertTrue(iteratorNoConstantsWithBlanks.next().length == 0); - assertFalse(iteratorNoConstantsWithBlanks.hasNext()); - iteratorNoConstantsWithBlanks.close(); - - vLog.stop(); - } - - @Test - public void testBooleanQueryTrueIncludeConstantsTrue() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { - // Creating rules and facts - final String[][] argsAMatrix = { { "a", "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); // A(x,x) -> B(x,x) - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); // assert A(a,a) - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); - final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); - - final Term[] expectedQueryResult = { constantA, constantA }; - - final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); - assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); - assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); - defaultIteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa, true, false); - assertTrue(iteratorWithConstantsAndBlanks.hasNext()); - final Term[] actualQueryResult3 = iteratorWithConstantsAndBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, actualQueryResult3); - assertFalse(iteratorWithConstantsAndBlanks.hasNext()); - iteratorWithConstantsAndBlanks.close(); - - final TermQueryResultIterator iteratorWithConstantsNoBlanks = vLog.query(booleanQueryAtomBa, true, true); - assertTrue(iteratorWithConstantsNoBlanks.hasNext()); - final Term[] actualQueryResult2 = iteratorWithConstantsNoBlanks.next(); - Assert.assertArrayEquals(expectedQueryResult, 
actualQueryResult2); - assertFalse(iteratorWithConstantsNoBlanks.hasNext()); - iteratorWithConstantsNoBlanks.close(); - - vLog.stop(); - } - - @Test - public void testBooleanQueryFalse() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { - final String[][] argsAMatrix = { { "a" } }; - final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); - final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); - final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); - final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); - - // Start VLog - final VLog vLog = new VLog(); - vLog.addData("A", argsAMatrix); - vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); - vLog.materialize(true); - - // Querying B(a) - final karmaresearch.vlog.Term constantB = VLogExpressions.makeConstant("b"); - final karmaresearch.vlog.Atom booleanQueryAtomBb = new karmaresearch.vlog.Atom("B", constantB); - - final TermQueryResultIterator queryResultEnnumeration = vLog.query(booleanQueryAtomBb); - assertFalse(queryResultEnnumeration.hasNext()); - - queryResultEnnumeration.close(); - vLog.stop(); - } - - @Test - public void queryEmptyKnowledgeBase() - throws NotStartedException, AlreadyStartedException, EDBConfigurationException, IOException { - // Start VLog - final VLog vLog = new VLog(); - vLog.start(StringUtils.EMPTY, false); - - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", VLogExpressions.makeVariable("?x")); - - final TermQueryResultIterator stringQueryResultIterator = vLog.query(queryAtom); - Assert.assertFalse(stringQueryResultIterator.hasNext()); - stringQueryResultIterator.close(); - - vLog.materialize(true); - - final TermQueryResultIterator queryResultIteratorAfterReason = vLog.query(queryAtom); - Assert.assertFalse(queryResultIteratorAfterReason.hasNext()); - queryResultIteratorAfterReason.close(); - - vLog.stop(); - } - - @Test - public void queryEmptyKnowledgeBaseSetRules() - throws NotStartedException, AlreadyStartedException, EDBConfigurationException, IOException { - // Start VLog - final VLog vLog = new VLog(); - vLog.start(StringUtils.EMPTY, false); - - vLog.setRules(new Rule[] {}, VLog.RuleRewriteStrategy.NONE); - - final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", VLogExpressions.makeVariable("?x")); - - final TermQueryResultIterator stringQueryResultIterator = vLog.query(queryAtom); - Assert.assertFalse(stringQueryResultIterator.hasNext()); - stringQueryResultIterator.close(); - - vLog.materialize(true); - - final TermQueryResultIterator queryResultIteratorAfterReason = vLog.query(queryAtom); - Assert.assertFalse(queryResultIteratorAfterReason.hasNext()); - queryResultIteratorAfterReason.close(); - - vLog.stop(); - } - -} \ No newline at end of file +package org.semanticweb.rulewerk.reasoner.vlog; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import karmaresearch.vlog.AlreadyStartedException;
+import karmaresearch.vlog.EDBConfigurationException;
+import karmaresearch.vlog.NonExistingPredicateException;
+import karmaresearch.vlog.NotStartedException;
+import karmaresearch.vlog.Rule;
+import karmaresearch.vlog.Term;
+import karmaresearch.vlog.TermQueryResultIterator;
+import karmaresearch.vlog.VLog;
+import karmaresearch.vlog.VLog.RuleRewriteStrategy;
+
+/**
+ * Tests VLog functionality when data (facts) is loaded exclusively from memory.
+ *
+ * @author Irina.Dragoste
+ *
+ */
+public class VLogDataFromMemoryTest {
+
+	@Test
+	public void testVLogSimpleInference() throws AlreadyStartedException, EDBConfigurationException, IOException,
+			NotStartedException, NonExistingPredicateException {
+
+		final String[][] argsAMatrix = { { "a" }, { "b" } };
+		final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x");
+		final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX);
+		final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX);
+		final Rule rule = VLogExpressions.makeRule(atomBx, atomAx);
+		// tuples: [[a], [b]]
+		final Set<List<Term>> tuples = new HashSet<>();
+		tuples.add(Arrays.asList(VLogExpressions.makeConstant("a")));
+		tuples.add(Arrays.asList(VLogExpressions.makeConstant("b")));
+
+		// Start VLog
+		final VLog vLog = new VLog();
+		vLog.addData("A", argsAMatrix); // Assert A(a), A(b)
+		vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE);
+
+		// Querying A(?X) before materialize
+		final TermQueryResultIterator queryResultIteratorAx1 = vLog.query(atomAx);
+		final Set<List<Term>> queryAxResults1 = VLogQueryResultUtils.collectResults(queryResultIteratorAx1);
+		assertEquals(tuples, queryAxResults1);
+
+		// Querying B(?X) before materialize
+		final TermQueryResultIterator queryResultIteratorBx1 = vLog.query(atomBx);
+		assertFalse(queryResultIteratorBx1.hasNext());
+		queryResultIteratorBx1.close();
+
+		vLog.materialize(true);
+
+		// Querying B(?X) after materialize
+		final TermQueryResultIterator queryResultIteratorBx2 = vLog.query(atomBx);
+		final Set<List<Term>> queryResultsBx = VLogQueryResultUtils.collectResults(queryResultIteratorBx2);
+		assertEquals(tuples, queryResultsBx);
+
+		final TermQueryResultIterator queryResultIteratorAx2 = vLog.query(atomAx);
+		final Set<List<Term>> queryAxResults2 = VLogQueryResultUtils.collectResults(queryResultIteratorAx2);
+		assertEquals(tuples, queryAxResults2);
+
+		vLog.stop();
+	}
+
+	@Test
+	public void testBooleanQueryTrueIncludeConstantsFalse() throws AlreadyStartedException, EDBConfigurationException,
+			IOException, NotStartedException, NonExistingPredicateException {
+		// Creating rules and facts
+		final String[][] argsAMatrix = { { "a", "a" } };
+		final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X");
+		final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y");
+		final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY);
+		final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY);
+		final Rule rule =
VLogExpressions.makeRule(atomBx, atomAx); + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); + final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); + + final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); + assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); + final Term[] expectedQueryResult = { constantA, constantA }; + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); + assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); + defaultIteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorNoConstantsNoBlanks = vLog.query(booleanQueryAtomBa, false, false); + assertTrue(iteratorNoConstantsNoBlanks.hasNext()); + assertTrue(iteratorNoConstantsNoBlanks.next().length == 0); + iteratorNoConstantsNoBlanks.close(); + + final TermQueryResultIterator iteratorNoConstantsWithBlanks = vLog.query(booleanQueryAtomBa, false, true); + assertTrue(iteratorNoConstantsWithBlanks.hasNext()); + Assert.assertTrue(iteratorNoConstantsWithBlanks.next().length == 0); + assertFalse(iteratorNoConstantsWithBlanks.hasNext()); + iteratorNoConstantsWithBlanks.close(); + + vLog.stop(); + } + + @Test + public void testBooleanQueryTrueIncludeConstantsTrue() throws AlreadyStartedException, EDBConfigurationException, + IOException, NotStartedException, NonExistingPredicateException { + // Creating rules and facts + final String[][] argsAMatrix = { { "a", "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Term varY = VLogExpressions.makeVariable("Y"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX, varY); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX, varY); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); // A(x,x) -> B(x,x) + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); // assert A(a,a) + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantA = VLogExpressions.makeConstant("a"); + final karmaresearch.vlog.Atom booleanQueryAtomBa = new karmaresearch.vlog.Atom("B", constantA, constantA); + + final Term[] expectedQueryResult = { constantA, constantA }; + + final TermQueryResultIterator defaultIteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa); + assertTrue(defaultIteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult = defaultIteratorWithConstantsAndBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult); + assertFalse(defaultIteratorWithConstantsAndBlanks.hasNext()); + defaultIteratorWithConstantsAndBlanks.close(); + + final TermQueryResultIterator iteratorWithConstantsAndBlanks = vLog.query(booleanQueryAtomBa, true, false); + assertTrue(iteratorWithConstantsAndBlanks.hasNext()); + final Term[] actualQueryResult3 = iteratorWithConstantsAndBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult3); + assertFalse(iteratorWithConstantsAndBlanks.hasNext()); + iteratorWithConstantsAndBlanks.close(); + + final 
TermQueryResultIterator iteratorWithConstantsNoBlanks = vLog.query(booleanQueryAtomBa, true, true); + assertTrue(iteratorWithConstantsNoBlanks.hasNext()); + final Term[] actualQueryResult2 = iteratorWithConstantsNoBlanks.next(); + Assert.assertArrayEquals(expectedQueryResult, actualQueryResult2); + assertFalse(iteratorWithConstantsNoBlanks.hasNext()); + iteratorWithConstantsNoBlanks.close(); + + vLog.stop(); + } + + @Test + public void testBooleanQueryFalse() throws AlreadyStartedException, EDBConfigurationException, IOException, + NotStartedException, NonExistingPredicateException { + final String[][] argsAMatrix = { { "a" } }; + final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("X"); + final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("B", varX); + final karmaresearch.vlog.Atom atomAx = new karmaresearch.vlog.Atom("A", varX); + final Rule rule = VLogExpressions.makeRule(atomBx, atomAx); + + // Start VLog + final VLog vLog = new VLog(); + vLog.addData("A", argsAMatrix); + vLog.setRules(new Rule[] { rule }, RuleRewriteStrategy.NONE); + vLog.materialize(true); + + // Querying B(a) + final karmaresearch.vlog.Term constantB = VLogExpressions.makeConstant("b"); + final karmaresearch.vlog.Atom booleanQueryAtomBb = new karmaresearch.vlog.Atom("B", constantB); + + final TermQueryResultIterator queryResultEnnumeration = vLog.query(booleanQueryAtomBb); + assertFalse(queryResultEnnumeration.hasNext()); + + queryResultEnnumeration.close(); + vLog.stop(); + } + + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseBeforeReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { + // Start VLog + final VLog vLog = new VLog(); + try { + vLog.start(StringUtils.EMPTY, false); + + final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); + + vLog.query(queryAtom); + } finally { + vLog.stop(); + } + } + + @Test(expected = NonExistingPredicateException.class) + public void queryEmptyKnowledgeBaseAfterReasoning() throws NotStartedException, AlreadyStartedException, + EDBConfigurationException, IOException, NonExistingPredicateException { + // Start VLog + final VLog vLog = new VLog(); + try { + vLog.start(StringUtils.EMPTY, false); + vLog.materialize(true); + + final karmaresearch.vlog.Atom queryAtom = new karmaresearch.vlog.Atom("P", + VLogExpressions.makeVariable("?x")); + + vLog.query(queryAtom); + } finally { + vLog.stop(); + } + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java new file mode 100644 index 000000000..7109ce6c2 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataFromRdfFileTest.java @@ -0,0 +1,132 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.PredicateImpl;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource;
+
+import karmaresearch.vlog.AlreadyStartedException;
+import karmaresearch.vlog.Atom;
+import karmaresearch.vlog.EDBConfigurationException;
+import karmaresearch.vlog.NonExistingPredicateException;
+import karmaresearch.vlog.NotStartedException;
+import karmaresearch.vlog.Term;
+import karmaresearch.vlog.TermQueryResultIterator;
+import karmaresearch.vlog.VLog;
+
+public class VLogDataFromRdfFileTest {
+
+	private static final String unzippedTernaryPredicateName = "triple";
+	private static final String zippedTernaryPredicateName = "triple_z";
+	private static final String emptyTernaryPredicateName = "empty";
+
+	private static final List<List<Term>> expectedTernaryQueryResult = Arrays.asList(
+			Arrays.asList(VLogExpressions.makeConstant("<http://example.org/c1>"),
+					VLogExpressions.makeConstant("<http://example.org/p>"),
+					VLogExpressions.makeConstant("<http://example.org/c2>")),
+			Arrays.asList(VLogExpressions.makeConstant("<http://example.org/c1>"),
+					VLogExpressions.makeConstant("<http://example.org/q>"),
+					// TODO: see comments of https://github.com/karmaresearch/vlog/issues/73 for
+					// expected value
+//					VLogExpressions.makeConstant("\"test string\"^^<http://www.w3.org/2001/XMLSchema#string>")
+					VLogExpressions.makeConstant("\"test string\"")));
+
+	private static List<List<Term>> getTernaryQueryResults(final VLog vLog, final String predicateName)
+			throws NotStartedException, NonExistingPredicateException {
+		final TermQueryResultIterator queryResultsPIterator = vLog
+				.query(new Atom(predicateName, VLogExpressions.makeVariable("s"), VLogExpressions.makeVariable("p"),
+						VLogExpressions.makeVariable("o")));
+		final List<List<Term>> queryResults = new ArrayList<>(
+				VLogQueryResultUtils.collectResults(queryResultsPIterator));
+		return queryResults;
+	}
+
+	@Test
+	public void testLoadDataFromRdfStringUnzipped() throws AlreadyStartedException, EDBConfigurationException,
+			IOException, NotStartedException, NonExistingPredicateException {
+
+		final VLog vLog = new VLog();
+		vLog.start(this.generateVLogDataSourceConfig(), false);
+
+		final List<List<Term>> queryResult = getTernaryQueryResults(vLog, unzippedTernaryPredicateName + "-3");
+		assertEquals(expectedTernaryQueryResult, queryResult);
+
+		vLog.stop();
+	}
+
+	@Test
+	public void testLoadDataFromRdfStringZipped() throws AlreadyStartedException, EDBConfigurationException,
+			IOException, NotStartedException, NonExistingPredicateException {
+
+		final VLog vLog = new VLog();
+		vLog.start(this.generateVLogDataSourceConfig(), false);
+
+		final List<List<Term>> queryResultZipped = getTernaryQueryResults(vLog, zippedTernaryPredicateName + "-3");
+		assertEquals(expectedTernaryQueryResult, queryResultZipped);
+
+		vLog.stop();
+	}
+
+	@Test(expected = NonExistingPredicateException.class)
+	public void testLoadDataFromRdfStringNonExistingPredicate() throws AlreadyStartedException,
+			EDBConfigurationException, IOException, NotStartedException, NonExistingPredicateException {
+
+		final VLog vLog = new VLog();
+		try {
+			vLog.start(this.generateVLogDataSourceConfig(), false);
+			getTernaryQueryResults(vLog, emptyTernaryPredicateName);
+		} finally {
+			vLog.stop();
+		}
+	}
+
+	private String generateVLogDataSourceConfig() throws IOException {
+		final RdfFileDataSource unzippedRDFDataSource = new RdfFileDataSource(
+				new File(FileDataSourceTestUtils.INPUT_FOLDER, FileDataSourceTestUtils.unzippedNtFileRoot + ".nt")
+						.getPath());
+		final DataSourceDeclarationImpl unzippedRDF = new DataSourceDeclarationImpl(
+				new PredicateImpl(unzippedTernaryPredicateName, 3), unzippedRDFDataSource);
+
+		final RdfFileDataSource zippedRDFDataSource = new RdfFileDataSource(
+				new File(FileDataSourceTestUtils.INPUT_FOLDER, FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz")
+						.getPath());
+		final DataSourceDeclarationImpl zippedRDF = new DataSourceDeclarationImpl(
+				new PredicateImpl(zippedTernaryPredicateName, 3), zippedRDFDataSource);
+
+		final KnowledgeBase knowledgeBase = new KnowledgeBase();
+		knowledgeBase.addStatements(unzippedRDF, zippedRDF);
+		final VLogKnowledgeBase vLogKnowledgeBase = new VLogKnowledgeBase(knowledgeBase);
+		return vLogKnowledgeBase.getVLogDataSourcesConfigurationString();
+
+	}
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java
new file mode 100644
index 000000000..c11752655
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogDataSourceConfigurationVisitorTest.java
@@ -0,0 +1,120 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.ReasonerDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;
+
+public class VLogDataSourceConfigurationVisitorTest {
+	private final String csvFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.csv";
+	private final String unzippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt";
+	private final String zippedRdfFile = FileDataSourceTestUtils.INPUT_FOLDER + "file.nt.gz";
+	private final String gzFile = csvFile + ".gz";
+	final URL endpoint = new URL("http://query.wikidata.org/sparql");
+
+	public VLogDataSourceConfigurationVisitorTest() throws MalformedURLException {
+	}
+
+	@Test
+	public void visit_CsvFileDataSource_succeeds() throws IOException {
+		final CsvFileDataSource unzippedCsvFileDataSource = new CsvFileDataSource(csvFile);
+		final CsvFileDataSource zippedCsvFileDataSource = new CsvFileDataSource(gzFile);
+
+		final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath();
+		final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0="
+				+ expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n";
+
+		assertEquals(expectedConfigString, toConfigString(unzippedCsvFileDataSource));
+		assertEquals(expectedConfigString, toConfigString(zippedCsvFileDataSource));
+	}
+
+	@Test
+	public void visit_RdfFileDataSource_succeeds() throws IOException {
+		final RdfFileDataSource unzippedRdfFileDataSource = new RdfFileDataSource(unzippedRdfFile);
+		final RdfFileDataSource zippedRdfFileDataSource = new RdfFileDataSource(zippedRdfFile);
+
+		final String expectedDirCanonicalPath = new File(FileDataSourceTestUtils.INPUT_FOLDER).getCanonicalPath();
+		final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" + "EDB%1$d_param0="
+				+ expectedDirCanonicalPath + "\n" + "EDB%1$d_param1=file\n";
+
+		assertEquals(expectedConfigString, toConfigString(unzippedRdfFileDataSource));
+		assertEquals(expectedConfigString, toConfigString(zippedRdfFileDataSource));
+	}
+
+	@Test
+	public void visit_SparqlQueryResultDataSource_succeeds() throws IOException, MalformedURLException {
+		final SparqlQueryResultDataSource simpleDataSource = new SparqlQueryResultDataSource(endpoint, "b,a",
+				"?a wdt:P22 ?b");
+		final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>(
+				Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a")));
+		final SparqlQueryResultDataSource listDataSource = new SparqlQueryResultDataSource(endpoint, queryVariables,
+				"?a wdt:P22 ?b");
+		final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n"
+				+ "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n"
+				+ "EDB%1$d_param2=?a wdt:P22 ?b\n";
+		assertEquals(expectedConfigString, toConfigString(simpleDataSource));
+		assertEquals(expectedConfigString, toConfigString(listDataSource));
+	}
+
+	@Test
+	public void visit_InMemoryDataSource_returnsNull() throws IOException {
+		final InMemoryDataSource inMemoryDataSource = new VLogInMemoryDataSource(1, 1);
+		assertEquals(null, toConfigString(inMemoryDataSource));
+	}
+
+	@Test
+	public void getDirCanonicalPath_relativePath_succeeds() throws IOException {
+		final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor();
+		final FileDataSource fileDataSource = new CsvFileDataSource("file.csv");
+		final String currentFolder = new File(".").getCanonicalPath();
+		assertEquals(currentFolder, visitor.getDirCanonicalPath(fileDataSource));
+	}
+
+	@Test
+	public void getDirCanonicalPath_nonNormalisedPath_succeeds() throws IOException {
+		final VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor();
+		final FileDataSource fileDataSource = new CsvFileDataSource("./././file.csv");
+		final String currentFolder = new File(".").getCanonicalPath();
+		assertEquals(currentFolder, visitor.getDirCanonicalPath(fileDataSource));
+	}
+
+	private String toConfigString(ReasonerDataSource dataSource) throws IOException {
+		VLogDataSourceConfigurationVisitor visitor = new VLogDataSourceConfigurationVisitor();
+		dataSource.accept(visitor);
+		return visitor.getConfigString();
+	}
+}
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java
similarity index 70%
rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java
rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java
index 89c3a1fd1..0aee3638f 100644
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogExpressions.java
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogExpressions.java
@@ -1,10 +1,10 @@
-package org.semanticweb.vlog4j.core.reasoner.vlog;
+package org.semanticweb.rulewerk.reasoner.vlog;
 
 /*-
  * #%L
- * VLog4j Core Components
+ * Rulewerk VLog Reasoner Support
  * %%
- * Copyright (C) 2018 VLog4j Developers
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
  * %%
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -36,8 +36,7 @@ private VLogExpressions() {
 	 * Creates a {@link karmaresearch.vlog.Term} object with given name and type
 	 * {@link karmaresearch.vlog.Term.TermType#VARIABLE}
 	 *
-	 * @param name
-	 *            term name
+	 * @param name term name
 	 * @return a {@link karmaresearch.vlog.Term.TermType#VARIABLE} type term with
 	 *         given name.
 	 */
@@ -51,8 +50,7 @@ static karmaresearch.vlog.Term makeVariable(final String name) {
 	 * Creates a {@link karmaresearch.vlog.Term} object with given name and type
 	 * {@link karmaresearch.vlog.Term.TermType#CONSTANT}
 	 *
-	 * @param name
-	 *            term name
+	 * @param name term name
 	 * @return a {@link karmaresearch.vlog.Term.TermType#CONSTANT} type term with
 	 *         given name.
 	 */
@@ -63,13 +61,11 @@ static karmaresearch.vlog.Term makeConstant(final String name) {
 	}
 
 	/**
-	 * Creates an {@link karmaresearch.vlog.Atom} object with given predicate name
-	 * and terms.
+	 * Creates a positive {@link karmaresearch.vlog.Atom} object with given
+	 * predicate name and terms.
 	 *
-	 * @param predicateName
-	 *            the name of the internal vlog atom predicate.
-	 * @param terms
-	 *            atom terms.
+	 * @param predicateName the name of the internal vlog atom predicate.
+	 * @param terms         atom terms.
 	 * @return an {@link karmaresearch.vlog.Atom} object with given
 	 *         {@code predicateName} and {@link karmaresearch.vlog.Term}
 	 *         {@code terms}.
@@ -78,14 +74,26 @@ static karmaresearch.vlog.Atom makeAtom(final String predicateName, final karmar
 		return new karmaresearch.vlog.Atom(predicateName, terms);
 	}
 
+	/**
+	 * Creates a negated {@link karmaresearch.vlog.Atom} object with given predicate
+	 * name and terms, and negated value {@code true}.
+	 *
+	 * @param predicateName the name of the internal vlog atom predicate.
+	 * @param terms         atom terms.
+	 * @return an {@link karmaresearch.vlog.Atom} object with given
+	 *         {@code predicateName} and {@link karmaresearch.vlog.Term}
+	 *         {@code terms}.
+	 */
+	static karmaresearch.vlog.Atom makeNegatedAtom(final String predicateName, final karmaresearch.vlog.Term... terms) {
+		return new karmaresearch.vlog.Atom(predicateName, true, terms);
+	}
+
 	/**
 	 * Creates a {@link karmaresearch.vlog.Rule} object with given head and body
 	 * conjuncts.
 	 *
-	 * @param headAtom
-	 *            rule head atom
-	 * @param bodyAtoms
-	 *            rule body conjuncts
+	 * @param headAtom  rule head atom
+	 * @param bodyAtoms rule body conjuncts
 	 * @return a {@link karmaresearch.vlog.Rule} object with given {@code headAtom}
 	 *         and body conjuncts ({@code bodyAtoms}).
 	 */
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java
new file mode 100644
index 000000000..81ccbc147
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogKnowledgeBaseTest.java
@@ -0,0 +1,75 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.AbstractConstant;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.UniversalVariable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+
+public class VLogKnowledgeBaseTest {
+	private KnowledgeBase knowledgeBase = new KnowledgeBase();
+	private Predicate p = Expressions.makePredicate("P", 1);
+	private Predicate q = Expressions.makePredicate("Q", 1);
+	private UniversalVariable x = Expressions.makeUniversalVariable("x");
+	private AbstractConstant c = Expressions.makeAbstractConstant("c");
+	private Fact fact = Expressions.makeFact(p, c);
+	private PositiveLiteral literal = Expressions.makePositiveLiteral(p, x);
+	private Rule rule = Expressions.makeRule(literal, literal);
+
+	@Test
+	public void hasData_noData_returnsFalse() {
+		VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase);
+		assertFalse(vKB.hasData());
+	}
+
+	@Test
+	public void hasData_noAliasedPredicates_returnsTrue() {
+		knowledgeBase.addStatement(fact);
+		VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase);
+		assertTrue(vKB.hasData());
+	}
+
+	@Test
+	public void hasData_onlyAliasedPredicates_returnsTrue() {
+		knowledgeBase.addStatement(rule);
+		knowledgeBase.addStatement(fact);
+		VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase);
+		assertTrue(vKB.hasData());
+	}
+
+	@Test
+	public void hasData_bothUnaliasedAndAliasedPredicates_returnsTrue() {
+		knowledgeBase.addStatement(Expressions.makeFact(q, c));
+		knowledgeBase.addStatement(rule);
+		knowledgeBase.addStatement(fact);
+		VLogKnowledgeBase vKB = new VLogKnowledgeBase(knowledgeBase);
+		assertTrue(vKB.hasData());
+	}
+}
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java
similarity index 77%
rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java
rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java
index f2c4614ae..8f3d65308 100644
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryResultUtils.java
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryResultUtils.java
@@ -1,17 +1,17 @@
-package org.semanticweb.vlog4j.core.reasoner.vlog;
+package org.semanticweb.rulewerk.reasoner.vlog;
 
 /*-
  * #%L
- * VLog4j Core Components
+ * Rulewerk VLog Reasoner Support
  * %%
- * Copyright (C) 2018 VLog4j Developers
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
  * %%
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
- * 
+ *
 * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -22,6 +22,7 @@
 
 import static org.junit.Assert.assertTrue;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
@@ -33,7 +34,7 @@
 /**
  * Utility class with static methods used for collecting query results for
  * testing purposes.
- * 
+ *
  * @author Irina Dragoste
  *
 */
@@ -46,7 +47,7 @@ private VLogQueryResultUtils() {
 	 * Collects TermQueryResultIterator results into a Set. Transforms the array of
 	 * {@link Term}s into a set of {@link Term}s. Asserts that the results do not
 	 * contain duplicates. Closes the iterator after collecting the results.
-	 * 
+	 *
 	 * @param queryResultIterator
 	 * @return a set of unique query result. A query result is a List of Term
 	 *         tuples.
@@ -61,4 +62,16 @@ static Set<List<Term>> collectResults(final TermQueryResultIterator queryResultI
 		return answers;
 	}
 
+	@SuppressWarnings("unchecked")
+	private static <E extends Throwable> void sneakyThrow(Throwable e) throws E {
+		throw (E) e;
+	}
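+	// sneakyThrow compiles because the unchecked cast defers the type check to
+	// runtime, where it is erased: at a call site without a throws clause, E is
+	// inferred as RuntimeException, so a checked exception propagates without
+	// being declared or wrapped (the usual "sneaky throw" idiom).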
+
+	/**
+	 * Throws an {@link IOException} as an unchecked exception. Needed for testing
+	 * {@link VLogReasoner#unsafeForEachInference}.
+	 */
+	static void sneakilyThrowIOException() {
+		sneakyThrow(new IOException());
+	}
 }
diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java
similarity index 89%
rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java
rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java
index d863770a3..af7133bca 100644
--- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogQueryTest.java
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogQueryTest.java
@@ -1,10 +1,10 @@
-package org.semanticweb.vlog4j.core.reasoner.vlog;
+package org.semanticweb.rulewerk.reasoner.vlog;
 
 /*-
  * #%L
- * VLog4j Core Components
+ * Rulewerk VLog Reasoner Support
  * %%
- * Copyright (C) 2018 VLog4j Developers
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
  * %%
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -28,6 +28,7 @@
 
 import karmaresearch.vlog.Atom;
 import karmaresearch.vlog.EDBConfigurationException;
+import karmaresearch.vlog.NonExistingPredicateException;
 import karmaresearch.vlog.NotStartedException;
 import karmaresearch.vlog.Rule;
 import karmaresearch.vlog.Term;
@@ -48,7 +49,7 @@ public class VLogQueryTest {
 	private final Atom queryAtomQPredicate = VLogExpressions.makeAtom("q", variableZ);
 
 	@Test
-	public void queryResultWithBlanksExcludeBlanks() throws EDBConfigurationException, NotStartedException {
+	public void queryResultWithBlanksExcludeBlanks() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException {
 		final VLog vLog = new VLog();
 
 		vLog.addData("p", pFactArguments);
@@ -61,7 +62,7 @@ public void queryResultWithBlanksExcludeBlanks() throws EDBConfigurationExceptio
 	}
 
 	@Test
-	public void queryResultWithBlanksInludeBlanks() throws EDBConfigurationException, NotStartedException {
+	public void queryResultWithBlanksIncludeBlanks() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException {
 		final VLog vLog = new VLog();
 		vLog.addData("p", pFactArguments);
 		vLog.setRules(new Rule[] { ruleWithExistentials }, RuleRewriteStrategy.NONE);
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java
new file mode 100644
index 000000000..8e2b07f57
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerBasics.java
@@ -0,0 +1,108 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+/*
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+
+public class VLogReasonerBasics {
+
+	final String constantNameC = "c";
+	final String constantNameD = "d";
+
+	final Constant constantC = Expressions.makeAbstractConstant(constantNameC);
+	final Constant constantD = Expressions.makeAbstractConstant(constantNameD);
+	final Variable x = Expressions.makeUniversalVariable("x");
+	final Fact factAc = Expressions.makeFact("A", Arrays.asList(constantC));
+	final Fact factAd = Expressions.makeFact("A", Arrays.asList(constantD));
+	final PositiveLiteral atomAx = Expressions.makePositiveLiteral("A", x);
+	final PositiveLiteral atomBx = Expressions.makePositiveLiteral("B", x);
+	final PositiveLiteral atomCx = Expressions.makePositiveLiteral("C", x);
+	final Rule ruleBxAx = Expressions.makeRule(atomBx, atomAx);
+	final Rule ruleCxBx = Expressions.makeRule(atomCx, atomBx);
+
+	@Test(expected = NullPointerException.class)
+	public void testSetAlgorithmNull() {
+		try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) {
+			reasoner.setAlgorithm(null);
+		}
+	}
+
+	@Test(expected = NullPointerException.class)
+	public void setRuleRewriteStrategy1() {
+		try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) {
+			reasoner.setRuleRewriteStrategy(null);
+		}
+	}
+
+	@Test
+	public void testLoadRules() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(ruleBxAx, ruleCxBx, ruleBxAx);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			assertEquals(Arrays.asList(ruleBxAx, ruleCxBx), kb.getRules());
+		}
+	}
+
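+	// Typical reasoner lifecycle: load() hands the knowledge base to VLog,
+	// reason() materializes all derivable facts, and answerQuery(...) enumerates
+	// matches. Before reason() is called, the derived predicate C has no facts,
+	// which the first assertion below checks.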
+	@Test
+	public void testSimpleInference() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(ruleBxAx, ruleCxBx, factAc, factAd);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+
+			final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true);
+			assertFalse(cxQueryResultEnumBeforeReasoning.hasNext());
+
+			reasoner.reason();
+
+			final QueryResultIterator cxQueryResultEnumAfterReasoning = reasoner.answerQuery(atomCx, true);
+			final Set<List<Term>> actualResults = QueryResultsUtils
+					.collectQueryResults(cxQueryResultEnumAfterReasoning);
+
+			final Set<List<Term>> expectedResults = new HashSet<>(
+					Arrays.asList(Arrays.asList(constantC), Arrays.asList(constantD)));
+
+			assertEquals(expectedResults, actualResults);
+		}
+	}
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java
new file mode 100644
index 000000000..17f5eac18
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCombinedInputs.java
@@ -0,0 +1,161 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+import static org.junit.Assert.assertEquals;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.DataSourceDeclaration;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
+
+public class VLogReasonerCombinedInputs {
+
+	final Variable vx = Expressions.makeUniversalVariable("x");
+	final Predicate q = Expressions.makePredicate("q", 1);
+	final Rule rulePimpliesQ = Expressions.makeRule(Expressions.makePositiveLiteral("q", vx),
+			Expressions.makePositiveLiteral("p", vx));
+
+	final Fact factQc = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("c")));
+	final Fact factQc1 = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("c1")));
+	final Fact factQc2 = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("c2")));
+	final Fact factQd = Expressions.makeFact(q, Arrays.asList(Expressions.makeAbstractConstant("d")));
+	final Fact factPd = Expressions.makeFact("p", Arrays.asList(Expressions.makeAbstractConstant("d")));
+	final PositiveLiteral queryQx = Expressions.makePositiveLiteral(q,
+			Arrays.asList(Expressions.makeUniversalVariable("x")));
+
+	final Set<List<Term>> resultsCC1C2D = new HashSet<>(
+			Arrays.asList(Collections.singletonList(Expressions.makeAbstractConstant("c")),
+					Collections.singletonList(Expressions.makeAbstractConstant("c1")),
+					Collections.singletonList(Expressions.makeAbstractConstant("c2")),
+					Collections.singletonList(Expressions.makeAbstractConstant("d"))));
+
+	final DataSourceDeclaration qFromCsv;
+	final DataSourceDeclaration qCDFromCsv;
+
+	public VLogReasonerCombinedInputs() throws IOException {
+		qFromCsv = new DataSourceDeclarationImpl(q, new CsvFileDataSource(
+				FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"));
+		qCDFromCsv = new DataSourceDeclarationImpl(q,
+				new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "unaryFactsCD.csv"));
+	}
+
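+	// Each test below feeds predicate q from a different combination of the CSV
+	// source, explicit facts, and the rule p -> q, in a different statement
+	// order; the materialized extension of q must be the same in every case.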
+	@Test
+	public void samePredicateSourceFactRule() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(qFromCsv, factQc, factPd, rulePimpliesQ);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+			assertEquals(resultsCC1C2D, queryResult);
+		}
+	}
+
+	@Test
+	public void samePredicateFactSourceRule() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(factQc, factPd, qFromCsv, rulePimpliesQ);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+			assertEquals(resultsCC1C2D, queryResult);
+		}
+	}
+
+	@Test
+	public void samePredicateRuleFactSource() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(rulePimpliesQ, factQc, factPd, qFromCsv);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+			assertEquals(resultsCC1C2D, queryResult);
+		}
+	}
+
+	@Test
+	public void samePredicateSourceSource() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(qFromCsv, qCDFromCsv);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+			assertEquals(resultsCC1C2D, queryResult);
+		}
+	}
+
+	@Test
+	public void samePredicateSourceFactFact() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(qFromCsv, factQc, factQd);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+			assertEquals(resultsCC1C2D, queryResult);
+		}
+	}
+
+	@Test
+	public void samePredicateFactsRule() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(factPd, factQc, factQc1, factQc2, rulePimpliesQ);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryQx, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+			assertEquals(resultsCC1C2D, queryResult);
+		}
+	}
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java
new file mode 100644
index 000000000..b5b365174
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvInput.java
@@ -0,0 +1,133 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Test;
+import org.mockito.internal.util.collections.Sets;
+import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource;
+
+public class VLogReasonerCsvInput {
+
+	private static final Predicate unaryPredicate1 = Expressions.makePredicate("p", 1);
+	private static final Predicate unaryPredicate2 = Expressions.makePredicate("q", 1);
+
+	private final Variable x = Expressions.makeUniversalVariable("x");
+	private final Constant c1 = Expressions.makeAbstractConstant("c1");
+	private final Constant c2 = Expressions.makeAbstractConstant("c2");
+
+	@SuppressWarnings("unchecked")
+	private final Set<List<Term>> expectedUnaryQueryResult = Sets.newSet(Arrays.asList(this.c1),
+			Arrays.asList(this.c2));
+
+	@Test
+	public void testLoadEmptyCsvFile() throws IOException {
+		final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(unaryPredicate1, this.x);
+
+		FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom,
+				new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv"));
+		FileDataSourceTestUtils.testLoadEmptyFile(unaryPredicate1, queryAtom,
+				new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.csv.gz"));
+	}
+
+	@Test
+	public void testLoadUnaryFactsFromCsvFile() throws IOException {
+		testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(
+				FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedUnaryCsvFileRoot + ".csv"));
+		testLoadUnaryFactsFromSingleCsvDataSource(new CsvFileDataSource(
+				FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedUnaryCsvFileRoot + ".csv.gz"));
+	}
+
+	private void testLoadUnaryFactsFromSingleCsvDataSource(final FileDataSource fileDataSource) throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource));
+		kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate2, fileDataSource));
+
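+		// The same file backs both predicates, so after loading, p and q must
+		// have identical extensions; both queries below are checked against the
+		// same expected result set.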
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+
+			final QueryResultIterator queryResultIterator1 = reasoner
+					.answerQuery(Expressions.makePositiveLiteral(unaryPredicate1, this.x), true);
+			final Set<List<Term>> queryResult1 = QueryResultsUtils.collectQueryResults(queryResultIterator1);
+			final QueryResultIterator queryResultIterator2 = reasoner
+					.answerQuery(Expressions.makePositiveLiteral(unaryPredicate2, this.x), true);
+			final Set<List<Term>> queryResult2 = QueryResultsUtils.collectQueryResults(queryResultIterator2);
+
+			assertEquals(this.expectedUnaryQueryResult, queryResult1);
+			assertEquals(this.expectedUnaryQueryResult, queryResult2);
+		}
+	}
+
+	/**
+	 * Tries to add a {@code CsvFileDataSource} from a file that does not exist on
+	 * disk.
+	 *
+	 * @throws IOException
+	 */
+	@Test(expected = IOException.class)
+	public void testLoadNonexistingCsvFile() throws IOException {
+		final File nonexistingFile = new File("nonexistingFile.csv");
+		assertFalse(nonexistingFile.exists());
+		final FileDataSource fileDataSource = new CsvFileDataSource(nonexistingFile.getName());
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource));
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+		}
+	}
+
+	@Test(expected = IncompatiblePredicateArityException.class)
+	public void testLoadCsvFileWrongArity() throws IOException {
+		final FileDataSource fileDataSource = new CsvFileDataSource(
+				FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.binaryCsvFileNameRoot + ".csv");
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatement(new DataSourceDeclarationImpl(unaryPredicate1, fileDataSource));
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+		}
+	}
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java
new file mode 100644
index 000000000..45981bb6c
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerCsvOutput.java
@@ -0,0 +1,153 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+
+public class VLogReasonerCsvOutput {
+
+	private final static String nonExistingFilePath = FileDataSourceTestUtils.OUTPUT_FOLDER + "empty.csv";
+
+	@Test
+	public void testEDBQuerySameConstantSubstitutesSameVariableName() throws IOException {
+		final String predicate = "p";
+		final Constant constantC = Expressions.makeAbstractConstant("c");
+		final Constant constantD = Expressions.makeAbstractConstant("d");
+		final Variable x = Expressions.makeUniversalVariable("X");
+		final Variable y = Expressions.makeUniversalVariable("Y");
+		final Variable z = Expressions.makeUniversalVariable("Z");
+		final Fact fact = Expressions.makeFact(predicate, Arrays.asList(constantC, constantC, constantD));
+
+		final boolean includeBlanks = false;
+		final List<List<String>> factCCD = Arrays.asList(Arrays.asList("c", "c", "d"));
+
+		final KnowledgeBase kb = new KnowledgeBase();
+
+		kb.addStatement(fact);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+
+			final PositiveLiteral queryAtomXYZ = Expressions.makePositiveLiteral(predicate, x, y, z);
+			final String csvFilePathXYZ = FileDataSourceTestUtils.OUTPUT_FOLDER + "outputXYZ.csv";
+			reasoner.exportQueryAnswersToCsv(queryAtomXYZ, csvFilePathXYZ, includeBlanks);
+			final List<List<String>> csvContentXYZ = FileDataSourceTestUtils.getCSVContent(csvFilePathXYZ);
+			assertEquals(factCCD, csvContentXYZ);
+
+			final PositiveLiteral queryAtomXXZ = Expressions.makePositiveLiteral(predicate, x, x, z);
+			final String csvFilePathXXZ = FileDataSourceTestUtils.OUTPUT_FOLDER + "outputXXZ.csv";
+			reasoner.exportQueryAnswersToCsv(queryAtomXXZ, csvFilePathXXZ, includeBlanks);
+			final List<List<String>> csvContentXXZ = FileDataSourceTestUtils.getCSVContent(csvFilePathXXZ);
+			assertEquals(factCCD, csvContentXXZ);
+
+			final PositiveLiteral queryAtomXXX = Expressions.makePositiveLiteral(predicate, x, x, x);
+			final String csvFilePathXXX = FileDataSourceTestUtils.OUTPUT_FOLDER + "outputXXX.csv";
+			reasoner.exportQueryAnswersToCsv(queryAtomXXX, csvFilePathXXX, includeBlanks);
+			final List<List<String>> csvContentXXX = FileDataSourceTestUtils.getCSVContent(csvFilePathXXX);
+			assertTrue(csvContentXXX.isEmpty());
+
+			final PositiveLiteral queryAtomXYX = Expressions.makePositiveLiteral(predicate, x, y, x);
+			final String csvFilePathXYX = FileDataSourceTestUtils.OUTPUT_FOLDER + "outputXYX.csv";
+			reasoner.exportQueryAnswersToCsv(queryAtomXYX, csvFilePathXYX, includeBlanks);
+			final List<List<String>> csvContentXYX = FileDataSourceTestUtils.getCSVContent(csvFilePathXYX);
+			assertTrue(csvContentXYX.isEmpty());
+		}
+
+	}
+
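+	// In the queries above, a repeated variable imposes an equality constraint
+	// on the answer positions: X,X,Z matches the fact (c,c,d) because its first
+	// two terms coincide, while X,X,X and X,Y,X require equalities the fact does
+	// not satisfy, so those exports are empty.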
+	@Test
+	public void testExportQueryEmptyKnowledgeBaseBeforeReasoningIncludeBlanks() throws IOException {
+		final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"),
+				Expressions.makeUniversalVariable("?y"));
+
+		final KnowledgeBase kb = new KnowledgeBase();
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, true);
+		}
+		assertFalse(Files.exists(Paths.get(nonExistingFilePath)));
+	}
+
+	@Test
+	public void testExportQueryEmptyKnowledgeBaseBeforeReasoningExcludeBlanks() throws IOException {
+		final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"),
+				Expressions.makeUniversalVariable("?y"));
+
+		final KnowledgeBase kb = new KnowledgeBase();
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+
+			reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, false);
+		}
+		assertFalse(Files.exists(Paths.get(nonExistingFilePath)));
+	}
+
+	@Test
+	public void testExportQueryEmptyKnowledgeBaseAfterReasoningIncludeBlanks() throws IOException {
+		final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"),
+				Expressions.makeUniversalVariable("?y"));
+
+		final KnowledgeBase kb = new KnowledgeBase();
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+
+			reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, true);
+		}
+		assertFalse(Files.exists(Paths.get(nonExistingFilePath)));
+	}
+
+	@Test
+	public void testExportQueryEmptyKnowledgeBaseAfterReasoningExcludeBlanks() throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+
+		final PositiveLiteral queryAtom = Expressions.makePositiveLiteral("p", Expressions.makeUniversalVariable("?x"),
+				Expressions.makeUniversalVariable("?y"));
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+
+			reasoner.exportQueryAnswersToCsv(queryAtom, nonExistingFilePath, false);
+		}
+		assertFalse(Files.exists(Paths.get(nonExistingFilePath)));
+	}
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java
new file mode 100644
index 000000000..215590006
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerNegation.java
@@ -0,0 +1,121 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.junit.Test;
+import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException;
+import org.semanticweb.rulewerk.core.model.api.Constant;
+import org.semanticweb.rulewerk.core.model.api.Literal;
+import org.semanticweb.rulewerk.core.model.api.Fact;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.QueryResult;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+
+public class VLogReasonerNegation {
+
+	private final Variable x = Expressions.makeUniversalVariable("x");
+	private final Variable y = Expressions.makeUniversalVariable("y");
+
+	private final Constant c = Expressions.makeAbstractConstant("c");
+	private final Constant d = Expressions.makeAbstractConstant("d");
+	private final Constant e = Expressions.makeAbstractConstant("e");
+	private final Constant f = Expressions.makeAbstractConstant("f");
+
+	private final Literal pXY = Expressions.makePositiveLiteral("P", x, y);
+	private final Literal notQXY = Expressions.makeNegativeLiteral("Q", x, y);
+
+	private final Literal notRXY = Expressions.makeNegativeLiteral("R", x, y);
+	private final PositiveLiteral sXY = Expressions.makePositiveLiteral("S", x, y);
+
+	private final Fact pCD = Expressions.makeFact("P", Arrays.asList(c, d));
+	private final Fact pEF = Expressions.makeFact("P", Arrays.asList(e, f));
+	private final Fact qCD = Expressions.makeFact("Q", Arrays.asList(c, d));
+
+	@Test(expected = RulewerkRuntimeException.class)
+	public void testNotStratifiable() throws IOException {
+
+		final PositiveLiteral qXY = Expressions.makePositiveLiteral("Q", x, y);
+
+		final Rule rule = Expressions.makeRule(qXY, pXY, notQXY);
+
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatement(rule);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+		}
+	}
+
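+	// The rule above derives Q from the absence of Q, so Q depends negatively on
+	// itself and no stratification exists; reasoning is rejected. The following
+	// tests only negate predicates that are never derived by any rule, which
+	// keeps the program stratifiable.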
+	@Test
+	public void testStratifiable() throws IOException {
+
+		final Rule rule = Expressions.makeRule(sXY, pXY, notQXY, notRXY);
+
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(rule, pCD, pEF, qCD);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+
+			try (QueryResultIterator result = reasoner.answerQuery(sXY, true)) {
+				assertTrue(result.hasNext());
+				final QueryResult answer = result.next();
+				assertEquals(Arrays.asList(e, f), answer.getTerms());
+				assertFalse(result.hasNext());
+			}
+		}
+	}
+
+	@Test
+	public void testInputNegation() throws IOException {
+
+		final Rule rule = Expressions.makeRule(sXY, pXY, notQXY);
+
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatements(rule, pCD, pEF, qCD);
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+			reasoner.reason();
+
+			try (QueryResultIterator result = reasoner.answerQuery(sXY, true)) {
+				assertTrue(result.hasNext());
+				final QueryResult answer = result.next();
+				assertEquals(Arrays.asList(e, f), answer.getTerms());
+				assertFalse(result.hasNext());
+			}
+		}
+	}
+
+}
diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java
new file mode 100644
index 000000000..b5d068ab6
--- /dev/null
+++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerRdfInput.java
@@ -0,0 +1,160 @@
+package org.semanticweb.rulewerk.reasoner.vlog;
+
+/*-
+ * #%L
+ * Rulewerk VLog Reasoner Support
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import org.mockito.internal.util.collections.Sets;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.implementation.FileDataSource;
+import org.semanticweb.rulewerk.core.reasoner.implementation.RdfFileDataSource;
+
+public class VLogReasonerRdfInput {
+
+	private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3);
+	private static final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate,
+			Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"),
+			Expressions.makeUniversalVariable("o"));
+	private static final PositiveLiteral queryAtomString = Expressions.makePositiveLiteral(ternaryPredicate,
+			Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"),
+			Expressions.makeDatatypeConstant("test string", PrefixDeclarationRegistry.XSD_STRING));
+
+	@SuppressWarnings("unchecked")
+	private static final Set<List<Term>> expectedTernaryQueryResult = Sets.newSet(
+			Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"),
+					Expressions.makeAbstractConstant("http://example.org/p"),
+					Expressions.makeAbstractConstant("http://example.org/c2")),
+			Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"),
+					Expressions.makeAbstractConstant("http://example.org/q"),
+					Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string")));
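+	// The plain RDF literal "test string" is normalized on loading and is
+	// returned as a datatype constant with the implicit type xsd:string, which
+	// both expected result sets above and below assert.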
+	@SuppressWarnings("unchecked")
+	private static final Set<List<Term>> expectedTernaryQueryResultString = Sets
+			.newSet(Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"),
+					Expressions.makeAbstractConstant("http://example.org/q"),
+					Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string")));
+
+	@Ignore
+	// TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation
+	// after VLog bug is fixed.
+	@Test
+	public void testLoadEmptyRdfFile() throws IOException {
+		FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom,
+				new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt"));
+	}
+
+	@Ignore
+	// TODO test fails for now, because of a VLog bug. Remove the @Ignore annotation
+	// after VLog bug is fixed.
+	@Test
+	public void testLoadEmptyRdfFileGz() throws IOException {
+		FileDataSourceTestUtils.testLoadEmptyFile(ternaryPredicate, queryAtom,
+				new RdfFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "empty.nt.gz"));
+	}
+
+	@Test
+	public void testLoadTernaryFactsFromRdfFile() throws IOException {
+		testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource(
+				FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt"));
+	}
+
+	@Test
+	public void queryStringFromRdf_succeeds() throws IOException {
+		testQueryStringFromSingleRdfDataSource(new RdfFileDataSource(
+				FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.unzippedNtFileRoot + ".nt"));
+	}
+
+	@Test
+	public void testLoadTernaryFactsFromRdfFileGz() throws IOException {
+		testLoadTernaryFactsFromSingleRdfDataSource(new RdfFileDataSource(
+				FileDataSourceTestUtils.INPUT_FOLDER + FileDataSourceTestUtils.zippedNtFileRoot + ".nt.gz"));
+	}
+
+	public void testLoadTernaryFactsFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource));
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+
+			assertEquals(expectedTernaryQueryResult, queryResult);
+		}
+	}
+
+	public void testQueryStringFromSingleRdfDataSource(final FileDataSource fileDataSource) throws IOException {
+		final KnowledgeBase kb = new KnowledgeBase();
+		kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource));
+
+		try (final VLogReasoner reasoner = new VLogReasoner(kb)) {
+			reasoner.load();
+
+			final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomString, true);
+			final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator);
+
+			assertEquals(expectedTernaryQueryResultString, queryResult);
+		}
+	}
+
+	@Test(expected = IOException.class)
+	public void testLoadNonexistingRdfFile() throws IOException {
+		final File nonexistingFile = new File("nonexistingFile.nt");
+		assertFalse(nonexistingFile.exists());
+		final FileDataSource fileDataSource = new RdfFileDataSource(nonexistingFile.getName());
+		final KnowledgeBase kb = new KnowledgeBase();
DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + FileDataSourceTestUtils.testNoFactsOverPredicate(reasoner, queryAtom); + } + } + +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java similarity index 50% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java index d729bdc55..7fd6c34ad 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromSparqlQueryTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerSparqlInput.java @@ -1,13 +1,10 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * VLog4j Core Components + * Rulewerk VLog Reasoner Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,6 +20,9 @@ * #L% */ +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + import java.io.IOException; import java.net.URL; import java.util.Arrays; @@ -30,42 +30,46 @@ import org.junit.Ignore; import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -public class LoadDataFromSparqlQueryTest { +import org.semanticweb.rulewerk.core.exceptions.IncompatiblePredicateArityException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource; + +public class VLogReasonerSparqlInput { /** * Tests the query "SELECT ?b ?a WHERE {?a p:P22 ?b}" - * + * * @throws ReasonerStateException * @throws EdbIdbSeparationException * @throws IOException * @throws IncompatiblePredicateArityException + * @throws QueryPredicateNonExistentException */ @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQuery() - throws ReasonerStateException, EdbIdbSeparationException, IOException, 
IncompatiblePredicateArityException { + public void testSimpleSparqlQuery() throws IOException { final URL endpoint = new URL("http://query.wikidata.org/sparql"); final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); + Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // a has father b - "?a p:P22 ?b"); + "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(fatherOfPredicate, dataSource)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makeAtom(fatherOfPredicate, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -76,21 +80,22 @@ public void testSimpleSparqlQuery() @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testSimpleSparqlQueryHttps() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testSimpleSparqlQueryHttps() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); + Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // a has father b - "?a p:P22 ?b"); + "?a wdt:P22 ?b"); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(fatherOfPredicate, dataSource)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makeAtom(fatherOfPredicate, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); final QueryResult firstAnswer = answerQuery.next(); @@ -101,29 +106,31 @@ public void testSimpleSparqlQueryHttps() /** * Tests the query "SELECT ?b ?a WHERE {?a p:P22 ?b .}" - * + * @throws ReasonerStateException * @throws EdbIdbSeparationException * @throws IOException * @throws IncompatiblePredicateArityException + * @throws QueryPredicateNonExistentException */ @Ignore // Ignored during CI because it makes lengthy calls to
remote servers @Test - public void testSimpleSparqlQuery2() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testSimpleSparqlQuery2() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); + Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // a has father b - "?a p:P22 ?b ."); + "?a wdt:P22 ?b ."); final Predicate fatherOfPredicate = Expressions.makePredicate("FatherOf", 2); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(fatherOfPredicate, dataSource)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFactsFromDataSource(fatherOfPredicate, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makeAtom(fatherOfPredicate, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery(Expressions.makePositiveLiteral(fatherOfPredicate, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false)) { assertTrue(answerQuery.hasNext()); } @@ -131,42 +138,45 @@ public void testSimpleSparqlQuery2() } @Ignore // Ignored during CI because it makes lengthy calls to remote servers - @Test(expected = RuntimeException.class) - public void testConjunctiveQueryNewLineCharacterInQueryBody() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + @Test(expected = RulewerkRuntimeException.class) + public void testConjunctiveQueryNewLineCharacterInQueryBody() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); + Arrays.asList(Expressions.makeUniversalVariable("a"), Expressions.makeUniversalVariable("c"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // b has father a and b has mother c - "?b p:P22 ?a .\n" + "?b p:P25 ?c"); + "?b wdt:P22 ?a .\n" + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(haveChildrenTogether, dataSource)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - reasoner.answerQuery(Expressions.makeAtom(haveChildrenTogether, Expressions.makeVariable("x"), - Expressions.makeVariable("y")), false); + reasoner.answerQuery(Expressions.makePositiveLiteral(haveChildrenTogether, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), false); } } @Ignore // Ignored during CI because it makes lengthy calls to remote servers @Test - public void testConjunctiveQuery() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void
testConjunctiveQuery() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("a"), Expressions.makeVariable("c"))); + Arrays.asList(Expressions.makeUniversalVariable("a"), Expressions.makeUniversalVariable("c"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // b has father a and b has mother c - "?b p:P22 ?a ." + "?b p:P25 ?c"); + "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); final Predicate haveChildrenTogether = Expressions.makePredicate("haveChildrenTogether", 2); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(haveChildrenTogether, dataSource)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFactsFromDataSource(haveChildrenTogether, dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(Expressions.makeAtom(haveChildrenTogether, - Expressions.makeVariable("x"), Expressions.makeVariable("y")), false)) { + try (final QueryResultIterator answerQuery = reasoner + .answerQuery( + Expressions.makePositiveLiteral(haveChildrenTogether, + Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y")), + false)) { assertTrue(answerQuery.hasNext()); } @@ -174,19 +184,18 @@ public void testConjunctiveQuery() } @Test(expected = IncompatiblePredicateArityException.class) - public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { + public void testDataSourcePredicateDoesNotMatchSparqlQueryTerms() throws IOException { final URL endpoint = new URL("https://query.wikidata.org/sparql"); final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); + Arrays.asList(Expressions.makeUniversalVariable("b"), Expressions.makeUniversalVariable("a"))); final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, // b has father a and b has mother c - "?b p:P22 ?a ." + "?b p:P25 ?c"); + "?b wdt:P22 ?a ." + "?b wdt:P25 ?c"); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("ternary", 3), dataSource)); - try (final Reasoner reasoner = Reasoner.getInstance()) { - // TODO must validate predicate arity sonner - reasoner.addFactsFromDataSource(Expressions.makePredicate("ternary", 3), dataSource); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { reasoner.load(); } } diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java new file mode 100644 index 000000000..0ea81efbb --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerStateTest.java @@ -0,0 +1,487 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.ReasonerStateException; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.Variable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.Algorithm; +import org.semanticweb.rulewerk.core.reasoner.Correctness; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.ReasonerState; +import org.semanticweb.rulewerk.core.reasoner.implementation.CsvFileDataSource; + +public class VLogReasonerStateTest { + + private static final Predicate p = Expressions.makePredicate("p", 1); + private static final Predicate q = Expressions.makePredicate("q", 1); + private static final Variable x = Expressions.makeUniversalVariable("x"); + private static final Constant c = Expressions.makeAbstractConstant("c"); + private static final Constant d = Expressions.makeAbstractConstant("d"); + private static final PositiveLiteral exampleQueryAtom = Expressions.makePositiveLiteral("q", x); + + private static final PositiveLiteral ruleHeadQx = Expressions.makePositiveLiteral(q, x); + private static final PositiveLiteral ruleBodyPx = Expressions.makePositiveLiteral(p, x); + private static final Rule ruleQxPx = Expressions.makeRule(ruleHeadQx, ruleBodyPx); + private static final Fact factPc = Expressions.makeFact(p, c); + private static final Fact factPd = Expressions.makeFact(p, d); + + @Test(expected = ReasonerStateException.class) + public void testFailAnswerQueryBeforeLoad() { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.answerQuery(exampleQueryAtom, true); + } + } + + @Test(expected = ReasonerStateException.class) + public void testFailExportQueryAnswersBeforeLoad() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); + } + } + + @Test(expected = ReasonerStateException.class) + public void testFailAnswerQueryAfterReset() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.reason(); + reasoner.resetReasoner(); + 
reasoner.answerQuery(exampleQueryAtom, true); + } + } + + @Test(expected = ReasonerStateException.class) + public void testFailExportQueryAnswersAfterReset() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.reason(); + reasoner.resetReasoner(); + reasoner.exportQueryAnswersToCsv(exampleQueryAtom, "", true); + } + } + + @Test + public void testAddFactsAndQuery() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + + kb.addStatement(factPc); + reasoner.load(); + + final PositiveLiteral query = Expressions.makePositiveLiteral(p, x); + final Set<List<Term>> expectedAnswersC = new HashSet<>(Arrays.asList(Collections.singletonList(c))); + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + final Set<List<Term>> queryAnswersC = QueryResultsUtils.collectQueryResults(queryResult); + + assertEquals(expectedAnswersC, queryAnswersC); + } + + reasoner.getKnowledgeBase().addStatement(factPd); + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { + assertEquals(Correctness.INCORRECT, queryResult.getCorrectness()); + assertEquals(expectedAnswersC, QueryResultsUtils.collectQueryResults(queryResult)); + } + + reasoner.load(); + + try (final QueryResultIterator queryResult = reasoner.answerQuery(query, true)) { + assertEquals(Correctness.SOUND_AND_COMPLETE, queryResult.getCorrectness()); + + final Set<List<Term>> queryAnswersD = QueryResultsUtils.collectQueryResults(queryResult); + + final Set<List<Term>> expectedAnswersCD = new HashSet<>( + Arrays.asList(Collections.singletonList(c), Collections.singletonList(d))); + assertEquals(expectedAnswersCD, queryAnswersD); + } + } + } + + @Test + public void testAddRules2() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(ruleQxPx); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.resetReasoner(); + } + } + + @Test(expected = NullPointerException.class) + public void testAddRules3() { + final KnowledgeBase kb = new KnowledgeBase(); + final List<Rule> rules = new ArrayList<>(); + rules.add(ruleQxPx); + rules.add(null); + kb.addStatements(rules); + } + + @Test(expected = NullPointerException.class) + public void testAddFacts2() throws IOException { + + final KnowledgeBase kb = new KnowledgeBase(); + final List<Fact> facts = new ArrayList<>(); + facts.add(factPc); + facts.add(null); + kb.addStatements(facts); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + } + } + + @Test + public void testResetBeforeLoad() { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.resetReasoner(); + } + } + + @Test + public void testResetDiscardInferences() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc); + + for (final Algorithm algorithm : Algorithm.values()) { + // discard inferences regardless of the inference algorithm + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.setAlgorithm(algorithm); + + reasoner.load(); + reasoner.reason(); + try (final QueryResultIterator queryQxIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set<List<Term>> queryQxResults = QueryResultsUtils.collectQueryResults(queryQxIterator); + final Set<List<Term>> queryQxExpectedResults = new HashSet<>(); + queryQxExpectedResults.add(Arrays.asList(c)); + assertEquals(queryQxResults,
queryQxExpectedResults); + } + + reasoner.resetReasoner(); + reasoner.load(); + try (final QueryResultIterator queryQxIterator = reasoner.answerQuery(ruleHeadQx, true)) { + final Set<List<Term>> queryQxResults = QueryResultsUtils.collectQueryResults(queryQxIterator); + assertTrue(queryQxResults.isEmpty()); + } + try (final QueryResultIterator queryPxIterator = reasoner.answerQuery(ruleBodyPx, true)) { + final Set<List<Term>> queryPxResults = QueryResultsUtils.collectQueryResults(queryPxIterator); + final Set<List<Term>> queryPxExpectedResults = new HashSet<>(); + queryPxExpectedResults.add(Arrays.asList(c)); + assertEquals(queryPxResults, queryPxExpectedResults); + } + } + } + } + + @Test + public void testResetKeepExplicitDatabase() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(ruleQxPx); + // assert p(c) + kb.addStatement(factPc); + // assert r(d) + final Predicate predicateR1 = Expressions.makePredicate("r", 1); + kb.addStatement(new DataSourceDeclarationImpl(predicateR1, + new CsvFileDataSource(FileDataSourceTestUtils.INPUT_FOLDER + "constantD.csv"))); + // p(?x) -> q(?x) + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + checkExplicitFacts(reasoner, predicateR1); + + reasoner.resetReasoner(); + reasoner.load(); + checkExplicitFacts(reasoner, predicateR1); + + // check rule exists in knowledge base after reset + reasoner.reason(); + try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(ruleHeadQx, true)) { + assertTrue(queryResultIterator.hasNext()); + assertEquals(Arrays.asList(c), queryResultIterator.next().getTerms()); + assertFalse(queryResultIterator.hasNext()); + } + + } + } + + private void checkExplicitFacts(final Reasoner reasoner, final Predicate predicateR1) { + try (final QueryResultIterator queryResultIteratorPx = reasoner.answerQuery(ruleBodyPx, true)) { + assertTrue(queryResultIteratorPx.hasNext()); + assertEquals(factPc.getArguments(), queryResultIteratorPx.next().getTerms()); + assertFalse(queryResultIteratorPx.hasNext()); + } + try (final QueryResultIterator queryResultIteratorRx = reasoner + .answerQuery(Expressions.makePositiveLiteral(predicateR1, x), true)) { + assertTrue(queryResultIteratorRx.hasNext()); + assertEquals(Arrays.asList(Expressions.makeAbstractConstant("d")), queryResultIteratorRx.next().getTerms()); + assertFalse(queryResultIteratorRx.hasNext()); + } + } + + @Test + public void testResetEmptyKnowledgeBase() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + // 1. load and reason + reasoner.load(); + reasoner.reason(); + reasoner.resetReasoner(); + + // 2. load again + reasoner.load(); + reasoner.resetReasoner(); + + // 3.
load and reason again + reasoner.load(); + reasoner.reason(); + reasoner.close(); + } + } + + @Test(expected = ReasonerStateException.class) + public void testFailExportQueryAnswerToCsvBeforeLoad() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.exportQueryAnswersToCsv(exampleQueryAtom, FileDataSourceTestUtils.OUTPUT_FOLDER + "output.csv", + true); + } + } + + @Test + public void testSuccessiveCloseAfterLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + reasoner.close(); + reasoner.close(); + } + } + + @Test(expected = ReasonerStateException.class) + public void testSuccessiveCloseBeforeLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.close(); + reasoner.close(); + reasoner.load(); + } + } + + @Test + public void testCloseRepeatedly() throws IOException { + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.close(); + } + + try (final VLogReasoner reasoner = new VLogReasoner(new KnowledgeBase())) { + reasoner.load(); + reasoner.close(); + reasoner.close(); + } + } + + @Test + public void testStatementRemovalBeforeLoad() { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatement(ruleQxPx); + assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsListRemovalBeforeLoad() { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(Arrays.asList(factPc, factPd)); + assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsArrayRemovalBeforeLoad() { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(factPc, factPd); + assertEquals(ReasonerState.KB_NOT_LOADED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementRemovalAfterLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + kb.removeStatement(ruleQxPx); + assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsListRemovalAfterLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + kb.removeStatements(Arrays.asList(ruleQxPx, factPd)); + assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsArrayRemovalAfterLoad() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + kb.removeStatements(Arrays.asList(ruleQxPx, factPd)); + assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementRemovalBeforeReason() throws IOException { + final KnowledgeBase kb = 
new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatement(ruleQxPx); + reasoner.reason(); + assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsListRemovalBeforeReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(Arrays.asList(ruleQxPx, factPd)); + reasoner.reason(); + assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsArrayRemovalBeforeReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + kb.removeStatements(ruleQxPx, factPd); + reasoner.reason(); + assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementRemovalAfterReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatement(ruleQxPx); + assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsListRemovalAfterReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(Arrays.asList(factPc, ruleQxPx)); + assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsArrayRemovalAfterReason() throws IOException { + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(factPc, ruleQxPx); + assertEquals(ReasonerState.KB_CHANGED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementNotRemovedAfterReason() throws IOException { + final Fact newFact = Expressions.makeFact("newPred", c); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatement(newFact); + assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsListNotRemovedAfterReason() throws IOException { + final Fact newFact = Expressions.makeFact("newPred", c); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(Arrays.asList(newFact)); + assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); + } + } + + @Test + public void testStatementsArrayListNotRemovedAfterReason() throws IOException { + final Fact newFact = Expressions.makeFact("newPred", c); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatements(ruleQxPx, factPc, factPd); + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + kb.removeStatements(newFact, newFact); + assertEquals(ReasonerState.MATERIALISED, reasoner.getReasonerState()); + } + } + +} diff --git 
a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java new file mode 100644 index 000000000..9cf044481 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerTridentInput.java @@ -0,0 +1,96 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +import java.io.File; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +import org.junit.Ignore; +import org.junit.Test; +import org.mockito.internal.util.collections.Sets; +import org.semanticweb.rulewerk.core.model.api.DataSource; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.implementation.TridentDataSource; + +public class VLogReasonerTridentInput { + + private static final Predicate ternaryPredicate = Expressions.makePredicate("triple", 3); + private static final PositiveLiteral queryAtom = Expressions.makePositiveLiteral(ternaryPredicate, + Expressions.makeUniversalVariable("s"), Expressions.makeUniversalVariable("p"), + Expressions.makeUniversalVariable("o")); + + @SuppressWarnings("unchecked") + private static final Set<List<Term>> expectedTernaryQueryResult = Sets.newSet( + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/p"), + Expressions.makeAbstractConstant("http://example.org/c2")), + Arrays.asList(Expressions.makeAbstractConstant("http://example.org/c1"), + Expressions.makeAbstractConstant("http://example.org/q"), + Expressions.makeDatatypeConstant("test string", "http://www.w3.org/2001/XMLSchema#string"))); + + @Ignore + // Test fails, see https://github.com/karmaresearch/vlog/issues/55 + @Test + public void testLoadTernaryFactsFromSingleRdfDataSource() throws IOException { + final DataSource fileDataSource = new TridentDataSource( + FileDataSourceTestUtils.INPUT_FOLDER + "tridentTernaryFacts"); + + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, fileDataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + + final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom,
true); + final Set<List<Term>> queryResult = QueryResultsUtils.collectQueryResults(queryResultIterator); + + assertEquals(expectedTernaryQueryResult, queryResult); + } + } + + @Ignore + // Test fails, see https://github.com/karmaresearch/vlog/issues/56 + @Test(expected = IOException.class) + public void tridentDbDoesNotExist_fails() throws IOException { + final File nonexistingFile = new File("nonexisting"); + assertFalse(nonexistingFile.exists()); + final DataSource dataSource = new TridentDataSource(nonexistingFile.getName()); + final KnowledgeBase kb = new KnowledgeBase(); + kb.addStatement(new DataSourceDeclarationImpl(ternaryPredicate, dataSource)); + + try (final VLogReasoner reasoner = new VLogReasoner(kb)) { + reasoner.load(); + } + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java new file mode 100644 index 000000000..c5ff2617d --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogReasonerWriteInferencesTest.java @@ -0,0 +1,193 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.*; +import static org.mockito.Mockito.*; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.junit.Before; +import org.junit.Test; + +import org.semanticweb.rulewerk.core.exceptions.PrefixDeclarationException; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Conjunction; +import org.semanticweb.rulewerk.core.model.api.Constant; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.PrefixDeclarationRegistry; +import org.semanticweb.rulewerk.core.model.api.Rule; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.core.reasoner.Reasoner.InferenceAction; +import org.semanticweb.rulewerk.core.reasoner.implementation.InMemoryDataSource; + +public class VLogReasonerWriteInferencesTest { + private final Constant c = Expressions.makeAbstractConstant("http://example.org/c"); + private final Fact fact = Expressions.makeFact("http://example.org/s", c); + private final AbstractConstant dresdenConst = Expressions.makeAbstractConstant("dresden"); + private final Predicate locatedInPred = Expressions.makePredicate("LocatedIn", 2); + private final Predicate addressPred = Expressions.makePredicate("address", 4); + private final Predicate universityPred = Expressions.makePredicate("university", 2); + private final UniversalVariable varX = Expressions.makeUniversalVariable("X"); + private final UniversalVariable varY = Expressions.makeUniversalVariable("Y"); + private final PositiveLiteral pl1 = Expressions.makePositiveLiteral(locatedInPred, varX, varY); + private final PositiveLiteral pl2 = Expressions.makePositiveLiteral("location", varX, varY); + private final PositiveLiteral pl3 = Expressions.makePositiveLiteral(addressPred, varX, + Expressions.makeExistentialVariable("Y"), Expressions.makeExistentialVariable("Z"), + Expressions.makeExistentialVariable("Q")); + private final PositiveLiteral pl4 = Expressions.makePositiveLiteral(locatedInPred, + Expressions.makeExistentialVariable("Q"), Expressions.makeUniversalVariable("F")); + private final PositiveLiteral pl5 = Expressions.makePositiveLiteral(universityPred, varX, + Expressions.makeUniversalVariable("F")); + private final Conjunction<PositiveLiteral> conjunction = Expressions.makePositiveConjunction(pl3, pl4); + private final Rule rule1 = Expressions.makeRule(pl1, pl2); + private final Rule rule2 = Expressions.makeRule(conjunction, Expressions.makeConjunction(pl5)); + private final Fact f1 = Expressions.makeFact(locatedInPred, Expressions.makeAbstractConstant("Egypt"), + Expressions.makeAbstractConstant("Africa")); + private final Fact f2 = Expressions.makeFact(addressPred,
Expressions.makeAbstractConstant("TSH"), + Expressions.makeAbstractConstant("Pragerstraße13"), Expressions.makeAbstractConstant("01069"), + dresdenConst); + private final Fact f3 = Expressions.makeFact("city", dresdenConst); + private final Fact f4 = Expressions.makeFact("country", Expressions.makeAbstractConstant("germany")); + private final Fact f5 = Expressions.makeFact(universityPred, Expressions.makeAbstractConstant("tudresden"), + Expressions.makeAbstractConstant("germany")); + private final InMemoryDataSource locations = new VLogInMemoryDataSource(2, 1); + private KnowledgeBase kb; + + @Before + public void initKb() { + kb = new KnowledgeBase(); + kb.addStatement(fact); + kb.addStatements(rule1, rule2, f1, f2, f3, f4, f5); + locations.addTuple("dresden", "germany"); + kb.addStatement(new DataSourceDeclarationImpl(Expressions.makePredicate("location", 2), locations)); + } + + @Test + public void writeInferences_example_succeeds() throws IOException { + assertEquals(10, getInferences().size()); + } + + @Test + public void writeInferences_withPrefixDeclarations_abbreviatesIris() + throws IOException, PrefixDeclarationException { + PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); + Map<String, String> prefixMap = new HashMap<>(); + prefixMap.put("eg:", "http://example.org/"); + when(prefixDeclarations.getBaseIri()).thenReturn(""); + when(prefixDeclarations.getPrefixIri(eq("eg:"))).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(prefixMap.entrySet().iterator()); + kb.mergePrefixDeclarations(prefixDeclarations); + + assertEquals(11, getInferences().size()); + assertTrue("the abbreviated fact is present", getInferences().contains("eg:s(eg:c) .")); + } + + @Test(expected = RulewerkRuntimeException.class) + public void writeInferences_withBase_writesBase() throws IOException, PrefixDeclarationException { + PrefixDeclarationRegistry prefixDeclarations = mock(PrefixDeclarationRegistry.class); + when(prefixDeclarations.getBaseIri()).thenReturn("http://example.org/"); + when(prefixDeclarations.iterator()).thenReturn(new HashMap<String, String>().entrySet().iterator()); + kb.mergePrefixDeclarations(prefixDeclarations); + getInferences(); + //assertEquals(11, getInferences().size()); + //assertTrue("the base declaration is present", getInferences().contains("@base <http://example.org/> .")); + } + + @Test + public void getInferences_example_succeeds() throws IOException { + final List<String> inferences = getInferences(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + final List<String> fromStream = reasoner.getInferences().map(Fact::toString).collect(Collectors.toList()); + assertEquals(inferences, fromStream); + } + } + + @Test + public void unsafeForEachInference_example_succeeds() throws IOException { + final List<String> inferences = getInferences(); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + final List<String> fromUnsafe = new ArrayList<>(); + + reasoner.unsafeForEachInference((predicate, terms) -> { + fromUnsafe.add(Expressions.makeFact(predicate, terms).toString()); + }); + + assertEquals(inferences, fromUnsafe); + } + } + + @Test(expected = IOException.class) + public void forEachInference_throwingAction_throws() throws IOException { + InferenceAction action = mock(InferenceAction.class); + doThrow(IOException.class).when(action).accept(any(Predicate.class), anyList()); + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + reasoner.forEachInference(action); + } + } + + private class ThrowingConsumer
implements BiConsumer<Predicate, List<Term>> { + @Override + public void accept(Predicate predicate, List<Term> terms) { + VLogQueryResultUtils.sneakilyThrowIOException(); + } + } + + @Test(expected = RulewerkRuntimeException.class) + public void unsafeForEachInference_throwingAction_throws() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + reasoner.unsafeForEachInference(new ThrowingConsumer()); + } + } + + private List<String> getInferences() throws IOException { + try (final Reasoner reasoner = new VLogReasoner(kb)) { + reasoner.reason(); + StringWriter writer = new StringWriter(); + reasoner.writeInferences(writer); + + Stream<String> inferences = Arrays.stream(writer.toString().split("(?<=[>)]\\s?)\\.\\s*")); + + return inferences.map((String inference) -> inference + ".").collect(Collectors.toList()); + } + } +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java new file mode 100644 index 000000000..d79fceab3 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogRegressionTest.java @@ -0,0 +1,108 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/* + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * #L% + */ + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.AbstractConstant; +import org.semanticweb.rulewerk.core.model.api.Predicate; +import org.semanticweb.rulewerk.core.model.api.QueryResult; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.api.UniversalVariable; +import org.semanticweb.rulewerk.core.model.implementation.Expressions; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; + +public class VLogRegressionTest { + @Test + public void test_issue_166() throws IOException { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + + final Predicate A = Expressions.makePredicate("A", 1); + final Predicate B = Expressions.makePredicate("B", 1); + final Predicate C = Expressions.makePredicate("C", 1); + final Predicate R = Expressions.makePredicate("Rel", 1); + + final AbstractConstant star = Expressions.makeAbstractConstant("star"); + final AbstractConstant cy = Expressions.makeAbstractConstant("cy"); + final AbstractConstant r0 = Expressions.makeAbstractConstant("r0"); + final UniversalVariable x0 = Expressions.makeUniversalVariable("x0"); + final UniversalVariable x2 = Expressions.makeUniversalVariable("x2"); + + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(B, x2), + Expressions.makePositiveLiteral(A, x2))); + knowledgeBase.addStatement(Expressions.makeFact(B, star)); + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(R, r0), + Expressions.makePositiveLiteral(C, cy), + Expressions.makePositiveLiteral(B, x0))); + knowledgeBase.addStatement(Expressions.makeFact(C, cy)); + + try (final Reasoner reasoner = new VLogReasoner(knowledgeBase)) { + reasoner.reason(); + final QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral(R, x0), false); + assertTrue(result.hasNext()); + final QueryResult terms = result.next(); + assertFalse(result.hasNext()); + final List<Term> expectedTerms = new ArrayList<>(); + expectedTerms.add(r0); + assertEquals(expectedTerms, terms.getTerms()); + } + } + + @Test + public void test_vlog_issue_44() throws IOException { + final KnowledgeBase knowledgeBase = new KnowledgeBase(); + + final Predicate P = Expressions.makePredicate("P", 1); + final Predicate Q = Expressions.makePredicate("Q", 1); + final Predicate R = Expressions.makePredicate("R", 1); + + final AbstractConstant c = Expressions.makeAbstractConstant("c"); + final AbstractConstant d = Expressions.makeAbstractConstant("d"); + final UniversalVariable x = Expressions.makeUniversalVariable("x"); + + knowledgeBase.addStatement(Expressions.makeFact(P, c)); + knowledgeBase.addStatement(Expressions.makeFact(Q, d)); + knowledgeBase.addStatement(Expressions.makeRule(Expressions.makePositiveLiteral(R, x), + Expressions.makePositiveLiteral(P, x), + Expressions.makeNegativeLiteral(Q, x))); + + try (final Reasoner reasoner = new VLogReasoner(knowledgeBase)) { + reasoner.reason(); + final QueryResultIterator result = reasoner.answerQuery(Expressions.makePositiveLiteral(R, x), false); + assertTrue(result.hasNext()); + final QueryResult terms = result.next(); + assertFalse(result.hasNext()); + final List<Term>
expectedTerms = new ArrayList<>(); + expectedTerms.add(c); + assertEquals(expectedTerms, terms.getTerms()); + assertFalse(result.hasNext()); + } + } +} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java similarity index 92% rename from vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java rename to rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java index c9d3b9f12..2ff298e9d 100644 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogTermNamesTest.java +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogTermNamesTest.java @@ -1,10 +1,10 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; +package org.semanticweb.rulewerk.reasoner.vlog; /*- * #%L - * VLog4j Core Components + * Rulewerk VLog Reasoner Support * %% - * Copyright (C) 2018 VLog4j Developers + * Copyright (C) 2018 - 2020 Rulewerk Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ import karmaresearch.vlog.AlreadyStartedException; import karmaresearch.vlog.EDBConfigurationException; +import karmaresearch.vlog.NonExistingPredicateException; import karmaresearch.vlog.NotStartedException; import karmaresearch.vlog.Rule; import karmaresearch.vlog.Term; @@ -44,7 +45,7 @@ public class VLogTermNamesTest { @Test - public void testTermCase() throws EDBConfigurationException, NotStartedException { + public void testTermCase() throws EDBConfigurationException, NotStartedException, NonExistingPredicateException { final String[][] argsAMatrix = { { "A" }, { "a" } }; final karmaresearch.vlog.Term varX = VLogExpressions.makeVariable("x"); final karmaresearch.vlog.Atom atomBx = new karmaresearch.vlog.Atom("b", varX); @@ -70,11 +71,6 @@ public void testTermCase() throws EDBConfigurationException, NotStartedException assertFalse(queryResultIteratorBx1.hasNext()); queryResultIteratorBx1.close(); - // Querying x(?X) before materialize - final TermQueryResultIterator queryResultIteratorXx = vLog.query(new karmaresearch.vlog.Atom("x", varX)); - assertFalse(queryResultIteratorXx.hasNext()); - queryResultIteratorBx1.close(); - vLog.materialize(true); // Querying b(?X) after materialize @@ -91,7 +87,7 @@ public void testTermCase() throws EDBConfigurationException, NotStartedException @Test public void testSupportedConstantNames() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { + throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException, NonExistingPredicateException { final String constantNameNumber = "1"; final String constantNameStartsWithNumber = "12_13_14"; final String[][] argsAMatrix = { { constantNameNumber }, { constantNameStartsWithNumber } }; diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java new file mode 100644 index 000000000..3f0dd88a5 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/VLogToModelConverterTest.java @@ -0,0 +1,91 @@ +package org.semanticweb.rulewerk.reasoner.vlog; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2020 Rulewerk Developers + *
%% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +import static org.junit.Assert.assertEquals; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Term; +import org.semanticweb.rulewerk.core.model.implementation.AbstractConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.DatatypeConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.LanguageStringConstantImpl; +import org.semanticweb.rulewerk.core.model.implementation.NamedNullImpl; + +public class VLogToModelConverterTest { + + @Test + public void testAbstractConstantConversion() { + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "c"); + final Term rulewerkTerm = new AbstractConstantImpl("c"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + assertEquals(rulewerkTerm, convertedTerm); + } + + @Test + public void testAbstractConstantIriConversion() { + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "<http://example.org/test>"); + final Term rulewerkTerm = new AbstractConstantImpl("http://example.org/test"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + assertEquals(rulewerkTerm, convertedTerm); + } + + @Test + public void testDatatypeConstantConversion() { + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "\"a\"^^<http://example.org/test>"); + final Term rulewerkTerm = new DatatypeConstantImpl("a", "http://example.org/test"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + assertEquals(rulewerkTerm, convertedTerm); + } + + @Test + public void testLanguageStringConversion() { + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, + "\"Test\"@en"); + final Term rulewerkTerm = new LanguageStringConstantImpl("Test", "en"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + assertEquals(rulewerkTerm, convertedTerm); + } + + @Test + public void testNamedNullConversion() { + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, + "_123"); + final Term rulewerkTerm = new NamedNullImpl("_123"); + final Term convertedTerm = VLogToModelConverter.toTerm(vLogTerm); + assertEquals(rulewerkTerm, convertedTerm); + } + + @Test(expected = IllegalArgumentException.class) + public void testVariableConversion() { + final karmaresearch.vlog.Term vLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, + "X"); + VLogToModelConverter.toTerm(vLogTerm); + } + + @Test(expected = RuntimeException.class) + public void testAbstractConstantContainingQuoteExpression() { + final String constName = "\""; + VLogToModelConverter.toTerm(new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, constName)); + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java
b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java new file mode 100644 index 000000000..6165fa13a --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue61.java @@ -0,0 +1,74 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +import static org.junit.Assert.assertEquals; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.PositiveLiteral; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue61 { + + @Ignore + @Test + public void part01() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); + RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(2, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + + @Ignore + @Test + public void part02() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X,!Y,!Y) :- p(?X) ."); + RuleParser.parseInto(kb, "q(?X,!Y,!Z) :- p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + PositiveLiteral query = RuleParser.parsePositiveLiteral("q(?X,?Y,?Z)"); + assertEquals(1, reasoner.countQueryAnswers(query, true).getCount()); + + reasoner.close(); + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java new file mode 100644 index 000000000..1f09ecd89 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue63.java @@ -0,0 +1,48 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; + +import org.junit.Test; +import org.semanticweb.rulewerk.core.exceptions.RulewerkRuntimeException; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue63 { + + @Test(expected = RulewerkRuntimeException.class) + public void test() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "p(a)."); + RuleParser.parseInto(kb, "q(?X) :- ~p(?X) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + + reasoner.close(); + } + +} diff --git a/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java new file mode 100644 index 000000000..57b0a9c04 --- /dev/null +++ b/rulewerk-vlog/src/test/java/org/semanticweb/rulewerk/reasoner/vlog/issues/Issue67.java @@ -0,0 +1,62 @@ +package org.semanticweb.rulewerk.reasoner.vlog.issues; + +import static org.junit.Assert.assertTrue; + +/*- + * #%L + * Rulewerk VLog Reasoner Support + * %% + * Copyright (C) 2018 - 2021 Rulewerk Developers + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; + +import org.junit.Ignore; +import org.junit.Test; +import org.semanticweb.rulewerk.core.model.api.Fact; +import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase; +import org.semanticweb.rulewerk.core.reasoner.Reasoner; +import org.semanticweb.rulewerk.parser.ParsingException; +import org.semanticweb.rulewerk.parser.RuleParser; +import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner; + +public class Issue67 { + + @Ignore + @Test + public void part01() throws ParsingException, IOException { + KnowledgeBase kb = new KnowledgeBase(); + + RuleParser.parseInto(kb, "B1_(a, b, c, d, prov1) ."); + RuleParser.parseInto(kb, "B2_(a, a, c, prov2) . 
"); + RuleParser.parseInto(kb, "H1_(a, n1_2_0, n1_2_0, n1_3_0, n1_4_0) ."); + RuleParser.parseInto(kb, "H2_(n1_3_0, n1_5_0, n1_6_0) ."); + RuleParser.parseInto(kb, + "true(?x1) :- B1_(?x1, ?x2, ?y1, ?y2, ?F_1), B2_(?x1, ?x1, ?y1, ?F_2), H1_(?x1, ?z1, ?z1, ?z2, ?F_3), H2_(?z2, ?z3, ?F_4) ."); + + Reasoner reasoner = new VLogReasoner(kb); + reasoner.reason(); + Set inferences = reasoner.getInferences().collect(Collectors.toSet()); + + Fact query = RuleParser.parseFact("true(a)."); + assertTrue(inferences.contains(query)); + + reasoner.close(); + } + +} diff --git a/shell.nix b/shell.nix new file mode 100644 index 000000000..b847c5606 --- /dev/null +++ b/shell.nix @@ -0,0 +1,14 @@ +(import + ( + let + lock = builtins.fromJSON (builtins.readFile ./flake.lock); + in + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; + sha256 = lock.nodes.flake-compat.locked.narHash; + } + ) + { + src = ./.; + }) +.shellNix diff --git a/vlog4j-core/pom.xml b/vlog4j-core/pom.xml deleted file mode 100644 index 30b7edd7c..000000000 --- a/vlog4j-core/pom.xml +++ /dev/null @@ -1,64 +0,0 @@ - - - 4.0.0 - - - org.semanticweb.vlog4j - vlog4j-parent - 0.0.1 - - - vlog4j-core - jar - - VLog4j Core Components - Core components of VLog4j: reasoner and model - - - 1.0.1 - - - - - - ${project.groupId} - vlog4j-base - ${karmaresearch.vlog.version} - - - - - - development - - - - - - org.apache.maven.plugins - maven-install-plugin - 2.4 - - - initialize - - install-file - - - ${project.groupId} - vlog4j-base - ${karmaresearch.vlog.version} - jar - ./lib/jvlog-local.jar - - - - - - - - - diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Atom.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Atom.java deleted file mode 100644 index 07e98098a..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Atom.java +++ /dev/null @@ -1,72 +0,0 @@ -package org.semanticweb.vlog4j.core.model.api; - -import java.util.List; -import java.util.Set; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Interface for atoms. An atom is predicate applied to a tuple of terms; that - * is, an atomic formula is a formula of the form P(t1,...,tn) for P a - * {@link Predicate} name, and t1,...,tn some {@link Term}s. The number of terms - * in the tuple corresponds to the {@link Predicate} arity. - * - * @author david.carral@tu-dresden.de - */ - -public interface Atom { - - /** - * The atom predicate. - * - * @return the atom predicate. - */ - public Predicate getPredicate(); - - /** - * The list of terms representing the tuple arguments. - * - * @return an unmodifiable list of terms with the same size as the - * {@link Predicate} arity. - */ - public List getTerms(); - - /** - * Returns the {@link Variable}s that occur among the atom terms. 
- * - * @return the set of atom variables - */ - public Set getVariables(); - - /** - * Returns the {@link Constant}s that occur among the atom terms. - * - * @return the set of atom constants - */ - public Set getConstants(); - - /** - * Returns the {@link Blank}s that occur among the atom terms. - * - * @return the set of atom blanks - */ - public Set getBlanks(); -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java deleted file mode 100644 index ff2f53e0c..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Conjunction.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.semanticweb.vlog4j.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.List; -import java.util.Set; - -/** - * Interface for representing conjunctions of {@link Atom}s, i.e., lists of - * atomic formulas that are connected with logical AND. Conjunctions may have - * free variables, since they contain no quantifiers. - * - * @author Markus Krötzsch - * - */ -public interface Conjunction extends Iterable { - - /** - * Returns the list of atoms that are part of this conjunction. - * - * @return list of atoms - */ - public List getAtoms(); - - /** - * Returns the set of terms of a certain type that occur in this conjunction. - * - * @param termType - * the type of the term requested - * @return set of matching terms used in this conjunction - */ - public Set getTerms(TermType termType); - - /** - * Returns the set of all terms that occur in this conjunction. - * - * @return set of terms used in this conjunction - */ - public Set getTerms(); - - /** - * Returns the {@link Variable}s that occur in this conjunction. - * - * @return a set of variables - */ - public Set getVariables(); -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java deleted file mode 100644 index aa8244ec1..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/Rule.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.semanticweb.vlog4j.core.model.api; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.util.Set; - -/** - * Interface for classes representing a rule. This implementation assumes that - * rules are defined by their head and body atoms, without explicitly specifying - * quantifiers. All variables in the body are considered universally quantified; - * all variables in the head that do not occur in the body are considered - * existentially quantified. - * - * @author Markus Krötzsch - * - */ -public interface Rule { - - /** - * Returns the conjunction of head atoms (the consequence of the rule). - * - * @return conjunction of atoms - */ - public Conjunction getHead(); - - /** - * Returns the conjunction of body atoms (the premise of the rule). - * - * @return conjunction of atoms - */ - public Conjunction getBody(); - - /** - * Returns the existentially quantified head variables of this rule. - * - * @return a set of variables - */ - public Set getExistentiallyQuantifiedVariables(); - - /** - * Returns the universally quantified variables of this rule. - * - * @return a set of variables - */ - public Set getUniversallyQuantifiedVariables(); - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java deleted file mode 100644 index 8b5eee309..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/api/TermType.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.semanticweb.vlog4j.core.model.api; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Enumeration listing the different types of terms ({@link #CONSTANT}, - * {@link #BLANK}, and {@link #VARIABLE}). - * - * @author david.carral@tu-dresden.de - * - */ -public enum TermType { - /** - * A constant is an entity used to represent named domain elements. - */ - CONSTANT, - /** - * A blank is an entity used to represent anonymous domain elements introduced - * during the reasoning process to satisfy existential restrictions. - */ - BLANK, - /** - * A variable is a parameter that stands for an arbitrary domain element. - */ - VARIABLE -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AtomImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AtomImpl.java deleted file mode 100644 index d4e26000f..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/AtomImpl.java +++ /dev/null @@ -1,150 +0,0 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Collections; -import java.util.List; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Blank; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.Variable; - -/** - * Implements {@link Atom} objects. An atom is a formula of the form - * P(t1,...,tn) for P a {@link Predicate} name, and t1,...,tn some - * {@link Term}s. The number of terms corresponds to the {@link Predicate} - * arity. - * - * @author david.carral@tu-dresden.de - * @author Markus Krötzsch - */ -public class AtomImpl implements Atom { - - private final Predicate predicate; - private final List terms; - - /** - * Creates an {@link Atom} of the form "{@code predicate}({@code terms})". - * - * @param predicate - * non-blank predicate name - * @param terms - * non-empty list of non-null terms. List size must be the same as - * the predicate arity. - */ - public AtomImpl(@NonNull final Predicate predicate, @NonNull final List terms) { - Validate.notNull(predicate, "Atom predicates cannot be null."); - Validate.noNullElements(terms, "Null terms cannot appear in atoms. 
The list contains a null at position [%d]."); - Validate.notEmpty(terms, "Atoms of arity zero are not supported: please specify at least one term."); - - Validate.isTrue(terms.size() == predicate.getArity(), "Terms size [%d] does not match predicate arity [%d].", - terms.size(), predicate.getArity()); - - this.predicate = predicate; - this.terms = terms; - } - - @Override - public Predicate getPredicate() { - return this.predicate; - } - - @Override - public List getTerms() { - return Collections.unmodifiableList(this.terms); - } - - @Override - public Set getVariables() { - final TermFilter termFilter = new TermFilter(TermType.VARIABLE); - for (final Term term : this.terms) { - term.accept(termFilter); - } - return termFilter.getVariables(); - } - - @Override - public Set getConstants() { - final TermFilter termFilter = new TermFilter(TermType.CONSTANT); - for (final Term term : this.terms) { - term.accept(termFilter); - } - return termFilter.getConstants(); - } - - @Override - public Set getBlanks() { - final TermFilter termFilter = new TermFilter(TermType.BLANK); - for (final Term term : this.terms) { - term.accept(termFilter); - } - return termFilter.getBlanks(); - } - - @Override - public int hashCode() { - final int prime = 31; - int result = this.predicate.hashCode(); - result = prime * result + this.terms.hashCode(); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof Atom)) { - return false; - } - final Atom other = (Atom) obj; - - return this.predicate.equals(other.getPredicate()) && this.terms.equals(other.getTerms()); - } - - @Override - public String toString() { - final StringBuilder stringBuilder = new StringBuilder(); - stringBuilder.append(this.predicate.getName()).append("("); - boolean first = true; - for (final Term term : this.terms) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } - stringBuilder.append(term); - } - stringBuilder.append(")"); - return stringBuilder.toString(); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/BlankImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/BlankImpl.java deleted file mode 100644 index 764180645..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/BlankImpl.java +++ /dev/null @@ -1,61 +0,0 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.vlog4j.core.model.api.Blank; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; - -/** - * Implements {@link TermType#BLANK} terms. 
A blank is an entity used to - * represent anonymous domain elements introduced during the reasoning process - * to satisfy existential restrictions. - * - * @author david.carral@tu-dresden.de - */ -public class BlankImpl extends AbstractTermImpl implements Blank { - - /** - * Instantiates a {@code BlankImpl} object with the name - * {@code name}. - * - * @param name - * cannot be a blank String (null, empty or whitespace). - */ - public BlankImpl(final String name) { - super(name); - } - - @Override - public TermType getType() { - return TermType.BLANK; - } - - @Override - public T accept(TermVisitor termVisitor) { - return termVisitor.visit(this); - } - - @Override - public String toString() { - return "_" + this.getName(); - } -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java deleted file mode 100644 index 6db6fa13b..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/ConjunctionImpl.java +++ /dev/null @@ -1,137 +0,0 @@ -/** - * - */ -package org.semanticweb.vlog4j.core.model.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.Variable; - -/** - * Simple implementation of {@link Conjunction}. - * - * @author Markus Krötzsch - */ -public class ConjunctionImpl implements Conjunction { - - final List atoms; - - /** - * Constructor. - * - * @param atoms - * a non-null list of atoms, that cannot contain null elements. - */ - public ConjunctionImpl(@NonNull List atoms) { - Validate.noNullElements(atoms); - this.atoms = atoms; - } - - @Override - public List getAtoms() { - return Collections.unmodifiableList(this.atoms); - } - - /** - * Returns a term filter object that has visited all terms in this conjunction - * for the given type. 
- * - * @param termType - * specifies the type of term to look for - * @return term filter - */ - TermFilter getTermFilter(TermType termType) { - final TermFilter termFilter = new TermFilter(termType); - for (final Atom atom : this.atoms) { - for (final Term term : atom.getTerms()) { - term.accept(termFilter); - } - } - return termFilter; - } - - @Override - public Set getTerms() { - return getTermFilter(null).getTerms(); - } - - @Override - public Set getTerms(TermType termType) { - return getTermFilter(termType).getTerms(); - } - - @Override - public Set getVariables() { - return getTermFilter(TermType.VARIABLE).getVariables(); - } - - @Override - public int hashCode() { - return this.atoms.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof Conjunction)) { - return false; - } - final Conjunction other = (Conjunction) obj; - return this.atoms.equals(other.getAtoms()); - } - - @Override - public Iterator iterator() { - return this.atoms.iterator(); - } - - @Override - public String toString() { - final StringBuilder stringBuilder = new StringBuilder(); - boolean first = true; - for (final Atom atom : this.atoms) { - if (first) { - first = false; - } else { - stringBuilder.append(", "); - } - stringBuilder.append(atom.toString()); - } - return stringBuilder.toString(); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java deleted file mode 100644 index 6762f2af2..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/Expressions.java +++ /dev/null @@ -1,193 +0,0 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Arrays; -import java.util.List; - -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; - -/** - * This utilities class provides static methods for creating terms and formulas - * in vlog4j. - * - * @author Markus Krötzsch - * - */ -public final class Expressions { - /** - * Private constructor prevents this utilities class to be instantiated. - */ - private Expressions() { - } - - /** - * Creates a {@link Variable}. - * - * @param name - * name of the variable - * @return a {@link Variable} corresponding to the input. - */ - public static Variable makeVariable(String name) { - return new VariableImpl(name); - } - - /** - * Creates a {@link Constant}. 
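- * For example, a fact {@code p(c)} can be assembled from a constant as
- * follows (an illustrative sketch using the factory methods of this class):
- *
- * <pre>{@code
- * Constant c = Expressions.makeConstant("c");
- * Atom fact = Expressions.makeAtom("p", c);
- * }</pre>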
- * - * @param name - * name of the constant - * @return a {@link Constant} corresponding to the input. - */ - public static Constant makeConstant(String name) { - return new ConstantImpl(name); - } - - /** - * Creates a {@link Predicate}. - * - * @param name - * non-blank predicate name - * @param arity - * predicate arity, strictly greater than 0 - * @return a {@link Predicate} corresponding to the input. - */ - public static Predicate makePredicate(String name, int arity) { - return new PredicateImpl(name, arity); - } - - /** - * Creates an {@code Atom}. - * - * @param predicateName - * non-blank {@link Predicate} name - * @param terms - * non-empty, non-null list of non-null terms - * @return an {@link Atom} with given {@code terms} and {@link Predicate} - * constructed from name given {@code predicateName} and {@code arity} - * given {@code terms} size. - */ - public static Atom makeAtom(final String predicateName, final List terms) { - final Predicate predicate = makePredicate(predicateName, terms.size()); - - return new AtomImpl(predicate, terms); - } - - /** - * Creates an {@code Atom}. - * - * @param predicateName - * non-blank {@link Predicate} name - * @param terms - * non-empty, non-null array of non-null terms - * @return an {@link Atom} with given {@code terms} and {@link Predicate} - * constructed from name given {@code predicateName} and {@code arity} - * given {@code terms} length. - */ - public static Atom makeAtom(final String predicateName, final Term... terms) { - final Predicate predicate = makePredicate(predicateName, terms.length); - - return new AtomImpl(predicate, Arrays.asList(terms)); - } - - /** - * Creates an {@code Atom}. - * - * @param predicate - * a non-null {@link Predicate} - * @param terms - * non-empty, non-null list of non-null terms. List size must be the - * same as the given {@code predicate} arity. - * @return an {@link Atom} corresponding to the input. - */ - public static Atom makeAtom(final Predicate predicate, final List terms) { - return new AtomImpl(predicate, terms); - } - - /** - * Creates an {@code Atom}. - * - * @param predicate - * a non-null {@link Predicate} - * @param terms - * non-empty, non-null array of non-null terms. Aray size must be the - * same as the given {@code predicate} arity. - * @return an {@link Atom} corresponding to the input - */ - public static Atom makeAtom(final Predicate predicate, final Term... terms) { - return new AtomImpl(predicate, Arrays.asList(terms)); - } - - /** - * Creates a {@code Conjunction}. - * - * @param atoms - * list of non-null atoms - * @return a {@link Conjunction} corresponding to the input - */ - public static Conjunction makeConjunction(final List atoms) { - return new ConjunctionImpl(atoms); - } - - /** - * Creates a {@code Conjunction}. - * - * @param atoms - * array of non-null atoms - * @return a {@link Conjunction} corresponding to the input - */ - public static Conjunction makeConjunction(final Atom... atoms) { - return new ConjunctionImpl(Arrays.asList(atoms)); - } - - /** - * Creates a {@code Rule}. - * - * @param head - * conjunction of atoms - * @param body - * conjunction of atoms - * @return a {@link Rule} corresponding to the input - */ - public static Rule makeRule(final Conjunction head, final Conjunction body) { - return new RuleImpl(head, body); - } - - /** - * Creates a {@code Rule} with a single atom in its head. 
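- * For example, the rule {@code q(x) :- p(x)} can be created as follows (an
- * illustrative sketch):
- *
- * <pre>{@code
- * Variable x = Expressions.makeVariable("x");
- * Rule rule = Expressions.makeRule(Expressions.makeAtom("q", x), Expressions.makeAtom("p", x));
- * }</pre>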
- * - * @param headAtom - * @param bodyAtoms - * array of non-null atoms - * @return a {@link Rule} corresponding to the input - */ - public static Rule makeRule(final Atom headAtom, final Atom... bodyAtoms) { - return new RuleImpl(new ConjunctionImpl(Arrays.asList(headAtom)), - new ConjunctionImpl(Arrays.asList(bodyAtoms))); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java deleted file mode 100644 index cb4eefee9..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/RuleImpl.java +++ /dev/null @@ -1,115 +0,0 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -import java.util.HashSet; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import org.eclipse.jdt.annotation.NonNull; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; - -/** - * Implementation for {@link Rule}. Represents rules with non-empty heads and - * bodies. - * - * @author Irina Dragoste - * - */ -public class RuleImpl implements Rule { - - final Conjunction body; - final Conjunction head; - - /** - * Creates a Rule with a non-empty body and an non-empty head. All variables in the body are considered universally quantified; all variables in the head - * that do not occur in the body are considered existentially quantified. - * - * @param head - * list of Atoms representing the rule body conjuncts. - * @param body - * list of Atoms representing the rule head conjuncts. 
- */ - public RuleImpl(@NonNull final Conjunction head, @NonNull final Conjunction body) { - Validate.notNull(head); - Validate.notNull(body); - Validate.notEmpty(body.getAtoms()); - Validate.notEmpty(head.getAtoms()); - - this.head = head; - this.body = body; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = this.body.hashCode(); - result = prime * result + this.head.hashCode(); - return result; - } - - @Override - public boolean equals(final Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof Rule)) { - return false; - } - final Rule other = (Rule) obj; - - return this.head.equals(other.getHead()) && this.body.equals(other.getBody()); - } - - @Override - public String toString() { - return this.head + " :- " + this.body; - } - - @Override - public Conjunction getHead() { - return this.head; - } - - @Override - public Conjunction getBody() { - return this.body; - } - - @Override - public Set getUniversallyQuantifiedVariables() { - return this.body.getVariables(); - } - - @Override - public Set getExistentiallyQuantifiedVariables() { - final Set result = new HashSet<>(this.head.getVariables()); - result.removeAll(this.body.getVariables()); - return result; - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/TermFilter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/TermFilter.java deleted file mode 100644 index 880266cde..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/model/implementation/TermFilter.java +++ /dev/null @@ -1,138 +0,0 @@ -package org.semanticweb.vlog4j.core.model.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import org.semanticweb.vlog4j.core.model.api.Blank; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.Variable; - -/** - * A visitor that builds a set of terms of a specific type. It can be used to - * visit many terms and will only retain the ones that match the given type. - * - * @author Markus Krötzsch - * - */ -public class TermFilter implements TermVisitor { - - final TermType termType; - final Set terms = new HashSet<>(); - - /** - * Creates a new term filter. - * - * @param termType - * type of the term to restrict to, or null if all terms should be - * kept - */ - public TermFilter(TermType termType) { - this.termType = termType; - } - - /** - * Returns the set of terms collected so far. 
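- * For example, the variables of an atom can be collected as follows, where
- * {@code atom} is any {@link Atom} (an illustrative sketch mirroring the use
- * of this class in {@link AtomImpl}):
- *
- * <pre>{@code
- * TermFilter termFilter = new TermFilter(TermType.VARIABLE);
- * for (Term term : atom.getTerms()) {
- * 	term.accept(termFilter);
- * }
- * Set<Term> variables = termFilter.getTerms();
- * }</pre>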
- * - * @return set of terms - */ - public Set getTerms() { - return Collections.unmodifiableSet(this.terms); - } - - /** - * Returns the set of variables collected so far, which might be empty if the - * terms collected are not variables. The method will also return an empty set - * if anything other than variables are being collected. - * - * @return set of variables (see {@link Variable}). - */ - @SuppressWarnings("unchecked") - public Set getVariables() { - if (this.termType.equals(TermType.VARIABLE)) { - return (Set) (Set) Collections.unmodifiableSet(this.terms); - } else { - return Collections.emptySet(); - } - } - - /** - * Returns the set of constants collected so far, which might be empty if the - * terms collected are not constants. The method will also return an empty set - * if anything other than constants are being collected. - * - * @return set of constants (see {@link Constant}). - */ - @SuppressWarnings("unchecked") - public Set getConstants() { - if (this.termType.equals(TermType.CONSTANT)) { - return (Set) (Set) Collections.unmodifiableSet(this.terms); - } else { - return Collections.emptySet(); - } - } - - /** - * Returns the set of blanks collected so far, which might be empty if the - * terms collected are not blanks. The method will also return an empty set - * if anything other than blanks are being collected. - * - * @return set of blanks (see {@link Blank}). - */ - @SuppressWarnings("unchecked") - public Set getBlanks(){ - if (this.termType.equals(TermType.BLANK)) { - return (Set) (Set) Collections.unmodifiableSet(this.terms); - } else { - return Collections.emptySet(); - } - } - - @Override - public Void visit(Constant term) { - if (this.termType == null || this.termType.equals(TermType.CONSTANT)) { - this.terms.add(term); - } - return null; - } - - @Override - public Void visit(Variable term) { - if (this.termType == null || this.termType.equals(TermType.VARIABLE)) { - this.terms.add(term); - } - return null; - } - - @Override - public Void visit(Blank term) { - if (this.termType == null || this.termType.equals(TermType.BLANK)) { - this.terms.add(term); - } - return null; - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/DataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/DataSource.java deleted file mode 100644 index aa139eb43..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/DataSource.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Interfaces various types of data sources for storing facts. 
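- * For example, facts may be read from a file on disk; the sketch below is
- * illustrative only, the file name is hypothetical, and it assumes the
- * {@code CsvFileDataSource} constructor taking a {@link java.io.File}:
- *
- * <pre>{@code
- * // hypothetical file name; CsvFileDataSource is one implementation of this interface
- * DataSource dataSource = new CsvFileDataSource(new File("facts.csv"));
- * reasoner.addFactsFromDataSource(Expressions.makePredicate("p", 2), dataSource);
- * }</pre>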
- * - * @author Irina Dragoste - * - */ -public interface DataSource { - - public static final String PREDICATE_NAME_CONFIG_LINE = "EDB%1$d_predname=%2$s\n"; - public static final String DATASOURCE_TYPE_CONFIG_PARAM = "EDB%1$d_type"; - - /** - * Constructs a String representation of the data source. - * - * @return a String representation of the data source configuration for a - * certain predicate. - */ - public String toConfigString(); - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java deleted file mode 100644 index 1835f68d6..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/Reasoner.java +++ /dev/null @@ -1,477 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner; - -import java.io.IOException; -import java.util.Collection; -import java.util.List; - -import org.eclipse.jdt.annotation.NonNull; -import org.eclipse.jdt.annotation.Nullable; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; -import org.semanticweb.vlog4j.core.reasoner.implementation.VLogReasoner; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -/** - * Interface that exposes the existential rule reasoning capabilities of VLog. - *
- * The knowledge base of the reasoner can be loaded with explicit facts
- * and existential rules that would infer implicit facts through
- * reasoning. <br>
- * Facts can be added to the knowledge base:
- * <ul>
- * <li>as in-memory Java objects ({@link #addFacts(Atom...)}),</li>
- * <li>from a persistent data source
- * ({@link #addFactsFromDataSource(Predicate, DataSource)}).</li>
- * </ul>
- * Note that facts with the same predicate cannot come from multiple sources
- * (where a source can be a collection of in-memory {@link Atom} objects, or a
- * {@link DataSource}). <br>
- * Rules added to the knowledge base ({@link #addRules(Rule...)}) can be - * re-written internally by VLog, using the corresponding set - * {@link RuleRewriteStrategy}.
- *
- * Once adding facts and rules to the knowledge base has been completed, the - * knowledge base can be loaded into the reasoner. The following - * pre-condition must be respected: the {@link Predicate}s appearing in - * {@link Rule} heads (called IDBs) cannot also appear in knowledge base - * facts (called EDBs). An {@link EdbIdbSeparationException} would be - * thrown when loading the knowledge base.
- * - *
- * The loaded reasoner can perform atomic queries on explicit facts
- * before reasoning, and on all implicit and explicit facts after calling
- * {@link Reasoner#reason()}. Queries can provide an iterator for the results
- * ({@link #answerQuery(Atom, boolean)}), or the results can be exported to a
- * file ({@link #exportQueryAnswersToCsv(Atom, String, boolean)}). <br>
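- * A typical interaction is sketched below (illustrative only; exception
- * handling is omitted, and {@code fact}, {@code ruleHead} and {@code ruleBody}
- * are assumed to have been created with {@link Expressions}):
- *
- * <pre>{@code
- * try (Reasoner reasoner = Reasoner.getInstance()) {
- * 	reasoner.addFacts(fact);
- * 	reasoner.addRules(Expressions.makeRule(ruleHead, ruleBody));
- * 	reasoner.load();
- * 	reasoner.reason();
- * 	try (QueryResultIterator answers = reasoner.answerQuery(ruleHead, true)) {
- * 		answers.forEachRemaining(System.out::println);
- * 	}
- * }
- * }</pre>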
- *
- * Reasoning with various {@link Algorithm}s is supported, which can lead
- * to different sets of inferred facts and different termination behavior. In
- * some cases, reasoning with rules with existentially quantified variables
- * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We
- * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it
- * leads to termination in more cases. To avoid non-termination, a reasoning
- * timeout can be set ({@link Reasoner#setReasoningTimeout(Integer)}). <br>
- * Incremental reasoning is not supported. To add more facts and rules to
- * the knowledge base and reason again, the reasoner needs to be
- * reset ({@link #resetReasoner()}) to the state of its knowledge base
- * before loading. Then, more information can be added to the knowledge base,
- * the reasoner can be loaded again, and querying and reasoning can be
- * performed.
- *
- * @author Irina Dragoste
- *
- */
-public interface Reasoner extends AutoCloseable {
-
- /**
- * Factory method to instantiate a Reasoner.
- *
- * @return a {@link VLogReasoner} instance.
- */
- public static Reasoner getInstance() {
- 	return new VLogReasoner();
- }
-
- /**
- * Sets the algorithm that will be used for reasoning over the knowledge base.
- * If no algorithm is set, the default algorithm
- * {@link Algorithm#RESTRICTED_CHASE} will be used.
- *
- * @param algorithm
- *            the algorithm to be used for reasoning.
- */
- void setAlgorithm(@NonNull Algorithm algorithm);
-
- /**
- * Getter for the algorithm that will be used for reasoning over the knowledge
- * base. The default value is {@link Algorithm#RESTRICTED_CHASE}.
- *
- * @return the reasoning algorithm.
- */
- Algorithm getAlgorithm();
-
- /**
- * In some cases, reasoning with rules with existentially quantified variables
- * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We
- * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it
- * leads to termination in more cases.
- * This method sets a timeout (in seconds) after which reasoning can be - * artificially interrupted if it has not reached completion. - * - * @param seconds - * interval after which reasoning will be interrupted, in seconds. If - * {@code null}, reasoning will not be interrupted and will return - * only after (if) it has reached completion. - */ - void setReasoningTimeout(@Nullable Integer seconds); - - /** - * This method returns the reasoning timeout, representing the interval (in - * {@code seconds}) after which reasoning will be interrupted if it has not - * reached completion. The default value is {@code null}, in which case - * reasoning terminates only after (if) it reaches completion. - * - * @return if not {@code null}, number of seconds after which the reasoning will - * be interrupted, if it has not reached completion. - */ - @Nullable - Integer getReasoningTimeout(); - - /** - * Loaded {@link Rule}s can be re-written internally to an equivalent set of - * rules, according to given {@code ruleRewritingStrategy}. If no staregy is - * set, the default value is {@link RuleRewriteStrategy#NONE}, meaning that the - * rules will not be re-written. - * - * @param ruleRewritingStrategy - * strategy according to which the rules will be rewritten before - * reasoning. - * @throws ReasonerStateException - * if the reasoner has already been loaded. - */ - void setRuleRewriteStrategy(@NonNull RuleRewriteStrategy ruleRewritingStrategy) throws ReasonerStateException; - - /** - * Getter for the strategy according to which rules will be rewritten before - * reasoning. The default value is {@link RuleRewriteStrategy#NONE}, meaning - * that the rules will not be re-written. - * - * @return the current rule re-writing strategy - */ - @NonNull - RuleRewriteStrategy getRuleRewriteStrategy(); - - /** - * Sets the logging level of the internal VLog C++ resource. Default value is - * {@link LogLevel#WARNING} - * - * @param logLevel - * the logging level to be set for VLog C++ resource. - */ - void setLogLevel(@NonNull LogLevel logLevel); - - /** - * Returns the logging level of the internal VLog C++ resource. If no value has - * been set, the default is {@link LogLevel#WARNING}. - * - * @return the logging level of the VLog C++ resource. - */ - @Nullable - LogLevel getLogLevel(); - - /** - * Redirects the logs of the internal VLog C++ resource to given file. If no log - * file is set or the given {@code filePath} is not a valid file path, VLog will - * log to the default system output. - * - * @param filePath - * the file for the internal VLog C++ resource to log to. If - * {@code null} or an invalid file path, the reasoner will log to the - * default system output. - */ - void setLogFile(@Nullable String filePath); - - /** - * Adds rules to the reasoner knowledge base in the given order. After - * the reasoner has been loaded ({@link #load()}), the rules may be rewritten - * internally according to the set {@link RuleRewriteStrategy}. - * - * @param rules - * non-null rules to be added to the knowledge base for - * reasoning. - * @throws ReasonerStateException - * if the reasoner has already been loaded. - * @throws IllegalArgumentException - * if the {@code rules} atoms contain terms which are not of type - * {@link TermType#CONSTANT} or {@link TermType#VARIABLE}. - */ - void addRules(@NonNull Rule... rules) throws ReasonerStateException; - - /** - * Adds rules to the reasoner knowledge base in the given order. Rules - * can only be added before loading ({@link #load()}). 
After the reasoner has
- * been loaded, the rules may be rewritten internally according to the set
- * {@link RuleRewriteStrategy}.
- *
- * @param rules
- *            non-null rules to be added to the knowledge base for
- *            reasoning.
- * @throws ReasonerStateException
- *             if the reasoner has already been loaded.
- * @throws IllegalArgumentException
- *             if the {@code rules} atoms contain terms which are not of type
- *             {@link TermType#CONSTANT} or {@link TermType#VARIABLE}.
- */
- void addRules(@NonNull List<Rule> rules) throws ReasonerStateException;
-
- /**
- * Adds non-null facts to the reasoner knowledge base. A fact is
- * an {@link Atom} with all terms ({@link Atom#getTerms()}) of type
- * {@link TermType#CONSTANT}.
- * Facts can only be added before loading ({@link #load()}).
- * Fact predicates ({@link Atom#getPredicate()}) cannot have multiple data
- * sources.
- *
- * @param facts
- *            facts to be added to the knowledge base. The given order is
- *            not maintained.
- * @throws ReasonerStateException
- *             if the reasoner has already been loaded ({@link #load()}).
- * @throws IllegalArgumentException
- *             if the knowledge base contains facts from a data source
- *             with the same predicate ({@link Atom#getPredicate()}) as an
- *             {@link Atom} among given {@code facts}.
- * @throws IllegalArgumentException
- *             if the {@code facts} atoms contain terms which are not of type
- *             {@link TermType#CONSTANT}.
- */
- // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB
- void addFacts(@NonNull Atom... facts) throws ReasonerStateException;
-
- /**
- * Adds non-null facts to the reasoner knowledge base. A fact is
- * an {@link Atom} with all terms ({@link Atom#getTerms()}) of type
- * {@link TermType#CONSTANT}.
- * Facts can only be added before loading ({@link #load()}).
- * Fact predicates ({@link Atom#getPredicate()}) cannot have multiple data
- * sources.
- *
- * @param facts
- *            facts to be added to the knowledge base.
- * @throws ReasonerStateException
- *             if the reasoner has already been loaded ({@link #load()}).
- * @throws IllegalArgumentException
- *             if the knowledge base contains facts from a data source
- *             with the same predicate ({@link Atom#getPredicate()}) as an
- *             {@link Atom} among given {@code facts}.
- * @throws IllegalArgumentException
- *             if the {@code facts} atoms contain terms which are not of type
- *             {@link TermType#CONSTANT}.
- */
- // TODO add examples to javadoc about multiple sources per predicate and EDB/IDB
- void addFacts(@NonNull Collection<Atom> facts) throws ReasonerStateException;
-
- /**
- * Adds facts stored in given {@code dataSource} for given {@code predicate} to
- * the reasoner knowledge base. Fact predicates cannot have multiple
- * data sources, including in-memory {@link Atom} objects added through
- * {@link #addFacts}.
- *
- * @param predicate
- *            the {@link Predicate} for which the given {@code dataSource}
- *            contains facts.
- * @param dataSource
- *            the data source containing facts for the given {@code predicate}.
- * @throws ReasonerStateException
- *             if the reasoner has already been loaded ({@link #load()}).
- * @throws IllegalArgumentException
- *             if the knowledge base contains facts in memory (added
- *             using {@link #addFacts}) or from a data source with the same
- *             {@link Predicate} as given {@code predicate}.
- */
- // TODO add example to javadoc with two datasources and with in-memory facts for
- // the same predicate.
- // TODO validate predicate arity corresponds to the dataSource facts arity
- void addFactsFromDataSource(@NonNull Predicate predicate, @NonNull DataSource dataSource)
- 	throws ReasonerStateException;
-
- /**
- * Loads the knowledge base, consisting of the current rules and facts,
- * into the reasoner (if it has not been loaded yet). If the reasoner has
- * already been loaded, this call does nothing. After loading, the reasoner is
- * ready for reasoning and querying.
- * Loading pre-condition: the {@link Predicate}s appearing in
- * {@link Rule} heads ({@link Rule#getHead()}), called IDB predicates, cannot
- * also appear in knowledge base facts, called EDB predicates. An
- * {@link EdbIdbSeparationException} would be thrown in this case.
- *
- * @throws IOException
- *             if an I/O error occurs related to the resources in the
- *             knowledge base to be loaded.
- * @throws EdbIdbSeparationException
- *             if a {@link Predicate} appearing in a {@link Rule} head
- *             (IDB predicate) also appears in a knowledge base fact (EDB
- *             predicate).
- * @throws IncompatiblePredicateArityException
- *             if the arity of a {@link Predicate} of a fact loaded from a data
- *             source ({@link #addFactsFromDataSource(Predicate, DataSource)})
- *             does not match the arity of the facts in the corresponding data
- *             source.
- */
- // FIXME should EdbIdbSeparationException be thrown when users try to add
- // facts/rules?
- void load() throws IOException, EdbIdbSeparationException, IncompatiblePredicateArityException;
-
- /**
- * Performs reasoning on the loaded knowledge base, depending on the set
- * {@link Algorithm}. Reasoning implies extending the set of explicit facts in
- * the knowledge base with implicit facts inferred by knowledge base rules.
- *
- * In some cases, reasoning with rules with existentially quantified variables - * {@link Rule#getExistentiallyQuantifiedVariables()} may not terminate. We - * recommend reasoning with algorithm {@link Algorithm#RESTRICTED_CHASE}, as it - * leads to termination in more cases.
- * To avoid non-termination, a reasoning timeout can be set - * ({@link Reasoner#setReasoningTimeout(Integer)}).
- *
- * Incremental reasoning is not supported. To add more facts and rules to
- * the knowledge base and reason again, the reasoner needs to be
- * reset ({@link #resetReasoner()}) to the state of its knowledge base
- * before loading. Then, more information can be added to the knowledge base,
- * the reasoner can be loaded again, and querying and reasoning can be
- * performed.
- *
- * @return
- *         <ul>
- *         <li>the value returned by the previous {@link Reasoner#reason()}
- *         call, if successive reasoning is attempted before a
- *         {@link Reasoner#resetReasoner()};</li>
- *         <li>{@code true}, if reasoning reached completion;</li>
- *         <li>{@code false}, if reasoning has been interrupted before
- *         completion.</li>
- *         </ul>
- * @throws IOException
- *             if I/O exceptions occur during reasoning.
- * @throws ReasonerStateException
- *             if this method is called before loading ({@link Reasoner#load()}).
- */
- boolean reason() throws IOException, ReasonerStateException;
-
- // TODO add examples to query javadoc
- /**
- * Evaluates an atomic query ({@code queryAtom}) on the current state of the
- * reasoner knowledge base:
- * <ul>
- * <li>If the reasoner is loaded (see {@link #load()}), but has not
- * reasoned yet, the query will be evaluated on the explicit set of facts.</li>
- * <li>Otherwise, if this method is called after reasoning (see
- * {@link #reason()}), the query will be evaluated on the explicit and implicit
- * facts inferred through reasoning.</li>
- * </ul>
- * An answer to the query is the terms of a fact that matches the
- * {@code queryAtom}: the fact predicate is the same as the {@code queryAtom}
- * predicate, the {@link TermType#CONSTANT} terms of the {@code queryAtom}
- * appear in the answer fact at the same term position, and the
- * {@link TermType#VARIABLE} terms of the {@code queryAtom} are matched by
- * terms in the fact, either named ({@link TermType#CONSTANT}) or anonymous
- * ({@link TermType#BLANK}). The same variable name identifies the same term
- * in the answer fact.
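- * For example, given the fact {@code p(c)} and the rule {@code q(x) :- p(x)},
- * the query atom {@code q(x)} (with variable {@code x}) has exactly one
- * answer after reasoning, namely {@code c}. A sketch (illustrative only;
- * exception handling omitted):
- *
- * <pre>{@code
- * Atom query = Expressions.makeAtom("q", Expressions.makeVariable("x"));
- * try (QueryResultIterator answers = reasoner.answerQuery(query, true)) {
- * 	while (answers.hasNext()) {
- * 		QueryResult answer = answers.next(); // one answer per matching fact
- * 	}
- * }
- * }</pre>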
- * A query answer is represented by a {@link QueryResult}. A query can have
- * multiple, distinct query answers. This method returns an Iterator over
- * these answers.
- *
- * @param queryAtom
- *            an {@link Atom} representing the query to be answered.
- * @param includeBlanks
- *            if {@code true}, facts containing terms of type
- *            {@link TermType#BLANK} (representing anonymous individuals
- *            introduced to satisfy rule existentially quantified variables)
- *            will be included in the query results. Otherwise, the query
- *            results will only contain the facts with terms of type
- *            {@link TermType#CONSTANT} (representing named individuals).
- * @return an {@link AutoCloseable} iterator for {@link QueryResult}s,
- *         representing distinct answers to the query.
- * @throws ReasonerStateException
- *             if this method is called before loading ({@link Reasoner#load()}).
- * @throws IllegalArgumentException
- *             if the given {@code queryAtom} contains terms
- *             ({@link Atom#getTerms()}) which are not of type
- *             {@link TermType#CONSTANT} or {@link TermType#VARIABLE}.
- */
- QueryResultIterator answerQuery(@NonNull Atom queryAtom, boolean includeBlanks) throws ReasonerStateException;
-
- // TODO add examples to query javadoc
- /**
- * Evaluates an atomic query ({@code queryAtom}) on the current state of the
- * reasoner knowledge base, and writes its results to the .csv file
- * at the given path {@code csvFilePath}:
- * <ul>
- * <li>If the reasoner is loaded (see {@link #load()}), but has not
- * reasoned yet, the query will be evaluated on the explicit set of facts.</li>
- * <li>Otherwise, if this method is called after reasoning (see
- * {@link #reason()}), the query will be evaluated on the explicit and implicit
- * facts inferred through reasoning.</li>
- * </ul>
- * An answer to the query is the terms of a fact that matches the
- * {@code queryAtom}: the fact predicate is the same as the {@code queryAtom}
- * predicate, the {@link TermType#CONSTANT} terms of the {@code queryAtom}
- * appear in the answer fact at the same term position, and the
- * {@link TermType#VARIABLE} terms of the {@code queryAtom} are matched by
- * terms in the fact, either named ({@link TermType#CONSTANT}) or anonymous
- * ({@link TermType#BLANK}). The same variable name identifies the same term
- * in the answer fact.
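- * For example (an illustrative sketch; the output path is hypothetical, and
- * must end with the .csv extension):
- *
- * <pre>{@code
- * Atom query = Expressions.makeAtom("q", Expressions.makeVariable("x"));
- * reasoner.exportQueryAnswersToCsv(query, "answers.csv", true);
- * }</pre>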
- * A query answer is represented by a {@link QueryResult}. A query can have
- * multiple, distinct query answers.
- *
- * @param queryAtom
- *            an {@link Atom} representing the query to be answered.
- * @param csvFilePath
- *            path to a .csv file where the query answers will be
- *            written. Each line of the .csv file represents a
- *            query answer fact, and it will contain the fact term names as
- *            columns.
- * @param includeBlanks
- *            if {@code true}, facts containing terms of type
- *            {@link TermType#BLANK} (representing anonymous individuals
- *            introduced to satisfy rule existentially quantified variables)
- *            will be included in the query answers. Otherwise, the query
- *            answers will only contain the facts with terms of type
- *            {@link TermType#CONSTANT} (representing named individuals).
- *
- * @throws ReasonerStateException
- *             if this method is called before loading ({@link Reasoner#load()}).
- * @throws IOException
- *             if an I/O error occurs regarding the given file
- *             ({@code csvFilePath}).
- * @throws IllegalArgumentException
- *             <ul>
- *             <li>if the given {@code queryAtom} contains terms
- *             ({@link Atom#getTerms()}) which are not of type
- *             {@link TermType#CONSTANT} or {@link TermType#VARIABLE}.</li>
- *             <li>if the given {@code csvFilePath} does not end with
- *             .csv extension.</li>
- *             </ul>
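For illustration, a hedged sketch of the CSV export, reusing the reasoner and query from the answerQuery sketch above; the output path is made up.

// Sketch only: writes one line per answer fact to the given path, which must end in ".csv".
reasoner.exportQueryAnswersToCsv(query, "src/main/data/output/q.csv", false);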
- */
-	void exportQueryAnswersToCsv(@NonNull Atom queryAtom, @NonNull String csvFilePath, boolean includeBlanks)
-			throws ReasonerStateException, IOException;
-
-	/**
-	 * Resets the reasoner to a pre-loading state (the state before the
-	 * {@link #load()} method was called). All facts inferred by reasoning are
-	 * discarded. Rules and facts added to the reasoner must be loaded again
-	 * before querying and reasoning can be performed.
-	 */
-	void resetReasoner();
-
-	// TODO Map exportDBToDir(File location);
-
-	// TODO not allow any operation after closing, except close();
-	@Override
-	void close();
-
-}
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java
deleted file mode 100644
index 4b08e9240..000000000
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/ReasonerState.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package org.semanticweb.vlog4j.core.reasoner;
-
-/*
- * #%L
- * VLog4j Core Components
- * %%
- * Copyright (C) 2018 VLog4j Developers
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-/**
- * Enum for the states a {@link VLogReasoner} can be in. Certain operations are
- * not allowed in some states.
- *
- * @author Irina Dragoste
- *
- */
-public enum ReasonerState {
-	/**
-	 * State a Reasoner is in before method {@link ReasonerInterface#load()} has
-	 * been called. The Reasoner cannot reason before it has been loaded. The
-	 * Reasoner can only be loaded once. Reasoning and querying are not allowed
-	 * in this state.
-	 */
-	BEFORE_LOADING("before loading"),
-	/**
-	 * State a Reasoner is in after method {@link ReasonerInterface#load()} has
-	 * been called, and before method {@link ReasonerInterface#reason()} has been
-	 * called. The Reasoner can only be loaded once; loading in this state is
-	 * ineffective. Adding rules, facts, and fact data sources, and setting the
-	 * rule re-writing strategy are not allowed in this state.
-	 */
-	AFTER_LOADING("loaded"),
-
-	/**
-	 * State a Reasoner is in after method {@link ReasonerInterface#reason()} has
-	 * been called. The Reasoner cannot reason again once it has reached this
-	 * state. Loading and setting the reasoning algorithm in this state are
-	 * ineffective. Reasoning, adding rules, facts, and fact data sources, and
-	 * setting the rule re-writing strategy are not allowed in this state.
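To make the documented state machine concrete, a hedged sketch of the transitions (method names are those of the removed Reasoner interface above; fact names are made up):

// Sketch only: ReasonerState transitions of the removed implementation.
try (Reasoner reasoner = Reasoner.getInstance()) { // BEFORE_LOADING
	reasoner.addFacts(Expressions.makeAtom("p", Expressions.makeConstant("c")));
	reasoner.load(); // -> AFTER_LOADING: addFacts/addRules now throw ReasonerStateException
	reasoner.reason(); // -> AFTER_REASONING: a second reason() call is ineffective
	reasoner.resetReasoner(); // -> back to BEFORE_LOADING; all inferences are discarded
}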
- */ - AFTER_REASONING("completed reasoning"); - - private final String name; - - private ReasonerState(String name) { - this.name = name; - } - - @Override - public String toString() { - return name; - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/EdbIdbSeparationException.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/EdbIdbSeparationException.java deleted file mode 100644 index 5084a84e4..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/exceptions/EdbIdbSeparationException.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.exceptions; - -import java.text.MessageFormat; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Set; - -import org.semanticweb.vlog4j.core.model.api.Predicate; - -/** - * Exception thrown when attempting to load the reasoner with a knowledge base (facts and rules) that contains predicates that - * are both EDB (occur in facts) and IDB (occur in rule heads). Predicates that - * occur in facts cannot appear in rule heads. - * - * @author Irina Dragoste - * - */ -public class EdbIdbSeparationException extends VLog4jException { - - /** - * generated serial version UID - */ - private static final long serialVersionUID = -6731598892649856691L; - - private static final String messagePattern = "The following predicates occur both in facts (EDBs) and rule heads (IDBs): {0}!"; - - /** - * Creates an exception with a logging message for given predicates. - * @param edbIdbPredicates predicates which are both EDB (occur in facts) and IDB (occur in rule heads). - */ - public EdbIdbSeparationException(Set edbIdbPredicates) { - super(MessageFormat.format(messagePattern, edbIdbPredicates)); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java deleted file mode 100644 index 80b6fea90..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSource.java +++ /dev/null @@ -1,124 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L%
- */
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.commons.lang3.Validate;
-import org.eclipse.jdt.annotation.NonNull;
-import org.semanticweb.vlog4j.core.model.api.TermType;
-import org.semanticweb.vlog4j.core.reasoner.DataSource;
-
-/**
- * A CsvFileDataSource stores fact terms (tuples) as lines in a ".csv" format
- * file, each column being a predicate argument.
- *
- * @author Irina Dragoste
- *
- */
-public class CsvFileDataSource implements DataSource {
-	public static final String CSV_FILE_EXTENSION = ".csv";
-	private static final String DATASOURCE_TYPE_CONFIG_VALUE = "INMEMORY";
-
-	private final File csvFile;
-	private final String dirCanonicalPath;
-
-	/**
-	 * @return a .csv format file, where each line corresponds to a fact of
-	 *         {@link TermType#CONSTANT} {@link Term}s, each column being the
-	 *         fact term name.
-	 */
-	public File getCsvFile() {
-		return this.csvFile;
-	}
-
-	/**
-	 * Constructor.
-	 *
-	 * @param csvFile
-	 *            must be a file of ".csv" extension and valid CSV format. The
-	 *            content of the file represents fact tuples, where each line
-	 *            corresponds to a fact, each column being a predicate argument.
-	 * @throws IOException
-	 *             if the given {@code csvFile} path is an invalid file path.
-	 * @throws IllegalArgumentException
-	 *             if the given {@code csvFilePath} does not end with
-	 *             .csv extension.
-	 */
-	public CsvFileDataSource(@NonNull final File csvFile) throws IOException {
-		Validate.notNull(csvFile, "Data source file cannot be null!");
-		Validate.isTrue(csvFile.getName().endsWith(CSV_FILE_EXTENSION),
-				"Expected .csv extension for data source file [%s]!", csvFile);
-		this.dirCanonicalPath = csvFile.getAbsoluteFile().getParentFile().getCanonicalPath();
-		this.csvFile = csvFile;
-	}
-
-	@Override
-	public final String toConfigString() {
-		final String configStringPattern =
-
-				DataSource.PREDICATE_NAME_CONFIG_LINE +
-
-				DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" +
-
-				"EDB%1$d_param0=" + dirCanonicalPath + "\n" +
-
-				"EDB%1$d_param1=" + getFileNameWithoutExtension() + "\n";
-
-		return configStringPattern;
-	}
-
-	String getDirCanonicalPath() throws IOException {
-		return dirCanonicalPath;
-	}
-
-	String getFileNameWithoutExtension() {
-		final String fileName = this.csvFile.getName();
-		return fileName.substring(0, fileName.lastIndexOf(CSV_FILE_EXTENSION));
-	}
-
-	@Override
-	public int hashCode() {
-		return this.csvFile.hashCode();
-	}
-
-	@Override
-	public boolean equals(Object obj) {
-		if (this == obj)
-			return true;
-		if (obj == null)
-			return false;
-		if (!(obj instanceof CsvFileDataSource))
-			return false;
-		final CsvFileDataSource other = (CsvFileDataSource) obj;
-		return csvFile.equals(other.getCsvFile());
-	}
-
-	@Override
-	public String toString() {
-		return "CsvFileDataSource [csvFile=" + csvFile + "]";
-	}
-
-}
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java
deleted file mode 100644
index c1ec04d9e..000000000
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/QueryResultIterator.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package org.semanticweb.vlog4j.core.reasoner.implementation;
-
-/*
- * #%L
- * VLog4j Core Components
- * %%
- * Copyright (C) 2018 VLog4j Developers
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.util.Iterator;
-
-import org.semanticweb.vlog4j.core.model.api.QueryResult;
-
-import karmaresearch.vlog.Term;
-import karmaresearch.vlog.TermQueryResultIterator;
-
-/**
- * Iterates through all answers to a query. An answer to a query is a
- * {@link QueryResult}. Each query answer is distinct.
- *
- * @author Irina Dragoste
- *
- */
-public class QueryResultIterator implements Iterator<QueryResult>, AutoCloseable {
-
-	private final TermQueryResultIterator vLogTermQueryResultIterator;
-
-	public QueryResultIterator(TermQueryResultIterator termQueryResultIterator) {
-		this.vLogTermQueryResultIterator = termQueryResultIterator;
-	}
-
-	@Override
-	public boolean hasNext() {
-		return this.vLogTermQueryResultIterator.hasNext();
-	}
-
-	@Override
-	public QueryResult next() {
-		final Term[] vLogQueryResult = this.vLogTermQueryResultIterator.next();
-		return VLogToModelConverter.toQueryResult(vLogQueryResult);
-	}
-
-	@Override
-	public void close() {
-		this.vLogTermQueryResultIterator.close();
-	}
-
-}
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java
deleted file mode 100644
index 52bf502df..000000000
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSource.java
+++ /dev/null
@@ -1,146 +0,0 @@
-package org.semanticweb.vlog4j.core.reasoner.implementation;
-
-/*-
- * #%L
- * VLog4j Core Components
- * %%
- * Copyright (C) 2018 VLog4j Developers
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.net.URL;
-import java.util.Iterator;
-import java.util.LinkedHashSet;
-
-import org.apache.commons.lang3.Validate;
-import org.eclipse.jdt.annotation.NonNull;
-import org.semanticweb.vlog4j.core.model.api.Variable;
-import org.semanticweb.vlog4j.core.reasoner.DataSource;
-
-/**
- * A SparqlQueryResultDataSource provides the results of a SPARQL query on a
- * given web endpoint.
- *
- * @author Irina Dragoste
- *
- */
-public class SparqlQueryResultDataSource implements DataSource {
-
-	private static final String DATASOURCE_TYPE_CONFIG_VALUE = "SPARQL";
-
-	private final URL endpoint;
-	private final LinkedHashSet<Variable> queryVariables;
-	private final String queryBody;
-
-	/**
-	 * Creates a data source from answers to a remote SPARQL query.
-	 *
-	 * @param endpoint
-	 *            the web location of the resource the query will be evaluated on.
-	 * @param queryVariables
-	 *            the variables of the query, in the given order. The variable at
-	 *            each position in the ordered set will be mapped to its
-	 *            corresponding query answer term at the same position.
-	 * @param queryBody
-	 *            the content of the WHERE clause in the SPARQL query. Must
-	 *            not contain {@code newline} characters ({@code "\n"}).
-	 */
-	// TODO add examples to javadoc
-	// TODO add illegal argument exceptions to javadoc
-	public SparqlQueryResultDataSource(@NonNull final URL endpoint,
-			@NonNull final LinkedHashSet<Variable> queryVariables, @NonNull final String queryBody) {
-		Validate.notNull(endpoint, "Endpoint cannot be null.");
-		Validate.notNull(queryVariables, "Query variables ordered set cannot be null.");
-		Validate.noNullElements(queryVariables, "Query variables cannot be null or contain null elements.");
-		Validate.notEmpty(queryVariables, "There must be at least one query variable.");
-		Validate.notBlank(queryBody, "Query body cannot be null or blank [{}].", queryBody);
-		// TODO validate query body syntax (for example, new line character)
-		// TODO validate early that the arity coincides with
-		// the assigned predicate
-		this.endpoint = endpoint;
-		this.queryVariables = queryVariables;
-		this.queryBody = queryBody;
-	}
-
-	public URL getEndpoint() {
-		return endpoint;
-	}
-
-	public String getQueryBody() {
-		return queryBody;
-	}
-
-	public LinkedHashSet<Variable> getQueryVariables() {
-		return queryVariables;
-	}
-
-	@Override
-	public final String toConfigString() {
-		final String configStringPattern =
-
-				DataSource.PREDICATE_NAME_CONFIG_LINE +
-
-				DATASOURCE_TYPE_CONFIG_PARAM + "=" + DATASOURCE_TYPE_CONFIG_VALUE + "\n" +
-
-				"EDB%1$d_param0=" + endpoint + "\n" + "EDB%1$d_param1=" + getQueryVariablesList(queryVariables)
-						+ "\n" +
-
-				"EDB%1$d_param2=" + queryBody + "\n";
-
-		return configStringPattern;
-	}
-
-	private String getQueryVariablesList(LinkedHashSet<Variable> queryVariables) {
-		final StringBuilder sb = new StringBuilder();
-		final Iterator<Variable> iterator = queryVariables.iterator();
-		while (iterator.hasNext()) {
-			sb.append(iterator.next().getName());
-			if (iterator.hasNext()) {
-				sb.append(",");
-			}
-		}
-		return sb.toString();
-	}
-
-	@Override
-	public int hashCode() {
-		final int prime = 31;
-		int result = 1;
-		result = prime * result + endpoint.hashCode();
-		result = prime * result + queryBody.hashCode();
-		result = prime * result + queryVariables.hashCode();
-		return result;
-	}
-
-	@Override
-	public boolean equals(Object obj) {
-		if (this == obj)
-			return true;
-		if (obj == null)
-			return false;
-		if (getClass() != obj.getClass())
-			return false;
-		final SparqlQueryResultDataSource other = (SparqlQueryResultDataSource) obj;
-		return this.endpoint.equals(other.getEndpoint()) && this.queryVariables.equals(other.getQueryVariables())
-				&& this.queryBody.equals(other.getQueryBody());
-	}
-
-	@Override
-	public String toString() {
-		return "SparqlQueryResultDataSource [endpoint=" + endpoint + ", queryVariables=" + queryVariables
-				+ ", queryBody=" + queryBody + "]";
-	}
-
-}
diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java
deleted file mode 100644
index 10de85244..000000000
--- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/TermToVLogConverter.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package org.semanticweb.vlog4j.core.reasoner.implementation;
-
-/*-
- * #%L
- * VLog4j Core
Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.vlog4j.core.model.api.Blank; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.TermVisitor; -import org.semanticweb.vlog4j.core.model.api.Variable; - -/** - * A visitor that converts {@link Term}s of different types to corresponding - * internal VLog model {@link karmaresearch.vlog.Term}s. - * - * @author Irina Dragoste - * - */ -class TermToVLogConverter implements TermVisitor { - - /** - * Transforms a Constant to a {@link karmaresearch.vlog.Term} with the same name - * and type {@link karmaresearch.vlog.Term.TermType#CONSTANT}. - */ - @Override - public karmaresearch.vlog.Term visit(Constant term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, term.getName()); - } - - /** - * Transforms a Variable to a {@link karmaresearch.vlog.Term} with the same name - * and type {@link karmaresearch.vlog.Term.TermType#VARIABLE}. - */ - @Override - public karmaresearch.vlog.Term visit(Variable term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, term.getName()); - } - - /** - * Transforms a Blank to a {@link karmaresearch.vlog.Term} with the same name - * and type {@link karmaresearch.vlog.Term.TermType#BLANK}. 
- */ - @Override - public karmaresearch.vlog.Term visit(Blank term) { - return new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, term.getName()); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java deleted file mode 100644 index b1dce18f3..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogReasoner.java +++ /dev/null @@ -1,399 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Formatter; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.lang3.Validate; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.reasoner.LogLevel; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.ReasonerState; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -public class VLogReasoner implements Reasoner { - private static Logger LOGGER = LoggerFactory.getLogger(VLogReasoner.class); - - private final VLog vLog = new VLog(); - private ReasonerState reasonerState = ReasonerState.BEFORE_LOADING; - - private LogLevel internalLogLevel = LogLevel.WARNING; - private Algorithm algorithm = Algorithm.RESTRICTED_CHASE; - private Integer timeoutAfterSeconds; - private RuleRewriteStrategy ruleRewriteStrategy = RuleRewriteStrategy.NONE; - - private final List rules = new ArrayList<>(); - private final Map> factsForPredicate = new HashMap<>(); - private final Map dataSourceForPredicate = new HashMap<>(); - - /** - * Holds the state of the reasoning result. Has value {@code true} if reasoning - * has completed, {@code false} if it has been interrupted. 
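This flag pairs with the timeout support of the removed API; a hedged sketch (the 60-second bound is made up, and the reasoner is set up as in the earlier sketches):

// Sketch only: bounded materialization.
reasoner.setReasoningTimeout(60); // give the chase at most 60 seconds
final boolean completed = reasoner.reason(); // false if materialization was interrupted
// Passing null to setReasoningTimeout removes the bound again.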
- */ - private boolean reasoningCompleted; - - @Override - public void setAlgorithm(final Algorithm algorithm) { - Validate.notNull(algorithm, "Algorithm cannot be null!"); - this.algorithm = algorithm; - } - - @Override - public Algorithm getAlgorithm() { - return this.algorithm; - } - - @Override - public void setReasoningTimeout(Integer seconds) { - if (seconds != null) { - Validate.isTrue(seconds > 0, "Only strictly positive timeout period alowed!", seconds); - } - this.timeoutAfterSeconds = seconds; - } - - @Override - public Integer getReasoningTimeout() { - return this.timeoutAfterSeconds; - } - - @Override - public void addRules(final Rule... rules) throws ReasonerStateException { - addRules(Arrays.asList(rules)); - } - - @Override - public void addRules(final List rules) throws ReasonerStateException { - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Rules cannot be added after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } - Validate.noNullElements(rules, "Null rules are not alowed! The list contains a null at position [%d]."); - this.rules.addAll(new ArrayList<>(rules)); - } - - @Override - public void setRuleRewriteStrategy(RuleRewriteStrategy ruleRewritingStrategy) throws ReasonerStateException { - Validate.notNull(ruleRewritingStrategy, "Rewrite strategy cannot be null!"); - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Rules cannot be re-writen after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } - this.ruleRewriteStrategy = ruleRewritingStrategy; - } - - @Override - public RuleRewriteStrategy getRuleRewriteStrategy() { - return this.ruleRewriteStrategy; - } - - @Override - public void addFacts(final Atom... facts) throws ReasonerStateException { - addFacts(Arrays.asList(facts)); - } - - @Override - public void addFacts(final Collection facts) throws ReasonerStateException { - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Facts cannot be added after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } - Validate.noNullElements(facts, "Null facts are not alowed! The list contains a fact at position [%d]."); - for (final Atom fact : facts) { - validateFactTermsAreConstant(fact); - - final Predicate predicate = fact.getPredicate(); - validateNoDataSourceForPredicate(predicate); - - this.factsForPredicate.putIfAbsent(predicate, new HashSet<>()); - this.factsForPredicate.get(predicate).add(fact); - } - } - - @Override - public void addFactsFromDataSource(final Predicate predicate, final DataSource dataSource) - throws ReasonerStateException { - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, - "Data sources cannot be added after the reasoner has been loaded! Call reset() to undo loading and reasoning."); - } - Validate.notNull(predicate, "Null predicates are not allowed!"); - Validate.notNull(dataSource, "Null dataSources are not allowed!"); - validateNoDataSourceForPredicate(predicate); - Validate.isTrue(!this.factsForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed! 
Facts for predicate [%s] alredy added in memory: %s", - predicate, this.factsForPredicate.get(predicate)); - - this.dataSourceForPredicate.put(predicate, dataSource); - } - - private void validateFactTermsAreConstant(Atom fact) { - final Set nonConstantTerms = new HashSet<>(fact.getTerms()); - nonConstantTerms.removeAll(fact.getConstants()); - Validate.isTrue(nonConstantTerms.isEmpty(), - "Only Constant terms alowed in Fact atoms! The following non-constant terms [%s] appear for fact [%s]!", - nonConstantTerms, fact); - - } - - private void validateNoDataSourceForPredicate(final Predicate predicate) { - Validate.isTrue(!this.dataSourceForPredicate.containsKey(predicate), - "Multiple data sources for the same predicate are not allowed! Facts for predicate [%s] alredy added from data source: %s", - predicate, this.dataSourceForPredicate.get(predicate)); - } - - @Override - public void load() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - if (this.reasonerState != ReasonerState.BEFORE_LOADING) { - LOGGER.warn("This method call is ineffective: the Reasoner has already been loaded."); - } else { - validateEdbIdbSeparation(); - - this.reasonerState = ReasonerState.AFTER_LOADING; - - if (this.dataSourceForPredicate.isEmpty() && this.factsForPredicate.isEmpty()) { - LOGGER.warn("No facts have been provided."); - } - - try { - this.vLog.start(generateDataSourcesConfig(), false); - } catch (final AlreadyStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } - - validateDataSourcePredicateArities(); - - loadInMemoryFacts(); - - if (this.rules.isEmpty()) { - LOGGER.warn("No rules have been provided for reasoning."); - } else { - loadRules(); - } - } - } - - private void validateDataSourcePredicateArities() throws IncompatiblePredicateArityException { - for (final Predicate predicate : this.dataSourceForPredicate.keySet()) { - final int dataSourcePredicateArity; - try { - dataSourcePredicateArity = this.vLog.getPredicateArity(ModelToVLogConverter.toVLogPredicate(predicate)); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - if (dataSourcePredicateArity == -1) { - LOGGER.warn("Data source {0} for predicate {1} is empty: ", this.dataSourceForPredicate.get(predicate), - predicate); - } else if (predicate.getArity() != dataSourcePredicateArity) { - throw new IncompatiblePredicateArityException(predicate, dataSourcePredicateArity, - this.dataSourceForPredicate.get(predicate)); - } - } - - } - - @Override - public boolean reason() throws IOException, ReasonerStateException { - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, "Reasoning is not allowed before loading!"); - } else if (this.reasonerState == ReasonerState.AFTER_REASONING) { - LOGGER.warn( - "This method call is ineffective: this Reasoner has already reasoned. Successive reason() calls are not supported. 
Call reset() to undo loading and reasoning and reload to be able to reason again"); - } else { - this.reasonerState = ReasonerState.AFTER_REASONING; - - final boolean skolemChase = this.algorithm == Algorithm.SKOLEM_CHASE; - try { - if (this.timeoutAfterSeconds == null) { - this.vLog.materialize(skolemChase); - this.reasoningCompleted = true; - } else { - this.reasoningCompleted = this.vLog.materialize(skolemChase, this.timeoutAfterSeconds); - } - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - } - return this.reasoningCompleted; - } - - @Override - public QueryResultIterator answerQuery(Atom queryAtom, boolean includeBlanks) throws ReasonerStateException { - final boolean filterBlanks = !includeBlanks; - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(queryAtom, "Query atom must not be null!"); - - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); - TermQueryResultIterator stringQueryResultIterator; - try { - stringQueryResultIterator = this.vLog.query(vLogAtom, true, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - return new QueryResultIterator(stringQueryResultIterator); - } - - @Override - public void exportQueryAnswersToCsv(final Atom queryAtom, final String csvFilePath, final boolean includeBlanks) - throws ReasonerStateException, IOException { - final boolean filterBlanks = !includeBlanks; - if (this.reasonerState == ReasonerState.BEFORE_LOADING) { - throw new ReasonerStateException(this.reasonerState, "Querying is not alowed before reasoner is loaded!"); - } - Validate.notNull(queryAtom, "Query atom must not be null!"); - Validate.notNull(csvFilePath, "File to export query answer to must not be null!"); - Validate.isTrue(csvFilePath.endsWith(CsvFileDataSource.CSV_FILE_EXTENSION), - "Expected .csv extension for file [%s]!", csvFilePath); - - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(queryAtom); - try { - this.vLog.writeQueryResultsToCsv(vLogAtom, csvFilePath, filterBlanks); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - } - - @Override - public void resetReasoner() { - this.reasonerState = ReasonerState.BEFORE_LOADING; - this.vLog.stop(); - LOGGER.warn( - "Reasoner has been reset. All inferences computed during reasoning have been discarded. More data and rules can be added after resetting. 
The reasoner needs to be loaded again to perform querying and reasoning."); - } - - @Override - public void close() { - this.vLog.stop(); - } - - private void validateEdbIdbSeparation() throws EdbIdbSeparationException { - final Set edbPredicates = collectEdbPredicates(); - final Set idbPredicates = collectIdbPredicates(); - final Set intersection = new HashSet<>(edbPredicates); - intersection.retainAll(idbPredicates); - - if (!intersection.isEmpty()) { - throw new EdbIdbSeparationException(intersection); - } - } - - private Set collectEdbPredicates() { - final Set edbPredicates = new HashSet<>(); - edbPredicates.addAll(this.dataSourceForPredicate.keySet()); - edbPredicates.addAll(this.factsForPredicate.keySet()); - return edbPredicates; - } - - private Set collectIdbPredicates() { - final Set idbPredicates = new HashSet<>(); - for (final Rule rule : this.rules) { - for (final Atom headAtom : rule.getHead()) { - idbPredicates.add(headAtom.getPredicate()); - } - } - return idbPredicates; - } - - String generateDataSourcesConfig() { - final StringBuilder configStringBuilder = new StringBuilder(); - int dataSourceIndex = 0; - for (final Predicate predicate : this.dataSourceForPredicate.keySet()) { - final DataSource dataSource = this.dataSourceForPredicate.get(predicate); - try (final Formatter formatter = new Formatter(configStringBuilder);) { - formatter.format(dataSource.toConfigString(), dataSourceIndex, - ModelToVLogConverter.toVLogPredicate(predicate)); - } - dataSourceIndex++; - } - return configStringBuilder.toString(); - } - - private void loadInMemoryFacts() { - for (final Predicate predicate : this.factsForPredicate.keySet()) { - final Set factsForPredicate = this.factsForPredicate.get(predicate); - - final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); - final String[][] tuplesForPredicate = ModelToVLogConverter.toVLogFactTuples(factsForPredicate); - try { - this.vLog.addData(vLogPredicate, tuplesForPredicate); - } catch (final EDBConfigurationException e) { - throw new RuntimeException("Invalid data sources configuration.", e); - } - } - } - - private void loadRules() { - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(this.rules); - final karmaresearch.vlog.VLog.RuleRewriteStrategy vLogRuleRewriteStrategy = ModelToVLogConverter - .toVLogRuleRewriteStrategy(this.ruleRewriteStrategy); - try { - this.vLog.setRules(vLogRuleArray, vLogRuleRewriteStrategy); - } catch (final NotStartedException e) { - throw new RuntimeException("Inconsistent reasoner state.", e); - } - } - - @Override - public void setLogLevel(LogLevel logLevel) { - Validate.notNull(logLevel, "Log level cannot be null!"); - this.internalLogLevel = logLevel; - this.vLog.setLogLevel(ModelToVLogConverter.toVLogLogLevel(this.internalLogLevel)); - } - - @Override - public LogLevel getLogLevel() { - return this.internalLogLevel; - } - - @Override - public void setLogFile(String filePath) { - this.vLog.setLogFile(filePath); - } - -} diff --git a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java b/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java deleted file mode 100644 index 113f8a2b8..000000000 --- a/vlog4j-core/src/main/java/org/semanticweb/vlog4j/core/reasoner/implementation/VLogToModelConverter.java +++ /dev/null @@ -1,96 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright 
(C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.ArrayList; -import java.util.List; - -import org.semanticweb.vlog4j.core.model.api.QueryResult; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; -import org.semanticweb.vlog4j.core.model.implementation.ConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.VariableImpl; - -/** - * Utility class with static methods for converting from VLog internal model - * ({@code karmaresearch.vlog} objects) to VLog API model - * ({@code org.semanticweb.vlog4j.core.model.api}) objects. - * - * @author Irina Dragoste - * - */ -class VLogToModelConverter { - - /** - * Converts internal VLog query results (represented as arrays of - * {@link karmaresearch.vlog.Term}s) into VLog model API QueryResults. - * - * @param vLogQueryResult - * an array of terms that represent an answer to a query. - * @return a QueryResult containing the corresponding {@code vLogQueryResult} as - * a List of {@link Term}s. - */ - static QueryResult toQueryResult(karmaresearch.vlog.Term[] vLogQueryResult) { - return new QueryResultImpl(toTermList(vLogQueryResult)); - } - - /** - * Converts an array of internal VLog terms ({@link karmaresearch.vlog.Term}) - * into the corresponding list of VLog API model {@link Term}. - * - * @param vLogTerms - * input terms array, to be converted to a list of corresponding - * {@link Term}s. - * @return list of {@link Term}s, where each element corresponds to the element - * in given {@code vLogTerms} at the same position. - */ - static List toTermList(karmaresearch.vlog.Term[] vLogTerms) { - List terms = new ArrayList<>(vLogTerms.length); - for (karmaresearch.vlog.Term vLogTerm : vLogTerms) { - terms.add(toTerm(vLogTerm)); - } - return terms; - } - - /** - * Converts an internal VLog term ({@link karmaresearch.vlog.Term}) to a VLog - * API model {@link Term} of the same type and name. - * - * @param vLogTerm - * term to be converted - * @return a ({@link karmaresearch.vlog.Term}) with the same name as given - * {@code vLogTerm} and of the corresponding type. 
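For illustration, a hedged sketch of this conversion direction (the class is package-private, so such code would live in the same implementation package; the term names are made up):

// Sketch only: converting VLog-internal terms back to vlog4j model terms.
final karmaresearch.vlog.Term[] vLogTuple = {
		new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"),
		new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "b1") };
final List<Term> terms = VLogToModelConverter.toTermList(vLogTuple);
// terms now holds a ConstantImpl("c") and a BlankImpl("b1").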
- */ - static Term toTerm(karmaresearch.vlog.Term vLogTerm) { - String name = vLogTerm.getName(); - switch (vLogTerm.getTermType()) { - case CONSTANT: - return new ConstantImpl(name); - case BLANK: - return new BlankImpl(name); - case VARIABLE: - return new VariableImpl(name); - default: - throw new IllegalArgumentException("Unexpected vlog term type: " + vLogTerm.getTermType()); - } - } - -} diff --git a/vlog4j-core/src/test/data/output/binaryFacts.csv b/vlog4j-core/src/test/data/output/binaryFacts.csv deleted file mode 100644 index bcaabc2bc..000000000 --- a/vlog4j-core/src/test/data/output/binaryFacts.csv +++ /dev/null @@ -1,2 +0,0 @@ -c1,c2 -c3,c4 diff --git a/vlog4j-core/src/test/data/output/include_blanks.csv b/vlog4j-core/src/test/data/output/include_blanks.csv deleted file mode 100644 index e502cf529..000000000 --- a/vlog4j-core/src/test/data/output/include_blanks.csv +++ /dev/null @@ -1,2 +0,0 @@ -c,1_2_0 -c,1_3_0 diff --git a/vlog4j-core/src/test/data/output/outputXXZ.csv b/vlog4j-core/src/test/data/output/outputXXZ.csv deleted file mode 100644 index cf3f1ce66..000000000 --- a/vlog4j-core/src/test/data/output/outputXXZ.csv +++ /dev/null @@ -1 +0,0 @@ -c,c,d diff --git a/vlog4j-core/src/test/data/output/outputXYZ.csv b/vlog4j-core/src/test/data/output/outputXYZ.csv deleted file mode 100644 index cf3f1ce66..000000000 --- a/vlog4j-core/src/test/data/output/outputXYZ.csv +++ /dev/null @@ -1 +0,0 @@ -c,c,d diff --git a/vlog4j-core/src/test/data/output/unaryFacts.csv b/vlog4j-core/src/test/data/output/unaryFacts.csv deleted file mode 100644 index d0aaf976a..000000000 --- a/vlog4j-core/src/test/data/output/unaryFacts.csv +++ /dev/null @@ -1,2 +0,0 @@ -c1 -c2 diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/AtomImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/AtomImplTest.java deleted file mode 100644 index 216c7481d..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/AtomImplTest.java +++ /dev/null @@ -1,141 +0,0 @@ -package org.semanticweb.vlog4j.core.model; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; - -import java.util.Arrays; -import java.util.Set; - -import org.junit.Test; -import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.AtomImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; - -public class AtomImplTest { - - @Test - public void testGetters() { - final Variable x = Expressions.makeVariable("X"); - final Variable y = Expressions.makeVariable("Y"); - final Constant c = Expressions.makeConstant("c"); - final Constant d = Expressions.makeConstant("d"); - final Atom atomP = Expressions.makeAtom("p", x, c, d, y); - final Atom atomQ = Expressions.makeAtom("q", c, d); - - final Set variables = Sets.newSet(x, y); - final Set constants = Sets.newSet(c, d); - - assertEquals("p", atomP.getPredicate().getName()); - assertEquals(atomP.getTerms().size(), atomP.getPredicate().getArity()); - - assertEquals(variables, atomP.getVariables()); - assertEquals(constants, atomP.getConstants()); - assertEquals(Arrays.asList(x, c, d, y), atomP.getTerms()); - - assertEquals("q", atomQ.getPredicate().getName()); - assertEquals(atomQ.getTerms().size(), atomQ.getPredicate().getArity()); - - assertTrue(atomQ.getVariables().isEmpty()); - assertEquals(constants, atomQ.getConstants()); - assertEquals(Arrays.asList(c, d), atomQ.getTerms()); - } - - @Test - public void testEquals() { - final Variable x = Expressions.makeVariable("X"); - final Constant c = Expressions.makeConstant("c"); - - final Predicate predicateP = new PredicateImpl("p", 2); - final Predicate predicateQ = new PredicateImpl("q", 2); - - final Atom atom1 = Expressions.makeAtom("p", Arrays.asList(x, c)); - final Atom atom2 = Expressions.makeAtom("p", x, c); - final Atom atom3 = new AtomImpl(predicateP, Arrays.asList(x, c)); - final Atom atom4 = new AtomImpl(predicateQ, Arrays.asList(x, c)); - final Atom atom5 = new AtomImpl(predicateP, Arrays.asList(c, x)); - - assertEquals(atom1, atom1); - assertEquals(atom1, atom2); - assertEquals(atom1, atom3); - assertEquals(atom1.hashCode(), atom1.hashCode()); - assertNotEquals(atom4, atom1); - assertNotEquals(atom4.hashCode(), atom1.hashCode()); - assertNotEquals(atom5, atom1); - assertNotEquals(atom5.hashCode(), atom1.hashCode()); - assertFalse(atom1.equals(null)); - assertFalse(atom1.equals(c)); - } - - @Test(expected = NullPointerException.class) - public void termsNotNull() { - final Predicate predicate1 = Expressions.makePredicate("p", 1); - new AtomImpl(predicate1, null); - } - - @Test(expected = IllegalArgumentException.class) - public void termsNoNullElements() { - final Predicate predicate1 = Expressions.makePredicate("p", 1); - final Variable x = Expressions.makeVariable("X"); - new AtomImpl(predicate1, Arrays.asList(x, null)); - } - - @Test(expected = IllegalArgumentException.class) - public void termsNonEmpty() { - Expressions.makeAtom("p"); - } - - @Test(expected = NullPointerException.class) - public void predicateNotNull() { - final Predicate nullPredicate = null; - Expressions.makeAtom(nullPredicate, Expressions.makeConstant("c")); 
- } - - @Test(expected = NullPointerException.class) - public void predicateNameNotNull() { - final String nullPredicateName = null; - Expressions.makeAtom(nullPredicateName, Expressions.makeConstant("c")); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotEmpty() { - Expressions.makeAtom("", Expressions.makeConstant("c")); - } - - @Test(expected = IllegalArgumentException.class) - public void predicateNameNotWhitespace() { - Expressions.makeAtom(" ", Expressions.makeConstant("c")); - } - - @Test(expected = IllegalArgumentException.class) - public void termSizeMatchesPredicateArity() { - final Predicate predicateArity1 = Expressions.makePredicate("p", 1); - Expressions.makeAtom(predicateArity1, Expressions.makeConstant("c"), Expressions.makeVariable("X")); - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java deleted file mode 100644 index b8b26e7e2..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/ConjunctionImplTest.java +++ /dev/null @@ -1,95 +0,0 @@ -package org.semanticweb.vlog4j.core.model; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; - -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; -import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; - -public class ConjunctionImplTest { - - @Test - public void testGetters() { - Variable x = Expressions.makeVariable("X"); - Variable y = Expressions.makeVariable("Y"); - Constant c = Expressions.makeConstant("c"); - Constant d = Expressions.makeConstant("d"); - Atom atom1 = Expressions.makeAtom("p", x, c); - Atom atom2 = Expressions.makeAtom("p", y, x); - Atom atom3 = Expressions.makeAtom("q", x, d); - List atomList = Arrays.asList(atom1, atom2, atom3); - - Conjunction conjunction = new ConjunctionImpl(atomList); - - assertEquals(atomList, conjunction.getAtoms()); - assertEquals(Sets.newSet(x, y), conjunction.getVariables()); - assertEquals(Sets.newSet(c, d), conjunction.getTerms(TermType.CONSTANT)); - } - - @Test - public void testEquals() { - Variable x = Expressions.makeVariable("X"); - Variable y = Expressions.makeVariable("Y"); - Constant c = Expressions.makeConstant("c"); - Constant d = Expressions.makeConstant("d"); - Atom atom1 = Expressions.makeAtom("p", x, c); - Atom atom2 = Expressions.makeAtom("p", y, x); - Atom atom3 = Expressions.makeAtom("q", x, d); - List atomList = Arrays.asList(atom1, atom2, atom3); - Conjunction conjunction1 = new ConjunctionImpl(atomList); - Conjunction conjunction2 = Expressions.makeConjunction(atom1, atom2, atom3); - Conjunction conjunction3 = Expressions.makeConjunction(atom1, atom3, atom2); - - assertEquals(conjunction1, conjunction1); - assertEquals(conjunction2, conjunction1); - assertEquals(conjunction2.hashCode(), conjunction1.hashCode()); - assertNotEquals(conjunction3, conjunction1); - assertNotEquals(conjunction3.hashCode(), conjunction1.hashCode()); - assertFalse(conjunction1.equals(null)); - assertFalse(conjunction1.equals(c)); - } - - @Test(expected = NullPointerException.class) - public void atomsNotNull() { - new ConjunctionImpl(null); - } - - @Test(expected = IllegalArgumentException.class) - public void atomsNoNullElements() { - Variable x = Expressions.makeVariable("X"); - Atom atom1 = Expressions.makeAtom("p", x); - List atomList = Arrays.asList(atom1, null); - Expressions.makeConjunction(atomList); - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java deleted file mode 100644 index 210782c48..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/RuleImplTest.java +++ /dev/null @@ -1,112 +0,0 @@ -package org.semanticweb.vlog4j.core.model; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.*; - -import java.util.Collections; - -import org.junit.Test; -import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; - -public class RuleImplTest { - - @Test - public void testGetters() { - Variable x = Expressions.makeVariable("X"); - Variable y = Expressions.makeVariable("Y"); - Variable z = Expressions.makeVariable("Z"); - Constant c = Expressions.makeConstant("c"); - Constant d = Expressions.makeConstant("d"); - Atom atom1 = Expressions.makeAtom("p", x, c); - Atom atom2 = Expressions.makeAtom("p", x, z); - Atom atom3 = Expressions.makeAtom("q", x, y); - Atom atom4 = Expressions.makeAtom("r", x, d); - Conjunction body = Expressions.makeConjunction(atom1, atom2); - Conjunction head = Expressions.makeConjunction(atom3, atom4); - Rule rule = Expressions.makeRule(head, body); - - assertEquals(body, rule.getBody()); - assertEquals(head, rule.getHead()); - assertEquals(Collections.singleton(y), rule.getExistentiallyQuantifiedVariables()); - assertEquals(Sets.newSet(x, z), rule.getUniversallyQuantifiedVariables()); - } - - @Test - public void testEquals() { - Variable x = Expressions.makeVariable("X"); - Variable y = Expressions.makeVariable("Y"); - Variable z = Expressions.makeVariable("Z"); - Constant c = Expressions.makeConstant("c"); - Atom atom1 = Expressions.makeAtom("p", x, c); - Atom atom2 = Expressions.makeAtom("p", x, z); - Atom atom3 = Expressions.makeAtom("q", x, y); - Conjunction body = Expressions.makeConjunction(atom1, atom2); - Conjunction head = Expressions.makeConjunction(atom3); - Rule rule1 = new RuleImpl(head, body); - Rule rule2 = Expressions.makeRule(atom3, atom1, atom2); - Rule rule3 = new RuleImpl(head, head); - Rule rule4 = new RuleImpl(body, body); - Rule rule5 = new RuleImpl(body, head); - - assertEquals(rule1, rule1); - assertEquals(rule2, rule1); - assertEquals(rule2.hashCode(), rule1.hashCode()); - assertNotEquals(rule3, rule1); - assertNotEquals(rule3.hashCode(), rule1.hashCode()); - assertNotEquals(rule4, rule1); - assertNotEquals(rule4.hashCode(), rule1.hashCode()); - assertNotEquals(rule5, rule1); - assertFalse(rule1.equals(null)); - assertFalse(rule1.equals(c)); - } - - @Test(expected = IllegalArgumentException.class) - public void bodyNonEmpty() { - Expressions.makeRule(Expressions.makeAtom("p", Expressions.makeVariable("X"))); - } - - @Test(expected = NullPointerException.class) - public void bodyNotNull() { - Conjunction head = Expressions.makeConjunction(Expressions.makeAtom("p", Expressions.makeVariable("X"))); - Expressions.makeRule(head, null); - } - - @Test(expected = IllegalArgumentException.class) - public void headNonEmpty() { - Conjunction body = 
Expressions.makeConjunction(Expressions.makeAtom("p", Expressions.makeVariable("X"))); - Expressions.makeRule(Expressions.makeConjunction(), body); - } - - @Test(expected = NullPointerException.class) - public void headNotNull() { - Conjunction body = Expressions.makeConjunction(Expressions.makeAtom("p", Expressions.makeVariable("X"))); - Expressions.makeRule(null, body); - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java deleted file mode 100644 index 63c8423cc..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/model/TermImplTest.java +++ /dev/null @@ -1,78 +0,0 @@ -package org.semanticweb.vlog4j.core.model; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.*; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.TermType; -import org.semanticweb.vlog4j.core.model.implementation.ConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.VariableImpl; - -public class TermImplTest { - - @Test - public void constantImplEqualityTest() { - Term c = new ConstantImpl("c"); - Term ctoo = new ConstantImpl("c"); - Term a = new ConstantImpl("a"); - Term v = new VariableImpl("c"); - - assertEquals(c, c); - assertEquals(ctoo, c); - assertNotEquals(a, c); - assertNotEquals(v, c); - assertNotEquals(a.hashCode(), c.hashCode()); - assertNotEquals(v.hashCode(), c.hashCode()); - assertFalse(c.equals(null)); // written like this for recording coverage properly - assertFalse(c.equals("c")); // written like this for recording coverage properly - } - - @Test - public void termGetterTest() { - Term c = new ConstantImpl("c"); - assertEquals("c", c.getName()); - assertEquals(TermType.CONSTANT, c.getType()); - - Term v = new VariableImpl("v"); - assertEquals("v", v.getName()); - assertEquals(TermType.VARIABLE, v.getType()); - } - - @Test(expected = NullPointerException.class) - public void constantNameNonNullTest() { - new ConstantImpl((String)null); - } - - @Test(expected = IllegalArgumentException.class) - public void constantNameNonEmptyTest() { - new ConstantImpl(""); - } - - @Test(expected = IllegalArgumentException.class) - public void constantNameNonWhitespaceTest() { - new ConstantImpl(" "); - } - - - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/CsvFileUtils.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/CsvFileUtils.java deleted file mode 100644 index 90a7ed251..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/CsvFileUtils.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 
(the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.FileReader; -import java.io.IOException; -import java.io.Reader; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVParser; - -/** - * Utility class for collecting the content of a .csv file, or writing fact - * terms to a csv file. - * - * @author Irina Dragoste - * - */ -public final class CsvFileUtils { - public static final String CSV_EXPORT_FOLDER = "src/test/data/output/"; - public static final String CSV_INPORT_FOLDER = "src/test/data/input/"; - - private CsvFileUtils() { - } - - /** - * Collects the content of given {@code csvFile} into a List of lines, where - * each line is represented as a List of String entries. - * - * @param csvFile - * file to be read - * @return content of given {@code csvFile} as a List of lines, where each line - * is represented as a List of String entries. - * @throws IOException - * if an I/O error occurs regarding given {@code csvFile} - */ - public static List> getCSVContent(final String csvFile) throws IOException { - final List> content = new ArrayList<>(); - - final Reader in = new FileReader(csvFile); - final CSVParser parse = CSVFormat.DEFAULT.parse(in); - parse.forEach(csvRecord -> { - final List line = new ArrayList<>(); - csvRecord.forEach(line::add); - content.add(line); - }); - return content; - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java deleted file mode 100644 index 95c14efbb..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/LoggingTest.java +++ /dev/null @@ -1,187 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner; - -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.IOException; - -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -@Ignore -public class LoggingTest { - private static final String logFilePath = "src/test/data/log.out"; - - private static final Variable vx = Expressions.makeVariable("x"); - // p(?x) -> q(?x) - private static final Atom ruleHeadQx = Expressions.makeAtom("q", vx); - private static final Atom ruleBodyPx = Expressions.makeAtom("p", vx); - private static final Rule rule = Expressions.makeRule(ruleHeadQx, ruleBodyPx); - - private static final Constant constantC = Expressions.makeConstant("c"); - private static final Atom factPc = Expressions.makeAtom("p", constantC); - - @Before - public void assertLogTestFileNotExists() { - assertFalse(new File(logFilePath).exists()); - } - - // TODO remaining tests: change log file - // TODO remaining tests: test that the log level and the log files can be set - // any time - - @Test - public void testSetLogFileNull() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner instance = Reasoner.getInstance()) { - instance.setLogFile(null); - assertFalse(new File(logFilePath).exists()); - instance.setLogLevel(LogLevel.INFO); - - instance.addFacts(factPc); - instance.addRules(rule); - instance.load(); - instance.reason(); - } - // TODO test that logging is redirected to system output - assertFalse(new File(logFilePath).exists()); - } - - @Test - public void testSetLogFileInexistent() throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - try (final Reasoner instance = Reasoner.getInstance()) { - instance.setLogFile("/a/b"); - assertFalse(new File("/a/b").exists()); - instance.setLogLevel(LogLevel.INFO); - - instance.addFacts(factPc); - instance.addRules(rule); - instance.load(); - instance.reason(); - } - // TODO test that logging is redirected to system output - assertFalse(new File(logFilePath).exists()); - assertFalse(new File("/a/b").exists()); - } - - @Test(expected = NullPointerException.class) - public void testSetLogLevelNull() { - try (final Reasoner instance = Reasoner.getInstance()) { - instance.setLogLevel(null); - } - } - - @Test - public void testSetLogFileAppendsToFile() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner instance = Reasoner.getInstance()) { - instance.addFacts(factPc); - instance.addRules(rule); - instance.setLogLevel(LogLevel.INFO); - instance.setLogFile(logFilePath); - instance.load(); - instance.reason(); - - final int countLinesBeforeReset = readFile(); - assertTrue(countLinesBeforeReset > 0); - - instance.resetReasoner(); - instance.load(); - instance.reason(); - - final int countLinesAfterReset = readFile(); - - // the logger appends to the same file after reset - assertTrue(countLinesAfterReset > countLinesBeforeReset); - } - } - - 
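- // The log-level tests below only assert that reasoning wrote a non-empty log file at the requested level; VLog's exact log output is not fixed, so no line counts are compared between INFO and DEBUG.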
@Test - public void testLogLevelInfo() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner instance = Reasoner.getInstance()) { - instance.addFacts(factPc); - instance.addRules(rule); - - instance.setLogLevel(LogLevel.INFO); - instance.setLogFile(logFilePath); - instance.load(); - instance.setLogLevel(LogLevel.INFO); - instance.reason(); - instance.setLogLevel(LogLevel.INFO); - - final int countLinesReasonLogLevelInfo = readFile(); - assertTrue(countLinesReasonLogLevelInfo > 0); - } - } - - @Test - public void testLogLevelDebug() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner instance = Reasoner.getInstance()) { - instance.addFacts(factPc); - instance.addRules(rule); - - instance.setLogLevel(LogLevel.DEBUG); - instance.setLogFile(logFilePath); - instance.load(); - instance.setLogLevel(LogLevel.DEBUG); - instance.reason(); - instance.setLogLevel(LogLevel.DEBUG); - - final int countLinesReasonLogLevelDebug = readFile(); - assertTrue(countLinesReasonLogLevelDebug > 0); - } - } - - @After - public void deleteLogTestFile() { - new File(logFilePath).delete(); - } - - private int readFile() throws IOException, FileNotFoundException { - int countLines = 0; - assertTrue(new File(logFilePath).exists()); - try (BufferedReader br = new BufferedReader(new FileReader(logFilePath))) { - String sCurrentLine; - while ((sCurrentLine = br.readLine()) != null) { - assertFalse(sCurrentLine.isEmpty()); - countLines++; - } - } - - return countLines; - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java deleted file mode 100644 index 1dfe392a1..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AddDataSourceTest.java +++ /dev/null @@ -1,153 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
 - * #L% - */ - - import java.io.File; - import java.io.IOException; - import java.util.List; - import java.util.Set; - - import org.junit.Test; - import org.semanticweb.vlog4j.core.model.api.Atom; - import org.semanticweb.vlog4j.core.model.api.Constant; - import org.semanticweb.vlog4j.core.model.api.Predicate; - import org.semanticweb.vlog4j.core.model.api.Term; - import org.semanticweb.vlog4j.core.model.implementation.Expressions; - import org.semanticweb.vlog4j.core.reasoner.CsvFileUtils; - import org.semanticweb.vlog4j.core.reasoner.DataSource; - import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - - import karmaresearch.vlog.EDBConfigurationException; - - public class AddDataSourceTest { - - private static final String CSV_FILE_PATH = CsvFileUtils.CSV_INPORT_FOLDER + "unaryFacts.csv"; - - @Test - public void testAddDataSourceExistentDataForDifferentPredicates() throws ReasonerStateException, - EdbIdbSeparationException, EDBConfigurationException, IOException, IncompatiblePredicateArityException { - final Predicate predicateParity1 = Expressions.makePredicate("p", 1); - final Constant constantA = Expressions.makeConstant("a"); - final Atom factPredicatePArity2 = Expressions.makeAtom("p", constantA, constantA); - final Atom factPredicateQArity1 = Expressions.makeAtom("q", constantA); - final Predicate predicateLArity1 = Expressions.makePredicate("l", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFacts(factPredicatePArity2, factPredicateQArity1); - reasoner.addFactsFromDataSource(predicateLArity1, dataSource); - reasoner.addFactsFromDataSource(predicateParity1, dataSource); - reasoner.load(); - reasoner.reason(); - final QueryResultIterator queryResultIteratorL1 = reasoner.answerQuery( - Expressions.makeAtom(predicateLArity1, Expressions.makeVariable("x")), false); - final Set<List<Term>> queryResultsL1 = QueryResultsUtils.collectQueryResults(queryResultIteratorL1); - - final QueryResultIterator queryResultIteratorP1 = reasoner.answerQuery( - Expressions.makeAtom(predicateParity1, Expressions.makeVariable("x")), false); - final Set<List<Term>> queryResultsP1 = QueryResultsUtils.collectQueryResults(queryResultIteratorP1); - assertEquals(queryResultsL1, queryResultsP1); - - } - } - - @Test - public void testAddDataSourceBeforeLoading() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { - final Predicate predicateP = Expressions.makePredicate("p", 1); - final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFactsFromDataSource(predicateP, dataSource); - reasoner.addFactsFromDataSource(predicateQ, dataSource); - reasoner.load(); - } - } - - @Test(expected = ReasonerStateException.class) - public void testAddDataSourceAfterLoading() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { - final Predicate predicateP = Expressions.makePredicate("p", 1); - final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new
File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFactsFromDataSource(predicateP, dataSource); - reasoner.load(); - reasoner.addFactsFromDataSource(predicateQ, dataSource); - } - } - - @Test(expected = ReasonerStateException.class) - public void testAddDataSourceAfterReasoning() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { - final Predicate predicateP = Expressions.makePredicate("p", 1); - final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFactsFromDataSource(predicateP, dataSource); - reasoner.load(); - reasoner.reason(); - reasoner.addFactsFromDataSource(predicateQ, dataSource); - } - } - - @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoMultipleDataSourcesForPredicate() throws ReasonerStateException, IOException { - final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFactsFromDataSource(predicate, dataSource); - reasoner.addFactsFromDataSource(predicate, dataSource); - } - } - - @Test(expected = IllegalArgumentException.class) - public void testAddDataSourceNoFactsForPredicate() throws ReasonerStateException, IOException { - final Predicate predicate = Expressions.makePredicate("p", 1); - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - final Atom fact = Expressions.makeAtom(Expressions.makePredicate("p", 1), Expressions.makeConstant("a")); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFacts(fact); - reasoner.addFactsFromDataSource(predicate, dataSource); - } - } - - @Test(expected = NullPointerException.class) - public void testAddDataSourcePredicateNotNull() throws ReasonerStateException, IOException { - final DataSource dataSource = new CsvFileDataSource(new File(CSV_FILE_PATH)); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFactsFromDataSource(null, dataSource); - } - } - - @Test(expected = NullPointerException.class) - public void testAddDataSourceNotNullDataSource() throws ReasonerStateException { - final Predicate predicate = Expressions.makePredicate("p", 1); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFactsFromDataSource(predicate, null); - } - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java deleted file mode 100644 index 2a553113f..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/AnswerQueryTest.java +++ /dev/null @@ -1,303 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
 - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - - import java.io.IOException; - import java.util.ArrayList; - import java.util.Arrays; - import java.util.List; - import java.util.Set; - - import org.junit.Assert; - import org.junit.Test; - import org.mockito.internal.util.collections.Sets; - import org.semanticweb.vlog4j.core.model.api.Atom; - import org.semanticweb.vlog4j.core.model.api.Constant; - import org.semanticweb.vlog4j.core.model.api.QueryResult; - import org.semanticweb.vlog4j.core.model.api.Rule; - import org.semanticweb.vlog4j.core.model.api.Term; - import org.semanticweb.vlog4j.core.model.api.TermType; - import org.semanticweb.vlog4j.core.model.api.Variable; - import org.semanticweb.vlog4j.core.model.implementation.Expressions; - import org.semanticweb.vlog4j.core.reasoner.Algorithm; - import org.semanticweb.vlog4j.core.reasoner.Reasoner; - import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; - import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - - import karmaresearch.vlog.EDBConfigurationException; - - public class AnswerQueryTest { - - @Test - public void testEDBQuerySameConstantSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final String predicate = "p"; - final Constant constantC = Expressions.makeConstant("c"); - final Constant constantD = Expressions.makeConstant("d"); - final Variable x = Expressions.makeVariable("X"); - final Variable y = Expressions.makeVariable("Y"); - final Variable z = Expressions.makeVariable("Z"); - final Atom fact = Expressions.makeAtom(predicate, constantC, constantC, constantD); - - final boolean includeBlanks = false; - @SuppressWarnings("unchecked") - final Set<List<Term>> factCCD = Sets.newSet(Arrays.asList(constantC, constantC, constantD)); - - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFacts(fact); - reasoner.load(); - - final Atom queryAtomXYZ = Expressions.makeAtom(predicate, x, y, z); - try (final QueryResultIterator queryResultIteratorXYZ = reasoner.answerQuery(queryAtomXYZ, includeBlanks)) { - final Set<List<Term>> queryResultsXYZ = QueryResultsUtils.collectQueryResults(queryResultIteratorXYZ); - assertEquals(factCCD, queryResultsXYZ); - } - - final Atom queryAtomXXZ = Expressions.makeAtom(predicate, x, x, z); - try (final QueryResultIterator queryResultIteratorXXZ = reasoner.answerQuery(queryAtomXXZ, includeBlanks)) { - final Set<List<Term>> queryResultsXXZ = QueryResultsUtils.collectQueryResults(queryResultIteratorXXZ); - assertEquals(factCCD, queryResultsXXZ); - } - - final Atom queryAtomXXX = Expressions.makeAtom(predicate, x, x, x); - try (final QueryResultIterator queryResultIteratorXXX = reasoner.answerQuery(queryAtomXXX, includeBlanks)) { - assertFalse(queryResultIteratorXXX.hasNext()); - } - - final Atom queryAtomXYX = Expressions.makeAtom(predicate, x, y, x); - try (final QueryResultIterator queryResultIteratorXYX = reasoner.answerQuery(queryAtomXYX,
includeBlanks)) { - - assertFalse(queryResultIteratorXYX.hasNext()); - } - } - } - - @Test - public void testIDBQuerySameBlankSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final String predicate = "p"; - final Variable x = Expressions.makeVariable("X"); - final Variable y = Expressions.makeVariable("Y"); - final Variable z = Expressions.makeVariable("Z"); - final Atom pYY = Expressions.makeAtom(predicate, y, y); - final Atom pYZ = Expressions.makeAtom(predicate, y, z); - final Rule pX__pYY_pYZ = Expressions.makeRule(Expressions.makeConjunction(pYY, pYZ), - Expressions.makeConjunction(Expressions.makeAtom(predicate, x))); - assertEquals(Sets.newSet(y, z), pX__pYY_pYZ.getExistentiallyQuantifiedVariables()); - - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.addFacts(Expressions.makeAtom(predicate, Expressions.makeConstant("c"))); - reasoner.addRules(pX__pYY_pYZ); - reasoner.load(); - reasoner.reason(); - - // expected p(_:b1, _:b1), p(_:b1, _:b2) - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(pYZ, true)) { - final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator); - assertTrue(queryResults.size() == 2); - final ArrayList<List<Term>> queryResultsArray = new ArrayList<>(queryResults); - assertEquals(queryResultsArray.get(0).get(0), queryResultsArray.get(1).get(0)); // y - assertNotEquals(queryResultsArray.get(0).get(1), queryResultsArray.get(1).get(1)); // y, z - } - - // expected p(_:b1, _:b1) - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(pYY, true)) { - final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator); - assertTrue(queryResults.size() == 1); - final ArrayList<List<Term>> queryResultsArray = new ArrayList<>(queryResults); - assertEquals(queryResultsArray.get(0).get(0), queryResultsArray.get(0).get(1)); // y - } - } - } - - @Test - public void testIDBQuerySameIndividualSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final String predicate = "p"; - final Variable x = Expressions.makeVariable("X"); - final Variable y = Expressions.makeVariable("Y"); - final Variable z = Expressions.makeVariable("Z"); - final Variable t = Expressions.makeVariable("T"); - final Atom pXYYZZT = Expressions.makeAtom(predicate, x, y, y, z, z, t); - final Rule pXY__pXYYZZT = Expressions.makeRule(pXYYZZT, Expressions.makeAtom(predicate, x, y)); - assertEquals(Sets.newSet(z, t), pXY__pXYYZZT.getExistentiallyQuantifiedVariables()); - final Constant constantC = Expressions.makeConstant("c"); - final Constant constantD = Expressions.makeConstant("d"); - final Atom factPcd = Expressions.makeAtom(predicate, constantC, constantD); - - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFacts(factPcd); - reasoner.addRules(pXY__pXYYZZT); - reasoner.load(); - reasoner.reason(); - - final Atom queryAtomXYYZZT = pXYYZZT; - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYZZT, true)) { - assertTrue(queryResultIterator.hasNext()); - final List<Term> queryResultTerms = queryResultIterator.next().getTerms(); - assertEquals(6, queryResultTerms.size()); - - assertEquals(constantC, queryResultTerms.get(0)); // x - assertEquals(constantD,
queryResultTerms.get(1)); // y - assertEquals(constantD, queryResultTerms.get(2)); // y - - final Term blankForZ = queryResultTerms.get(3); // z - assertEquals(TermType.BLANK, blankForZ.getType()); - assertEquals(blankForZ, queryResultTerms.get(4)); // z - - final Term blankForT = queryResultTerms.get(5); // t - assertEquals(TermType.BLANK, blankForT.getType()); - - assertNotEquals(queryResultTerms.get(4), blankForT); // z, t - - assertFalse(queryResultIterator.hasNext()); - } - - // x and y do not have the same constant substitution - final Atom queryAtomXXYZZT = Expressions.makeAtom(predicate, x, x, y, z, z, t); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXXYZZT, true)) { - assertFalse(queryResultIterator.hasNext()); - } - // z and t do not have the same blank substitution - final Atom queryAtomXYYZZZ = Expressions.makeAtom(predicate, x, y, y, z, z, z); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYZZZ, true)) { - assertFalse(queryResultIterator.hasNext()); - } - // universal and existential variables do not have the same substitution - // y and z do not have the same constant substitution - final Atom queryAtomXYYYZT = Expressions.makeAtom(predicate, x, y, y, y, z, t); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYYZT, true)) { - assertFalse(queryResultIterator.hasNext()); - } - - // y and t do not have the same constant substitution - final Atom queryAtomXYYZZY = Expressions.makeAtom(predicate, x, y, y, z, z, y); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtomXYYZZY, true)) { - assertFalse(queryResultIterator.hasNext()); - } - - } - } - - @Test - public void queryResultWithBlanks() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final Variable vx = Expressions.makeVariable("x"); - final Variable vy = Expressions.makeVariable("y"); - // P(x) -> Q(y) - final Rule existentialRule = Expressions.makeRule(Expressions.makeAtom("q", vy), Expressions.makeAtom("p", vx)); - assertEquals(Sets.newSet(vy), existentialRule.getExistentiallyQuantifiedVariables()); - final Constant constantC = Expressions.makeConstant("c"); - final Atom fact = Expressions.makeAtom("p", constantC); - final Atom queryAtom = Expressions.makeAtom("q", Expressions.makeVariable("?x")); - - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFacts(fact); - reasoner.addRules(existentialRule); - reasoner.load(); - reasoner.reason(); - - try (final QueryResultIterator queryResultIteratorIncludeBlanks = reasoner.answerQuery(queryAtom, true)) { - assertTrue(queryResultIteratorIncludeBlanks.hasNext()); - final QueryResult queryResult = queryResultIteratorIncludeBlanks.next(); - assertTrue(queryResult.getTerms().size() == 1); - final Term queryResultTerm = queryResult.getTerms().get(0); - assertEquals(TermType.BLANK, queryResultTerm.getType()); - assertFalse(queryResultIteratorIncludeBlanks.hasNext()); - } - - try (final QueryResultIterator queryResultIteratorExcludeBlanks = reasoner.answerQuery(queryAtom, false)) { - assertFalse(queryResultIteratorExcludeBlanks.hasNext()); - } - } - } - - @Test - public void queryEmptyKnowledgeBase() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.load(); - - final Atom queryAtom = Expressions.makeAtom("P", 
Expressions.makeVariable("?x")); - final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true); - Assert.assertFalse(queryResultIterator.hasNext()); - queryResultIterator.close(); - - reasoner.reason(); - - try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom, true)) { - assertFalse(queryResultIteratorAfterReason.hasNext()); - } - } - } - - @Test - public void queryEmptyRules() throws IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner()) { - final Atom fact = Expressions.makeAtom("P", Expressions.makeConstant("c")); - reasoner.addFacts(fact); - reasoner.load(); - - final Atom queryAtom = Expressions.makeAtom("P", Expressions.makeVariable("?x")); - - reasoner.reason(); - - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true)) { - final Set<List<Term>> queryResults = QueryResultsUtils.collectQueryResults(queryResultIterator); - @SuppressWarnings("unchecked") - final Set<List<Term>> expectedQueryResults = Sets.newSet(Arrays.asList(Expressions.makeConstant("c"))); - assertEquals(expectedQueryResults, queryResults); - } - } - } - - @Test - public void queryEmptyFacts() - throws EDBConfigurationException, IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makeAtom("q", vx), Expressions.makeAtom("p", vx)); - - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addRules(rule); - reasoner.load(); - - final Atom queryAtom = Expressions.makeAtom("P", Expressions.makeVariable("?x")); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(queryAtom, true)) { - Assert.assertFalse(queryResultIterator.hasNext()); - queryResultIterator.close(); - } - - reasoner.reason(); - - try (final QueryResultIterator queryResultIteratorAfterReason = reasoner.answerQuery(queryAtom, true)) { - assertFalse(queryResultIteratorAfterReason.hasNext()); - } - } - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java deleted file mode 100644 index 8650edf28..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/CsvFileDataSourceTest.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- * #L% - */ - -import static org.junit.Assert.assertEquals; - -import java.io.File; -import java.io.IOException; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.reasoner.CsvFileUtils; - -public class CsvFileDataSourceTest { - - @Test - public void testToConfigString() throws IOException { - final File csvFile = new File(CsvFileUtils.CSV_INPORT_FOLDER + "file.csv"); - final CsvFileDataSource csvFileDataSource = new CsvFileDataSource(csvFile); - - final String expectedConfigString = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=INMEMORY\n" - + "EDB%1$d_param0=" + new File(csvFile.getParent()).getCanonicalPath() + "\n" + "EDB%1$d_param1=file\n"; - - final String actualConfigString = csvFileDataSource.toConfigString(); - assertEquals(expectedConfigString, actualConfigString); - } - - @Test - public void getFileNameWithoutExtension() throws IOException { - final CsvFileDataSource csvFileDataSource = new CsvFileDataSource( - new File(CsvFileUtils.CSV_INPORT_FOLDER + "file.csv")); - assertEquals("file", csvFileDataSource.getFileNameWithoutExtension()); - } - - @Test(expected = NullPointerException.class) - public void fileNameNotNull() throws IOException { - new CsvFileDataSource(null); - } - - @Test(expected = IllegalArgumentException.class) - public void fileNameEndsWithCsv() throws IOException { - new CsvFileDataSource(new File("invalid/file/name")); - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvTest.java deleted file mode 100644 index 8615195e6..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ExportQueryAnswersToCsvTest.java +++ /dev/null @@ -1,105 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
 - * #L% - */ - - import static org.junit.Assert.assertEquals; - import static org.junit.Assert.assertTrue; - - import java.io.IOException; - import java.util.Arrays; - import java.util.List; - - import org.junit.Test; - import org.semanticweb.vlog4j.core.model.api.Atom; - import org.semanticweb.vlog4j.core.model.api.Constant; - import org.semanticweb.vlog4j.core.model.api.Variable; - import org.semanticweb.vlog4j.core.model.implementation.Expressions; - import org.semanticweb.vlog4j.core.reasoner.CsvFileUtils; - import org.semanticweb.vlog4j.core.reasoner.Reasoner; - import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - - public class ExportQueryAnswersToCsvTest { - - @Test - public void testEDBQuerySameConstantSubstitutesSameVariableName() - throws ReasonerStateException, IOException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final String predicate = "p"; - final Constant constantC = Expressions.makeConstant("c"); - final Constant constantD = Expressions.makeConstant("d"); - final Variable x = Expressions.makeVariable("X"); - final Variable y = Expressions.makeVariable("Y"); - final Variable z = Expressions.makeVariable("Z"); - final Atom fact = Expressions.makeAtom(predicate, constantC, constantC, constantD); - - final boolean includeBlanks = false; - // final String csvFilePath = CSV_EXPORT_FOLDER + "output"; - final List<List<String>> factCCD = Arrays.asList(Arrays.asList("c", "c", "d")); - - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFacts(fact); - reasoner.load(); - - final Atom queryAtomXYZ = Expressions.makeAtom(predicate, x, y, z); - final String csvFilePathXYZ = CsvFileUtils.CSV_EXPORT_FOLDER + "outputXYZ.csv"; - reasoner.exportQueryAnswersToCsv(queryAtomXYZ, csvFilePathXYZ, includeBlanks); - final List<List<String>> csvContentXYZ = CsvFileUtils.getCSVContent(csvFilePathXYZ); - assertEquals(factCCD, csvContentXYZ); - - final Atom queryAtomXXZ = Expressions.makeAtom(predicate, x, x, z); - final String csvFilePathXXZ = CsvFileUtils.CSV_EXPORT_FOLDER + "outputXXZ.csv"; - reasoner.exportQueryAnswersToCsv(queryAtomXXZ, csvFilePathXXZ, includeBlanks); - final List<List<String>> csvContentXXZ = CsvFileUtils.getCSVContent(csvFilePathXXZ); - assertEquals(factCCD, csvContentXXZ); - - final Atom queryAtomXXX = Expressions.makeAtom("q", x, x, x); - final String csvFilePathXXX = CsvFileUtils.CSV_EXPORT_FOLDER + "outputXXX.csv"; - reasoner.exportQueryAnswersToCsv(queryAtomXXX, csvFilePathXXX, includeBlanks); - final List<List<String>> csvContentXXX = CsvFileUtils.getCSVContent(csvFilePathXXX); - assertTrue(csvContentXXX.isEmpty()); - - final Atom queryAtomXYX = Expressions.makeAtom("q", x, y, x); - final String csvFilePathXYX = CsvFileUtils.CSV_EXPORT_FOLDER + "outputXYX.csv"; - reasoner.exportQueryAnswersToCsv(queryAtomXYX, csvFilePathXYX, includeBlanks); - final List<List<String>> csvContentXYX = CsvFileUtils.getCSVContent(csvFilePathXYX); - assertTrue(csvContentXYX.isEmpty()); - } - - } - - @Test - public void testExportQueryEmptyKnowledgeBase() - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final Atom queryAtom = Expressions.makeAtom("p", Expressions.makeVariable("?x"), - Expressions.makeVariable("?y")); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.load(); - final String emptyFilePath = CsvFileUtils.CSV_EXPORT_FOLDER +
"empty.csv"; - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, true); - assertTrue(CsvFileUtils.getCSVContent(emptyFilePath).isEmpty()); - - reasoner.exportQueryAnswersToCsv(queryAtom, emptyFilePath, false); - assertTrue(CsvFileUtils.getCSVContent(emptyFilePath).isEmpty()); - } - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java deleted file mode 100644 index bb8db36b5..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/GeneratedAnonymousIndividualsTest.java +++ /dev/null @@ -1,177 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.util.List; - -import org.junit.Test; -import org.mockito.internal.util.collections.Sets; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.CsvFileUtils; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -public class GeneratedAnonymousIndividualsTest { - - private static final String includeBlanksFilePath = CsvFileUtils.CSV_EXPORT_FOLDER + "include_blanks.csv"; - private static final String excludeBlanksFilePath = CsvFileUtils.CSV_EXPORT_FOLDER + "exclude_blanks.csv"; - - private static final Variable vx = Expressions.makeVariable("x"); - private static final Variable vy = Expressions.makeVariable("y"); - private static final Variable vz = Expressions.makeVariable("z"); - private static final String p = "p"; - - // rule: P(?x) -> P(?x,!y), P(?x,!z) - private static final Rule existentialRule = Expressions.makeRule( - Expressions.makeConjunction(Expressions.makeAtom(p, vx, vy), Expressions.makeAtom(p, vx, vz)), - Expressions.makeConjunction(Expressions.makeAtom(p, vx))); - static { - // y,z existential variables that can introduce blanks (anonymous individuals) - assertEquals(Sets.newSet(vy, vz), existentialRule.getExistentiallyQuantifiedVariables()); - } - - // fact: P(c) - private static final 
Constant constantC = Expressions.makeConstant("c"); - private static final Atom fact = Expressions.makeAtom(p, constantC); - - // query: P(?x,?y) ? - final Atom queryAtom = Expressions.makeAtom(p, Expressions.makeVariable("?x"), Expressions.makeVariable("?y")); - - @Test - public void testBlanksSkolemChaseNoRuleRewrite() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); - - reasoner.addFacts(fact); - reasoner.addRules(existentialRule); - reasoner.load(); - reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - - checkTowDistinctBlanksGenerated(reasoner); - } - } - - @Test - public void testBlanksSkolemChaseSplitHeadPieces() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - // P(?x) -> P(?x,!y), P(?x,!z) - // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} - reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - - reasoner.addFacts(fact); - reasoner.addRules(existentialRule); - reasoner.load(); - reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - - checkTowDistinctBlanksGenerated(reasoner); - } - } - - @Test - public void testBlanksRestrictedChaseNoRuleRewrite() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - assertEquals(RuleRewriteStrategy.NONE, reasoner.getRuleRewriteStrategy()); - - reasoner.addFacts(fact); - reasoner.addRules(existentialRule); - reasoner.load(); - reasoner.reason(); - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - - checkTowDistinctBlanksGenerated(reasoner); - } - } - - @Test - public void testBlanksRestrictedChaseSplitHeadPieces() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - - // {P(?x) -> P(?x,!y), P(?x,!z)} - // after split becomes {{P(?x) -> P(?x,!y), {P(?x)-> P(?x,!z)}} - reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES); - reasoner.addFacts(fact); - reasoner.addRules(existentialRule); - reasoner.load(); - reasoner.reason(); - - reasoner.exportQueryAnswersToCsv(queryAtom, includeBlanksFilePath, true); - // expected fact: P(c, _:b) - final List<List<String>> csvContentIncludeBlanks = CsvFileUtils.getCSVContent(includeBlanksFilePath); - assertTrue(csvContentIncludeBlanks.size() == 1); - for (final List<String> queryResult : csvContentIncludeBlanks) { - assertTrue(queryResult.size() == 2); - assertEquals(queryResult.get(0), "c"); - } - final String blank = csvContentIncludeBlanks.get(0).get(1); - assertNotEquals("c", blank); - - reasoner.exportQueryAnswersToCsv(queryAtom, excludeBlanksFilePath, false); - final List<List<String>> csvContentExcludeBlanks = CsvFileUtils.getCSVContent(excludeBlanksFilePath); - assertTrue(csvContentExcludeBlanks.isEmpty()); - - } - } - - private void checkTowDistinctBlanksGenerated(final Reasoner reasoner) - throws ReasonerStateException,
IOException, EdbIdbSeparationException { - // expected facts: P(c, _:b1), P(c, _:b2) - final List<List<String>> csvContentIncludeBlanks = CsvFileUtils.getCSVContent(includeBlanksFilePath); - assertTrue(csvContentIncludeBlanks.size() == 2); - for (final List<String> queryResult : csvContentIncludeBlanks) { - assertTrue(queryResult.size() == 2); - assertEquals(queryResult.get(0), "c"); - } - final String blank1 = csvContentIncludeBlanks.get(0).get(1); - final String blank2 = csvContentIncludeBlanks.get(1).get(1); - assertNotEquals(blank1, blank2); - assertNotEquals("c", blank1); - assertNotEquals("c", blank2); - - reasoner.exportQueryAnswersToCsv(queryAtom, excludeBlanksFilePath, false); - final List<List<String>> csvContentExcludeBlanks = CsvFileUtils.getCSVContent(excludeBlanksFilePath); - assertTrue(csvContentExcludeBlanks.isEmpty()); - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvTest.java deleted file mode 100644 index 060cd7a60..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromCsvTest.java +++ /dev/null @@ -1,113 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
 - * #L% - */ - - import static org.junit.Assert.assertEquals; - import static org.junit.Assert.assertFalse; - import static org.junit.Assert.assertTrue; - - import java.io.File; - import java.io.IOException; - import java.util.Arrays; - import java.util.List; - import java.util.Set; - - import org.junit.Test; - import org.mockito.internal.util.collections.Sets; - import org.semanticweb.vlog4j.core.model.api.Atom; - import org.semanticweb.vlog4j.core.model.api.Predicate; - import org.semanticweb.vlog4j.core.model.api.Term; - import org.semanticweb.vlog4j.core.model.implementation.Expressions; - import org.semanticweb.vlog4j.core.reasoner.Algorithm; - import org.semanticweb.vlog4j.core.reasoner.CsvFileUtils; - import org.semanticweb.vlog4j.core.reasoner.DataSource; - import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - - import karmaresearch.vlog.EDBConfigurationException; - - public class LoadDataFromCsvTest { - - private static final File UNARY_FACTS_CSV_FILE = new File(CsvFileUtils.CSV_INPORT_FOLDER + "unaryFacts.csv"); - - @Test - public void testGenerateDataSourcesConfigEmpty() throws ReasonerStateException, IOException { - try (final VLogReasoner reasoner = new VLogReasoner()) { - final String dataSourcesConfig = reasoner.generateDataSourcesConfig(); - assertTrue(dataSourcesConfig.isEmpty()); - } - } - - @Test - public void testLoadEmptyCsv() - throws IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final File emptyCsv = new File(CsvFileUtils.CSV_INPORT_FOLDER + "empty.csv"); - final CsvFileDataSource emptyDataSource = new CsvFileDataSource(emptyCsv); - - final Predicate predicateP = Expressions.makePredicate("p", 2); - final Atom queryAtom = Expressions.makeAtom(predicateP, Expressions.makeVariable("x"), - Expressions.makeVariable("y")); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFactsFromDataSource(predicateP, emptyDataSource); - reasoner.load(); - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - reasoner.reason(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, true)) { - assertFalse(answerQuery.hasNext()); - } - try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, false)) { - assertFalse(answerQuery.hasNext()); - } - reasoner.resetReasoner(); - reasoner.load(); - reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - reasoner.reason(); - try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, true)) { - assertFalse(answerQuery.hasNext()); - } - try (final QueryResultIterator answerQuery = reasoner.answerQuery(queryAtom, false)) { - assertFalse(answerQuery.hasNext()); - } - } - } - - @Test - public void testLoadUnaryFactsFromCsv() throws ReasonerStateException, EdbIdbSeparationException, - EDBConfigurationException, IOException, IncompatiblePredicateArityException { - final Predicate predicateP = Expressions.makePredicate("p", 1); - final Predicate predicateQ = Expressions.makePredicate("q", 1); - final DataSource dataSource = new CsvFileDataSource(UNARY_FACTS_CSV_FILE); - @SuppressWarnings("unchecked") - final Set<List<Term>> expectedPQueryResults = Sets.newSet(Arrays.asList(Expressions.makeConstant("c1")), - Arrays.asList(Expressions.makeConstant("c2"))); - try (final VLogReasoner reasoner = new VLogReasoner()) { -
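- // the same CSV file is registered as the data source of two different predicates; the query below checks that predicate p received the facts from the file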
reasoner.addFactsFromDataSource(predicateP, dataSource); - reasoner.addFactsFromDataSource(predicateQ, dataSource); - reasoner.load(); - final QueryResultIterator pQueryResultIterator = reasoner - .answerQuery(Expressions.makeAtom(predicateP, Expressions.makeVariable("x")), true); - final Set<List<Term>> pQueryResults = QueryResultsUtils.collectQueryResults(pQueryResultIterator); - assertEquals(expectedPQueryResults, pQueryResults); - } - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java deleted file mode 100644 index 8c9e46b41..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/LoadDataFromMemoryTest.java +++ /dev/null @@ -1,88 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - - import java.io.IOException; - - import org.junit.Test; - import org.semanticweb.vlog4j.core.model.api.Atom; - import org.semanticweb.vlog4j.core.model.api.Rule; - import org.semanticweb.vlog4j.core.model.api.Variable; - import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; - import org.semanticweb.vlog4j.core.model.implementation.Expressions; - import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; - import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - - import karmaresearch.vlog.EDBConfigurationException; - - public class LoadDataFromMemoryTest { - - @Test(expected = EdbIdbSeparationException.class) - public void loadEdbIdbNotSeparated() - throws EDBConfigurationException, IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makeAtom("q", vx), Expressions.makeAtom("p", vx)); - final Atom factIDBpredQ1 = Expressions.makeAtom("q", Expressions.makeConstant("c")); - final Atom factEDBpredQ2 = Expressions.makeAtom("q", Expressions.makeConstant("d"), - Expressions.makeConstant("d")); - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addRules(rule); - reasoner.addFacts(factIDBpredQ1, factEDBpredQ2); - reasoner.load(); - } - } - - @Test - public void loadEdbIdbSeparated() - throws EDBConfigurationException, IOException, EdbIdbSeparationException, ReasonerStateException, IncompatiblePredicateArityException { - final Variable vx = Expressions.makeVariable("x"); - final Rule rule = Expressions.makeRule(Expressions.makeAtom("q", vx), Expressions.makeAtom("p", vx)); - final Atom factEDBpred = Expressions.makeAtom("q", Expressions.makeConstant("d"), - Expressions.makeConstant("d")); - - try (final VLogReasoner
reasoner = new VLogReasoner()) { - reasoner.addRules(rule); - reasoner.addFacts(factEDBpred); - reasoner.load(); - } - } - - @Test(expected = IllegalArgumentException.class) - public void addFactsWithVariableTerms() throws ReasonerStateException { - final Atom factWithVariableTerms = Expressions.makeAtom("q", Expressions.makeConstant("d"), - Expressions.makeVariable("x")); - - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFacts(factWithVariableTerms); - } - } - - @Test(expected = IllegalArgumentException.class) - public void addFactsWithBlankTerms() throws ReasonerStateException { - final Atom factWithBlankTerms = Expressions.makeAtom("q", Expressions.makeConstant("d"), new BlankImpl("b")); - - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFacts(factWithBlankTerms); - } - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java deleted file mode 100644 index a18706762..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ModelToVLogConverterTest.java +++ /dev/null @@ -1,200 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
 - * #L% - */ - - import static org.junit.Assert.assertArrayEquals; - import static org.junit.Assert.assertEquals; - import static org.junit.Assert.assertNotNull; - import static org.junit.Assert.assertTrue; - - import java.util.ArrayList; - import java.util.Arrays; - import java.util.List; - - import org.junit.Test; - import org.semanticweb.vlog4j.core.model.api.Atom; - import org.semanticweb.vlog4j.core.model.api.Blank; - import org.semanticweb.vlog4j.core.model.api.Constant; - import org.semanticweb.vlog4j.core.model.api.Predicate; - import org.semanticweb.vlog4j.core.model.api.Rule; - import org.semanticweb.vlog4j.core.model.api.Term; - import org.semanticweb.vlog4j.core.model.api.Variable; - import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; - import org.semanticweb.vlog4j.core.model.implementation.Expressions; - import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; - - public class ModelToVLogConverterTest { - - @Test - public void testToVLogTermVariable() { - final Variable variable = Expressions.makeVariable("var"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "var"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(variable); - - assertNotNull(vLogTerm); - assertEquals(karmaresearch.vlog.Term.TermType.VARIABLE, vLogTerm.getTermType()); - assertEquals("var", vLogTerm.getName()); - assertEquals(expectedVLogTerm, vLogTerm); - } - - @Test - public void testToVLogTermConstant() { - final Constant constant = Expressions.makeConstant("const"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "const"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(constant); - - assertNotNull(vLogTerm); - assertEquals(karmaresearch.vlog.Term.TermType.CONSTANT, vLogTerm.getTermType()); - assertEquals("const", vLogTerm.getName()); - assertEquals(expectedVLogTerm, vLogTerm); - - } - - @Test - public void testToVLogTermBlank() { - final Blank blank = new BlankImpl("blank"); - final karmaresearch.vlog.Term expectedVLogTerm = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "blank"); - - final karmaresearch.vlog.Term vLogTerm = ModelToVLogConverter.toVLogTerm(blank); - - assertNotNull(vLogTerm); - assertEquals(karmaresearch.vlog.Term.TermType.BLANK, vLogTerm.getTermType()); - assertEquals("blank", vLogTerm.getName()); - assertEquals(expectedVLogTerm, vLogTerm); - } - - @Test - public void testToVLogTermArray() { - final Variable vx = Expressions.makeVariable("x"); - final Variable vxToo = Expressions.makeVariable("x"); - final Variable vy = Expressions.makeVariable("y"); - final Constant cx = Expressions.makeConstant("x"); - final Blank bx = new BlankImpl("x"); - final List<Term> terms = Arrays.asList(vx, cx, vxToo, bx, vy); - - final karmaresearch.vlog.Term expectedVx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expectedVy = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "y"); - final karmaresearch.vlog.Term expectedCx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "x"); - final karmaresearch.vlog.Term expectedBx = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "x"); - final karmaresearch.vlog.Term[] expectedTermArray = { expectedVx, expectedCx, expectedVx, expectedBx, expectedVy }; - - final karmaresearch.vlog.Term[] vLogTermArray =
ModelToVLogConverter.toVLogTermArray(terms); - assertArrayEquals(expectedTermArray, vLogTermArray); - } - - @Test - public void testToVLogTermArrayEmpty() { - final List<Term> terms = new ArrayList<>(); - final karmaresearch.vlog.Term[] vLogTermArray = ModelToVLogConverter.toVLogTermArray(terms); - - assertNotNull(vLogTermArray); - assertTrue(vLogTermArray.length == 0); - } - - @Test - public void testToVLogFactTuples() { - final Constant c1 = Expressions.makeConstant("1"); - final Constant c2 = Expressions.makeConstant("2"); - final Constant c3 = Expressions.makeConstant("3"); - final Atom atom1 = Expressions.makeAtom("p1", c1); - final Atom atom2 = Expressions.makeAtom("p2", c2, c3); - - final String[][] vLogTuples = ModelToVLogConverter.toVLogFactTuples(Arrays.asList(atom1, atom2)); - - final String[][] expectedTuples = { { "1" }, { "2", "3" } }; - assertArrayEquals(expectedTuples, vLogTuples); - } - - @Test - public void testToVLogPredicate() { - final Predicate predicate = Expressions.makePredicate("pred", 1); - final String vLogPredicate = ModelToVLogConverter.toVLogPredicate(predicate); - assertEquals("pred-1", vLogPredicate); - } - - @Test - public void testToVLogAtom() { - final Constant c = Expressions.makeConstant("c"); - final Variable x = Expressions.makeVariable("x"); - final Blank b = new BlankImpl("_:b"); - final Atom atom = Expressions.makeAtom("pred", c, x, b); - - final karmaresearch.vlog.Term expectedC = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.CONSTANT, "c"); - final karmaresearch.vlog.Term expectedX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expectedB = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.BLANK, "_:b"); - - final String expectedPredicateName = "pred" + ModelToVLogConverter.PREDICATE_ARITY_SUFFIX_SEPARATOR + 3; - final karmaresearch.vlog.Term[] expectedTerms = { expectedC, expectedX, expectedB }; - final karmaresearch.vlog.Atom expectedAtom = new karmaresearch.vlog.Atom(expectedPredicateName, expectedTerms); - - final karmaresearch.vlog.Atom vLogAtom = ModelToVLogConverter.toVLogAtom(atom); - assertEquals(expectedAtom, vLogAtom); - } - - @Test - public void testToVLogRuleArray() { - final Variable x = Expressions.makeVariable("x"); - final Variable y = Expressions.makeVariable("y"); - final Variable z = Expressions.makeVariable("z"); - final Variable w = Expressions.makeVariable("w"); - final Variable v = Expressions.makeVariable("v"); - final Atom atomP1X = Expressions.makeAtom("p1", x); - final Atom atomP2XY = Expressions.makeAtom("p2", x, y); - final Atom atomP3YZ = Expressions.makeAtom("p3", y, z); - final Rule rule1 = Expressions.makeRule(atomP1X, atomP2XY, atomP3YZ); - final Atom atomQXYZ = Expressions.makeAtom("q", x, y, z); - final Atom atomQYW = Expressions.makeAtom("q", y, w); - final Atom atomQ1XWZ = Expressions.makeAtom("q1", x, w, z); - final Atom atomQ2XV = Expressions.makeAtom("q2", x, v); - final Rule rule2 = Expressions.makeRule(atomQ2XV, atomQ1XWZ, atomQYW, atomQXYZ); - - final karmaresearch.vlog.Term expX = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "x"); - final karmaresearch.vlog.Term expY = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "y"); - final karmaresearch.vlog.Term expZ = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "z"); - final karmaresearch.vlog.Term expW = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "w"); - final
karmaresearch.vlog.Term expV = new karmaresearch.vlog.Term(karmaresearch.vlog.Term.TermType.VARIABLE, "v"); - final karmaresearch.vlog.Atom expAtomP1X = new karmaresearch.vlog.Atom("p1-1", expX); - final karmaresearch.vlog.Atom expAtomP2XY = new karmaresearch.vlog.Atom("p2-2", expX, expY); - final karmaresearch.vlog.Atom expAtomP3YZ = new karmaresearch.vlog.Atom("p3-2", expY, expZ); - final karmaresearch.vlog.Rule expectedRule1 = new karmaresearch.vlog.Rule(new karmaresearch.vlog.Atom[] { expAtomP1X }, - new karmaresearch.vlog.Atom[] { expAtomP2XY, expAtomP3YZ }); - final karmaresearch.vlog.Atom expAtomQXYZ = new karmaresearch.vlog.Atom("q-3", expX, expY, expZ); - final karmaresearch.vlog.Atom expAtomQYW = new karmaresearch.vlog.Atom("q-2", expY, expW); - final karmaresearch.vlog.Atom expAtomQ1XWZ = new karmaresearch.vlog.Atom("q1-3", expX, expW, expZ); - final karmaresearch.vlog.Atom expAtomQ2XV = new karmaresearch.vlog.Atom("q2-2", expX, expV); - final karmaresearch.vlog.Rule expectedRule2 = new karmaresearch.vlog.Rule(new karmaresearch.vlog.Atom[] { expAtomQ2XV }, - new karmaresearch.vlog.Atom[] { expAtomQ1XWZ, expAtomQYW, expAtomQXYZ }); - - final karmaresearch.vlog.Rule[] vLogRuleArray = ModelToVLogConverter.toVLogRuleArray(Arrays.asList(rule1, rule2)); - final karmaresearch.vlog.Rule[] expectedRuleArray = new karmaresearch.vlog.Rule[] { expectedRule1, expectedRule2 }; - assertArrayEquals(expectedRuleArray, vLogRuleArray); - } - - @Test - public void testVLogRuleRewritingStrategy() { - assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.NONE, ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.NONE)); - assertEquals(karmaresearch.vlog.VLog.RuleRewriteStrategy.AGGRESSIVE, - ModelToVLogConverter.toVLogRuleRewriteStrategy(RuleRewriteStrategy.SPLIT_HEAD_PIECES)); - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java deleted file mode 100644 index a22a20716..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerStateTest.java +++ /dev/null @@ -1,306 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.CsvFileUtils; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.RuleRewriteStrategy; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -public class ReasonerStateTest { - - private static final Predicate p = Expressions.makePredicate("p", 1); - private static final Predicate q = Expressions.makePredicate("q", 1); - private static final Variable x = Expressions.makeVariable("x"); - private static final Constant c = Expressions.makeConstant("c"); - // private static final Constant d = Expressions.makeConstant("d"); - private static final Atom exampleQueryAtom = Expressions.makeAtom("q", x); - - private static final Atom ruleHeadQx = Expressions.makeAtom(q, x); - private static final Atom ruleBodyPx = Expressions.makeAtom(p, x); - private static final Rule ruleQxPx = Expressions.makeRule(ruleHeadQx, ruleBodyPx); - private static final Atom factPc = Expressions.makeAtom(p, c); - // private static final Atom factPd = Expressions.makeAtom(q, d); - - @Test(expected = NullPointerException.class) - public void testSetAlgorithm() { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.setAlgorithm(null); - } - } - - @Test(expected = IllegalArgumentException.class) - public void testSetReasoningTimeout() { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.setReasoningTimeout(-3); - } - } - - @Test(expected = ReasonerStateException.class) - public void testAddRules1() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.load(); - reasoner.addRules(ruleQxPx); - } - } - - @Test - public void testAddRules2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.load(); - reasoner.resetReasoner(); - reasoner.addRules(ruleQxPx); - } - } - - @Test(expected = IllegalArgumentException.class) - public void testAddRules3() throws EdbIdbSeparationException, IOException, ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - final List<Rule> rules = new ArrayList<>(); - rules.add(ruleQxPx); - rules.add(null); - reasoner.addRules(rules); - } - } - - @Test(expected = ReasonerStateException.class) - public void testAddFacts1() throws EdbIdbSeparationException, IOException, ReasonerStateException, 
IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.load(); - reasoner.addFacts(factPc); - } - } - - @Test(expected = IllegalArgumentException.class) - public void testAddFacts2() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - final List<Atom> facts = new ArrayList<>(); - facts.add(factPc); - facts.add(null); - reasoner.addFacts(facts); - reasoner.load(); - } - } - - @Test - public void testResetBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.resetReasoner(); - } - } - - @Test(expected = NullPointerException.class) - public void setRuleRewriteStrategy1() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.setRuleRewriteStrategy(null); - } - } - - @Test(expected = ReasonerStateException.class) - public void setRuleRewriteStrategy2() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.load(); - reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); - } - } - - @Test - public void setRuleRewriteStrategy3() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.load(); - reasoner.resetReasoner(); - reasoner.setRuleRewriteStrategy(RuleRewriteStrategy.NONE); - } - } - - @Test - public void testResetDiscardInferences() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - for (final Algorithm algorithm : Algorithm.values()) { - // discard inferences regardless of the inference algorithm - try (final Reasoner reasoner = Reasoner.getInstance();) { - reasoner.addFacts(factPc); - reasoner.addRules(ruleQxPx); - reasoner.setAlgorithm(algorithm); - - reasoner.load(); - reasoner.reason(); - try (final QueryResultIterator queryQxIterator = reasoner.answerQuery(ruleHeadQx, true)) { - final Set<List<Term>> queryQxResults = QueryResultsUtils.collectQueryResults(queryQxIterator); - final Set<List<Term>> queryQxExpectedResults = new HashSet<List<Term>>(); - queryQxExpectedResults.add(Arrays.asList(c)); - assertEquals(queryQxResults, queryQxExpectedResults); - } - - reasoner.resetReasoner(); - reasoner.load(); - try (final QueryResultIterator queryQxIterator = reasoner.answerQuery(ruleHeadQx, true)) { - final Set<List<Term>> queryQxResults = QueryResultsUtils.collectQueryResults(queryQxIterator); - assertTrue(queryQxResults.isEmpty()); - } - try (final QueryResultIterator queryPxIterator = reasoner.answerQuery(ruleBodyPx, true)) { - final Set<List<Term>> queryPxResults = QueryResultsUtils.collectQueryResults(queryPxIterator); - final Set<List<Term>> queryPxExpectedResults = new HashSet<List<Term>>(); - queryPxExpectedResults.add(Arrays.asList(c)); - assertEquals(queryPxResults, queryPxExpectedResults); - } - } - } - } - - @Test - public void testResetKeepExplicitDatabase() throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance();) { - // assert p(c) - reasoner.addFacts(factPc); - // assert r(d) - final Predicate predicateR1 = Expressions.makePredicate("r", 1); - reasoner.addFactsFromDataSource(predicateR1, - new CsvFileDataSource(new File(CsvFileUtils.CSV_INPORT_FOLDER, "constantD.csv"))); - // p(?x) -> q(?x) - 
reasoner.addRules(ruleQxPx); - reasoner.load(); - checkExplicitFacts(reasoner, predicateR1); - - reasoner.resetReasoner(); - reasoner.load(); - checkExplicitFacts(reasoner, predicateR1); - - // check rule exists in knowledge base after reset - reasoner.reason(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(ruleHeadQx, true)) { - assertTrue(queryResultIterator.hasNext()); - assertEquals(Arrays.asList(c), queryResultIterator.next().getTerms()); - assertFalse(queryResultIterator.hasNext()); - } - - } - } - - private void checkExplicitFacts(final Reasoner reasoner, final Predicate predicateR1) - throws ReasonerStateException { - try (final QueryResultIterator queryResultIteratorPx = reasoner.answerQuery(ruleBodyPx, true)) { - assertTrue(queryResultIteratorPx.hasNext()); - assertEquals(factPc.getTerms(), queryResultIteratorPx.next().getTerms()); - assertFalse(queryResultIteratorPx.hasNext()); - } - try (final QueryResultIterator queryResultIteratorRx = reasoner - .answerQuery(Expressions.makeAtom(predicateR1, x), true)) { - assertTrue(queryResultIteratorRx.hasNext()); - assertEquals(Arrays.asList(Expressions.makeConstant("d")), queryResultIteratorRx.next().getTerms()); - assertFalse(queryResultIteratorRx.hasNext()); - } - } - - @Test - public void testResetEmptyKnowledgeBase() throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - final Reasoner reasoner = Reasoner.getInstance(); - // 1. load and reason - reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.reason(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.resetReasoner(); - - // 2. load again - reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.resetReasoner(); - - // 3. 
load and reason again - reasoner.load(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.reason(); - try (final QueryResultIterator queryResultIterator = reasoner.answerQuery(exampleQueryAtom, true)) { - assertFalse(queryResultIterator.hasNext()); - } - reasoner.close(); - } - - @Test(expected = ReasonerStateException.class) - public void testFailReasonBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.reason(); - } - } - - @Test(expected = ReasonerStateException.class) - public void testFailAnswerQueryBeforeLoad() throws ReasonerStateException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.answerQuery(exampleQueryAtom, true); - } - } - - @Test(expected = ReasonerStateException.class) - public void testFailExportQueryAnswerToCsvBeforeLoad() throws ReasonerStateException, IOException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.exportQueryAnswersToCsv(exampleQueryAtom, CsvFileUtils.CSV_EXPORT_FOLDER + "output.csv", true); - } - } - - @Test - public void testSuccessiveCloseAfterLoad() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.load(); - reasoner.close(); - reasoner.close(); - } - } - - @Test - public void testSuccessiveCloseBeforeLoad() { - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.close(); - reasoner.close(); - } - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java deleted file mode 100644 index 8c73d9fa9..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/ReasonerTest.java +++ /dev/null @@ -1,96 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -/* - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -import karmaresearch.vlog.EDBConfigurationException; - -public class ReasonerTest { - - @Test - public void testCloseRepeatedly() throws EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.close(); - } - - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.load(); - reasoner.close(); - reasoner.close(); - } - } - - @Test - public void testSimpleInference() - throws EDBConfigurationException, IOException, ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final String constantNameC = "c"; - final String constantNameD = "d"; - - final Constant constantC = Expressions.makeConstant(constantNameC); - final Constant constantD = Expressions.makeConstant(constantNameD); - final Variable x = Expressions.makeVariable("x"); - final Atom factAc = Expressions.makeAtom("A", constantC); - final Atom factAd = Expressions.makeAtom("A", constantD); - final Atom atomAx = Expressions.makeAtom("A", x); - final Atom atomBx = Expressions.makeAtom("B", x); - final Atom atomCx = Expressions.makeAtom("C", x); - final Rule ruleBxAx = Expressions.makeRule(atomBx, atomAx); - final Rule ruleCxBx = Expressions.makeRule(atomCx, atomBx); - - try (final VLogReasoner reasoner = new VLogReasoner()) { - reasoner.addFacts(factAc, factAd); - reasoner.addRules(ruleBxAx, ruleCxBx); - reasoner.load(); - - final QueryResultIterator cxQueryResultEnumBeforeReasoning = reasoner.answerQuery(atomCx, true); - assertFalse(cxQueryResultEnumBeforeReasoning.hasNext()); - - reasoner.reason(); - - final QueryResultIterator cxQueryResultEnumAfterReasoning = reasoner.answerQuery(atomCx, true); - final Set<List<Term>> actualResults = QueryResultsUtils.collectQueryResults(cxQueryResultEnumAfterReasoning); - - final Set<List<Term>> expectedResults = new HashSet<>( - Arrays.asList(Arrays.asList(constantC), Arrays.asList(constantD))); - assertEquals(expectedResults, actualResults); - - } - } -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java deleted file mode 100644 index ce44498b4..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/implementation/SparqlQueryResultDataSourceTest.java +++ /dev/null @@ -1,77 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.implementation; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.Arrays; -import java.util.LinkedHashSet; - -import org.apache.commons.lang3.StringUtils; -import org.junit.Test; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -public class SparqlQueryResultDataSourceTest { - - @Test - public void testToStringSimpleSparqlQueryResultDataSource() throws MalformedURLException { - final URL endpoint = new URL("http://query.wikidata.org/sparql"); - final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("a"))); - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, - "?a p:P22 ?b"); - final String configString = dataSource.toConfigString(); - final String expectedStringConfig = "EDB%1$d_predname=%2$s\n" + "EDB%1$d_type=SPARQL\n" - + "EDB%1$d_param0=http://query.wikidata.org/sparql\n" + "EDB%1$d_param1=b,a\n" - + "EDB%1$d_param2=?a p:P22 ?b\n"; - assertEquals(expectedStringConfig, configString); - } - - @Test - public void testUniqueVariableNamesQuery() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final URL endpoint = new URL("http://query.wikidata.org/sparql"); - final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("b"), Expressions.makeVariable("b"))); - - final SparqlQueryResultDataSource dataSource = new SparqlQueryResultDataSource(endpoint, queryVariables, - "?a p:P22 ?b"); - assertEquals(1, dataSource.getQueryVariables().size()); - } - - @Test(expected = IllegalArgumentException.class) - public void testEmptyQueryBody() - throws ReasonerStateException, EdbIdbSeparationException, IOException, IncompatiblePredicateArityException { - final URL endpoint = new URL("http://query.wikidata.org/sparql"); - final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>( - Arrays.asList(Expressions.makeVariable("a"))); - new SparqlQueryResultDataSource(endpoint, queryVariables, StringUtils.SPACE); - - } - -} diff --git a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvTest.java b/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvTest.java deleted file mode 100644 index a614cf769..000000000 --- a/vlog4j-core/src/test/java/org/semanticweb/vlog4j/core/reasoner/vlog/VLogDataFromCsvTest.java +++ /dev/null @@ -1,78 +0,0 @@ -package org.semanticweb.vlog4j.core.reasoner.vlog; - -/*- - * #%L - * VLog4j Core Components - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this 
file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.junit.Test; - -import karmaresearch.vlog.AlreadyStartedException; -import karmaresearch.vlog.Atom; -import karmaresearch.vlog.EDBConfigurationException; -import karmaresearch.vlog.NotStartedException; -import karmaresearch.vlog.Term; -import karmaresearch.vlog.TermQueryResultIterator; -import karmaresearch.vlog.VLog; - -public class VLogDataFromCsvTest { - private static final String CSV_INPUT_FOLDER = "src/test/data/input/"; - - private static final String unaryPredicateNameP = "p"; - private static final String unaryPredicateNameQ = "q"; - - private final List<List<Term>> expectedQueryResultUnary = Arrays.asList( - Arrays.asList(VLogExpressions.makeConstant("c1")), Arrays.asList(VLogExpressions.makeConstant("c2"))); - - @Test - public void testLoadDataFromCsvString() - throws AlreadyStartedException, EDBConfigurationException, IOException, NotStartedException { - final String unaryPredicatesEDBConfig = "EDB0_predname=" + unaryPredicateNameQ + "\n" + "EDB0_type=INMEMORY\n" - + "EDB0_param0=" + CSV_INPUT_FOLDER + "\n" + "EDB0_param1=unaryFacts\n" + "EDB1_predname=" - + unaryPredicateNameP + "\n" + "EDB1_type=INMEMORY\n" + "EDB1_param0=" + CSV_INPUT_FOLDER + "\n" - + "EDB1_param1=unaryFacts"; - final VLog vLog = new VLog(); - vLog.start(unaryPredicatesEDBConfig, false); - final TermQueryResultIterator queryResultsPIterator = vLog - .query(new Atom(unaryPredicateNameP, VLogExpressions.makeVariable("x"))); - final List<List<Term>> queryResultsP = new ArrayList<>( - VLogQueryResultUtils.collectResults(queryResultsPIterator)); - assertEquals(expectedQueryResultUnary, queryResultsP); - - final TermQueryResultIterator queryResultsQIterator = vLog - .query(new Atom(unaryPredicateNameQ, VLogExpressions.makeVariable("x"))); - final List<List<Term>> queryResultsQ = new ArrayList<>( - VLogQueryResultUtils.collectResults(queryResultsQIterator)); - assertEquals(expectedQueryResultUnary, queryResultsQ); - - final TermQueryResultIterator queryResultsRIterator = vLog - .query(new Atom("t", VLogExpressions.makeVariable("x"))); - assertFalse(queryResultsRIterator.hasNext()); - queryResultsRIterator.close(); - vLog.stop(); - } - -} diff --git a/vlog4j-examples/pom.xml b/vlog4j-examples/pom.xml deleted file mode 100644 index 2c4f219fa..000000000 --- a/vlog4j-examples/pom.xml +++ /dev/null @@ -1,26 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <modelVersion>4.0.0</modelVersion> - - <parent> - <groupId>org.semanticweb.vlog4j</groupId> - <artifactId>vlog4j-parent</artifactId> - <version>0.0.1</version> - </parent> - - <artifactId>vlog4j-examples</artifactId> - <packaging>pom</packaging> - - <name>VLog4j Examples</name> - <description>Contains examples and usage instructions describing the basic functionality of VLog4j</description> - - <dependencies> - <dependency> - <groupId>${project.groupId}</groupId> - <artifactId>vlog4j-core</artifactId> - <version>${project.version}</version> - </dependency> - </dependencies> -</project> diff --git a/vlog4j-examples/src/main/data/WheelEDB.csv b/vlog4j-examples/src/main/data/WheelEDB.csv deleted file mode 100644 index 1dadecd56..000000000 --- a/vlog4j-examples/src/main/data/WheelEDB.csv +++ /dev/null @@ -1,2 +0,0 @@ -redWheel -blueWheel \ No newline at end of file diff --git 
a/vlog4j-examples/src/main/data/bycicleEDB.csv b/vlog4j-examples/src/main/data/bycicleEDB.csv deleted file mode 100644 index ab3091e6c..000000000 --- a/vlog4j-examples/src/main/data/bycicleEDB.csv +++ /dev/null @@ -1,3 +0,0 @@ -redBike -blueBike -blackBike \ No newline at end of file diff --git a/vlog4j-examples/src/main/data/hasPartEDB.csv b/vlog4j-examples/src/main/data/hasPartEDB.csv deleted file mode 100644 index f37d3281c..000000000 --- a/vlog4j-examples/src/main/data/hasPartEDB.csv +++ /dev/null @@ -1,2 +0,0 @@ -redBike,redWheel -blueBike,blueWheel \ No newline at end of file diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtil.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtil.java deleted file mode 100644 index 74e8d3b83..000000000 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/ExamplesUtil.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.semanticweb.vlog4j.examples; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; - -public class ExamplesUtil { - static void printOutQueryAnswers(Atom queryAtom, Reasoner reasoner) throws ReasonerStateException { - System.out.println(); - System.out.println("Answers to query " + queryAtom + ":"); - try (QueryResultIterator answers = reasoner.answerQuery(queryAtom, true)) { - while (answers.hasNext()) { - System.out.println(" - " + answers.next()); - } - System.out.println(); - } - } -} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/RestrictedChaseExecutionInMemory.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/RestrictedChaseExecutionInMemory.java deleted file mode 100644 index 05ec7fe28..000000000 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/RestrictedChaseExecutionInMemory.java +++ /dev/null @@ -1,124 +0,0 @@ -package org.semanticweb.vlog4j.examples; - -import java.io.IOException; - -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import 
org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; - -/*- - * #%L - * examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -public class RestrictedChaseExecutionInMemory { - public static void main(String[] args) - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - - // 1. Instantiating entities, rules and facts - final Predicate bicycleIDB = Expressions.makePredicate("BicycleIDB", 1); - final Predicate bicycleEDB = Expressions.makePredicate("BicycleEDB", 1); - final Predicate wheelIDB = Expressions.makePredicate("WheelIDB", 1); - final Predicate wheelEDB = Expressions.makePredicate("WheelEDB", 1); - final Predicate hasPartIDB = Expressions.makePredicate("HasPartIDB", 2); - final Predicate hasPartEDB = Expressions.makePredicate("HasPartEDB", 2); - final Predicate isPartOfIDB = Expressions.makePredicate("IsPartOfIDB", 2); - final Predicate isPartOfEDB = Expressions.makePredicate("IsPartOfEDB", 2); - final Constant bicycle1 = Expressions.makeConstant("bicycle1"); - final Constant bicycle2 = Expressions.makeConstant("bicycle2"); - final Constant wheel1 = Expressions.makeConstant("wheel1"); - final Variable x = Expressions.makeVariable("x"); - final Variable y = Expressions.makeVariable("y"); - - // BicycleIDB(?x) :- BicycleEDB(?x) . - final Atom bicycleIDBX = Expressions.makeAtom(bicycleIDB, x); - final Atom bicycleEDBX = Expressions.makeAtom(bicycleEDB, x); - final Rule rule1 = Expressions.makeRule(bicycleIDBX, bicycleEDBX); - - // WheelIDB(?x) :- WheelEDB(?x) . - final Atom wheelIDBX = Expressions.makeAtom(wheelIDB, x); - final Atom wheelEDBX = Expressions.makeAtom(wheelEDB, x); - final Rule rule2 = Expressions.makeRule(wheelIDBX, wheelEDBX); - - // hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . - final Atom hasPartIDBXY = Expressions.makeAtom(hasPartIDB, x, y); - final Atom hasPartEDBXY = Expressions.makeAtom(hasPartEDB, x, y); - final Rule rule3 = Expressions.makeRule(hasPartIDBXY, hasPartEDBXY); - - // isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . - final Atom isPartOfIDBXY = Expressions.makeAtom(isPartOfIDB, x, y); - final Atom isPartOfEDBXY = Expressions.makeAtom(isPartOfEDB, x, y); - final Rule rule4 = Expressions.makeRule(isPartOfIDBXY, isPartOfEDBXY); - - // HasPartIDB(?x, !y), WheelIDB(!y) :- BicycleIDB(?x) . - final Atom wheelIDBY = Expressions.makeAtom(wheelIDB, y); - final Rule rule5 = Expressions.makeRule(Expressions.makeConjunction(hasPartIDBXY, wheelIDBY), - Expressions.makeConjunction(bicycleIDBX)); - - // IsPartOfIDB(?x, !y), BicycleIDB(!y) :- WheelIDB(?x) . - final Atom bicycleIDBY = Expressions.makeAtom(bicycleIDB, y); - final Rule rule6 = Expressions.makeRule(Expressions.makeConjunction(isPartOfIDBXY, bicycleIDBY), - Expressions.makeConjunction(wheelIDBX)); - - // IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . 
- final Atom hasPartIDBYX = Expressions.makeAtom(hasPartIDB, y, x); - final Rule rule7 = Expressions.makeRule(isPartOfIDBXY, hasPartIDBYX); - - // HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . - final Atom isPartOfIDBYX = Expressions.makeAtom(isPartOfIDB, y, x); - final Rule rule8 = Expressions.makeRule(hasPartIDBXY, isPartOfIDBYX); - - // BicycleEDB(bicycle1) . - final Atom fact1 = Expressions.makeAtom(bicycleEDB, bicycle1); - - // HasPartEDB(bicycle1, wheel1) . - final Atom fact2 = Expressions.makeAtom(hasPartEDB, bicycle1, wheel1); - - // WheelEDB(wheel1) . - final Atom fact3 = Expressions.makeAtom(wheelEDB, wheel1); - - // BicycleEDB(bicycle2) . - final Atom fact4 = Expressions.makeAtom(bicycleEDB, bicycle2); - - // 2. Loading, reasoning, and querying. - // Use try-with-resources, or remember to call close() to free the reasoner - // resources. - try (Reasoner reasoner = Reasoner.getInstance()) { - reasoner.setAlgorithm(Algorithm.RESTRICTED_CHASE); - - reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); - reasoner.addFacts(fact1, fact2, fact3, fact4); - reasoner.load(); - - ExamplesUtil.printOutQueryAnswers(hasPartEDBXY, reasoner); - - reasoner.reason(); - - ExamplesUtil.printOutQueryAnswers(hasPartIDBXY, reasoner); - } - } - -} diff --git a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java b/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java deleted file mode 100644 index 2cacb0423..000000000 --- a/vlog4j-examples/src/main/java/org/semanticweb/vlog4j/examples/SkolemChaseExecutionFromToFile.java +++ /dev/null @@ -1,138 +0,0 @@ -package org.semanticweb.vlog4j.examples; - -/*- - * #%L - * VLog4j Examples - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.io.File; -import java.io.IOException; - -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Algorithm; -import org.semanticweb.vlog4j.core.reasoner.DataSource; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.CsvFileDataSource; - -public class SkolemChaseExecutionFromToFile { - - public static void main(String[] args) - throws EdbIdbSeparationException, IOException, ReasonerStateException, IncompatiblePredicateArityException { - - // 1. Instantiating entities and rules. 
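- // VLog keeps predicates whose facts are given explicitly (EDB) apart from - // predicates whose facts are derived by rules (IDB), as the imported - // EdbIdbSeparationException suggests; each relation is therefore modelled - // below by a pair of predicates, with rules 1-4 copying the explicit EDB - // facts into their IDB counterparts.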
- final Predicate bicycleIDB = Expressions.makePredicate("BicycleIDB", 1); - final Predicate bicycleEDB = Expressions.makePredicate("BicycleEDB", 1); - final Predicate wheelIDB = Expressions.makePredicate("WheelIDB", 1); - final Predicate wheelEDB = Expressions.makePredicate("WheelEDB", 1); - final Predicate hasPartIDB = Expressions.makePredicate("HasPartIDB", 2); - final Predicate hasPartEDB = Expressions.makePredicate("HasPartEDB", 2); - final Predicate isPartOfIDB = Expressions.makePredicate("IsPartOfIDB", 2); - final Predicate isPartOfEDB = Expressions.makePredicate("IsPartOfEDB", 2); - final Variable x = Expressions.makeVariable("x"); - final Variable y = Expressions.makeVariable("y"); - - // BicycleIDB(?x) :- BicycleEDB(?x) . - final Atom bicycleIDBX = Expressions.makeAtom(bicycleIDB, x); - final Atom bicycleEDBX = Expressions.makeAtom(bicycleEDB, x); - final Rule rule1 = Expressions.makeRule(bicycleIDBX, bicycleEDBX); - - // WheelIDB(?x) :- WheelEDB(?x) . - final Atom wheelIDBX = Expressions.makeAtom(wheelIDB, x); - final Atom wheelEDBX = Expressions.makeAtom(wheelEDB, x); - final Rule rule2 = Expressions.makeRule(wheelIDBX, wheelEDBX); - - // hasPartIDB(?x, ?y) :- hasPartEDB(?x, ?y) . - final Atom hasPartIDBXY = Expressions.makeAtom(hasPartIDB, x, y); - final Atom hasPartEDBXY = Expressions.makeAtom(hasPartEDB, x, y); - final Rule rule3 = Expressions.makeRule(hasPartIDBXY, hasPartEDBXY); - - // isPartOfIDB(?x, ?y) :- isPartOfEDB(?x, ?y) . - final Atom isPartOfIDBXY = Expressions.makeAtom(isPartOfIDB, x, y); - final Atom isPartOfEDBXY = Expressions.makeAtom(isPartOfEDB, x, y); - final Rule rule4 = Expressions.makeRule(isPartOfIDBXY, isPartOfEDBXY); - - // HasPartIDB(?x, !y), WheelIDB(!y) :- BicycleIDB(?x) . - final Atom wheelIDBY = Expressions.makeAtom(wheelIDB, y); - final Rule rule5 = Expressions.makeRule(Expressions.makeConjunction(hasPartIDBXY, wheelIDBY), - Expressions.makeConjunction(bicycleIDBX)); - - // IsPartOfIDB(?x, !y) :- WheelIDB(?x) . - // Atom bycicleIDBY = Expressions.makeAtom(bicycleIDB, y); - final Rule rule6 = Expressions.makeRule(Expressions.makeConjunction(isPartOfIDBXY), - Expressions.makeConjunction(wheelIDBX)); - - // IsPartOfIDB(?x, ?y) :- HasPartIDB(?y, ?x) . - final Atom hasPartIDBYX = Expressions.makeAtom(hasPartIDB, y, x); - final Rule rule7 = Expressions.makeRule(isPartOfIDBXY, hasPartIDBYX); - - // HasPartIDB(?x, ?y) :- IsPartOfIDB(?y, ?x) . - final Atom isPartOfIDBYX = Expressions.makeAtom(isPartOfIDB, y, x); - final Rule rule8 = Expressions.makeRule(hasPartIDBXY, isPartOfIDBYX); - - // 2. Loading, reasoning, and querying. 
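- // In outline, the calls below follow the reasoner life cycle used by these - // examples: obtain a Reasoner, configure it (setAlgorithm), add rules and - // data sources, then load(), reason(), answer or export queries, and - // finally close() to free the reasoner resources.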
- final Reasoner reasoner = Reasoner.getInstance(); - reasoner.setAlgorithm(Algorithm.SKOLEM_CHASE); - - reasoner.addRules(rule1, rule2, rule3, rule4, rule5, rule6, rule7, rule8); - - final String filesDirPath = "src" + File.separator + "main" + File.separator + "data"; - final DataSource bicycleEDBPath = new CsvFileDataSource( - new File(filesDirPath + File.separator + "bycicleEDB.csv")); - reasoner.addFactsFromDataSource(bicycleEDB, bicycleEDBPath); - final DataSource hasPartPath = new CsvFileDataSource( - new File(filesDirPath + File.separator + "hasPartEDB.csv")); - reasoner.addFactsFromDataSource(hasPartEDB, hasPartPath); - final DataSource wheelPath = new CsvFileDataSource(new File(filesDirPath + File.separator + "WheelEDB.csv")); - reasoner.addFactsFromDataSource(wheelEDB, wheelPath); - - reasoner.load(); - - ExamplesUtil.printOutQueryAnswers(hasPartEDBXY, reasoner); - - reasoner.reason(); - - ExamplesUtil.printOutQueryAnswers(hasPartIDBXY, reasoner); - - // 3. Exporting - reasoner.exportQueryAnswersToCsv(hasPartIDBXY, filesDirPath + File.separator + "hasPartIDBXYWithBlanks.csv", - true); - - reasoner.exportQueryAnswersToCsv(hasPartIDBXY, filesDirPath + File.separator + "hasPartIDBXYWithoutBlanks.csv", - false); - - final Constant redBike = Expressions.makeConstant("redBike"); - final Atom hasPartIDBRedBikeY = Expressions.makeAtom(hasPartIDB, redBike, y); - reasoner.exportQueryAnswersToCsv(hasPartIDBRedBikeY, - filesDirPath + File.separator + "hasPartIDBRedBikeYWithBlanks.csv", true); - - // 4. Closing - // Use try-with-resources, or remember to call close() to free the reasoner - // resources. - reasoner.close(); - - } - -} diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java deleted file mode 100644 index 6563ba2df..000000000 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverter.java +++ /dev/null @@ -1,438 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.semanticweb.owlapi.apibinding.OWLManager; - -/*- - * #%L - * VLog4j OWL API Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; -import org.semanticweb.owlapi.model.OWLAxiomVisitor; -import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; -import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; -import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; -import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; -import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom; -import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; -import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom; -import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; -import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; -import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom; -import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; -import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom; -import org.semanticweb.owlapi.model.OWLHasKeyAxiom; -import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom; -import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom; -import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom; -import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom; -import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; -import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; -import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; -import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; -import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; -import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; -import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; -import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; -import org.semanticweb.owlapi.model.SWRLRule; -import org.semanticweb.owlapi.util.OWLAxiomVisitorAdapter; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Conjunction; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.ConjunctionImpl; -import org.semanticweb.vlog4j.core.model.implementation.RuleImpl; -import org.semanticweb.vlog4j.core.model.implementation.VariableImpl; - -/** - * Class for converting OWL axioms to rules. - * - * @author Markus Kroetzsch - * - */ -public class OwlAxiomToRulesConverter extends OWLAxiomVisitorAdapter implements OWLAxiomVisitor { - - static OWLDataFactory owlDataFactory = OWLManager.getOWLDataFactory(); - - final Set<Rule> rules = new HashSet<>(); - final Set<Atom> facts = new HashSet<>(); - final Variable frontierVariable = new VariableImpl("X"); - int freshVariableCounter = 0; - - /** - * Returns a fresh variable, which can be used as auxiliary variable in the - * current axiom's translation. 
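- * Note that {@link #startAxiomConversion()} resets the underlying counter, so the generated names are only fresh within the translation of a single axiom.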
- * - * @return a variable - */ - Variable getFreshVariable() { - this.freshVariableCounter++; - return new VariableImpl("Y" + this.freshVariableCounter); - } - - void addRule(AbstractClassToRuleConverter converter) { - if (converter.isTautology()) { - return; - } - Conjunction headConjunction; - if (converter.head.isFalseOrEmpty()) { - headConjunction = new ConjunctionImpl( - Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.mainTerm))); - } else { - headConjunction = new ConjunctionImpl(converter.head.getConjuncts()); - } - - Conjunction bodyConjunction; - if (converter.body.isTrueOrEmpty()) { - bodyConjunction = new ConjunctionImpl(Arrays.asList(OwlToRulesConversionHelper.getTop(converter.mainTerm))); - if (headConjunction.getVariables().isEmpty()) { - for (Atom conjunct : headConjunction.getAtoms()) { - this.facts.add(conjunct); - } - return; - } - } else { - bodyConjunction = new ConjunctionImpl(converter.body.getConjuncts()); - } - - this.rules.add(new RuleImpl(headConjunction, bodyConjunction)); - } - - /** - * Resets the internal counter used for generating fresh variables. - */ - void startAxiomConversion() { - this.freshVariableCounter = 0; - } - - void addSubClassAxiom(OWLClassExpression subClass, OWLClassExpression superClass) { - startAxiomConversion(); - - ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(this.frontierVariable, this); - superClass.accept(headConverter); - ClassToRuleBodyConverter bodyConverter = new ClassToRuleBodyConverter(this.frontierVariable, headConverter.body, - headConverter.head, this); - bodyConverter.handleDisjunction(subClass, this.frontierVariable); - addRule(bodyConverter); - } - - @Override - public void visit(OWLSubClassOfAxiom axiom) { - addSubClassAxiom(axiom.getSubClass(), axiom.getSuperClass()); - } - - @Override - public void visit(OWLNegativeObjectPropertyAssertionAxiom axiom) { - Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject()); - Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject()); - Atom atom = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), subject, object); - Atom bot = OwlToRulesConversionHelper.getBottom(subject); - this.rules.add(new RuleImpl(new ConjunctionImpl(Arrays.asList(bot)), new ConjunctionImpl(Arrays.asList(atom)))); - } - - @Override - public void visit(OWLAsymmetricObjectPropertyAxiom axiom) { - startAxiomConversion(); - Variable secondVariable = getFreshVariable(); - Atom atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), this.frontierVariable, - secondVariable); - Atom atom2 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), secondVariable, - this.frontierVariable); - this.rules.add(new RuleImpl( - new ConjunctionImpl(Arrays.asList(OwlToRulesConversionHelper.getBottom(this.frontierVariable))), - new ConjunctionImpl(Arrays.asList(atom1, atom2)))); - } - - @Override - public void visit(OWLReflexiveObjectPropertyAxiom axiom) { - Atom atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), this.frontierVariable, - this.frontierVariable); - this.rules.add(new RuleImpl(new ConjunctionImpl(Arrays.asList(atom1)), - new ConjunctionImpl(Arrays.asList(OwlToRulesConversionHelper.getTop(this.frontierVariable))))); - } - - @Override - public void visit(OWLDisjointClassesAxiom axiom) { - // TODO Efficient implementation for lists of disjoint classes needed - - } - - @Override - public void visit(OWLDataPropertyDomainAxiom axiom) { - throw new 
OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - } - - @Override - public void visit(OWLObjectPropertyDomainAxiom axiom) { - OWLClassExpression existsProperty = owlDataFactory.getOWLObjectSomeValuesFrom(axiom.getProperty(), - owlDataFactory.getOWLThing()); - addSubClassAxiom(existsProperty, axiom.getDomain()); - } - - @Override - public void visit(OWLEquivalentObjectPropertiesAxiom axiom) { - startAxiomConversion(); - Variable secondVariable = getFreshVariable(); - - Atom firstAtom = null; - Atom previousAtom = null; - Atom currentAtom = null; - for (OWLObjectPropertyExpression owlObjectPropertyExpression : axiom.getProperties()) { - currentAtom = OwlToRulesConversionHelper.getObjectPropertyAtom(owlObjectPropertyExpression, - this.frontierVariable, secondVariable); - if (previousAtom == null) { - firstAtom = currentAtom; - } else { - this.rules.add(new RuleImpl(new ConjunctionImpl(Arrays.asList(currentAtom)), - new ConjunctionImpl(Arrays.asList(previousAtom)))); - } - previousAtom = currentAtom; - } - - if (currentAtom != null) { - this.rules.add(new RuleImpl(new ConjunctionImpl(Arrays.asList(firstAtom)), - new ConjunctionImpl(Arrays.asList(currentAtom)))); - } - } - - @Override - public void visit(OWLNegativeDataPropertyAssertionAxiom axiom) { - throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - - } - - @Override - public void visit(OWLDifferentIndividualsAxiom axiom) { - throw new OwlFeatureNotSupportedException( - "DifferentIndividuals currently not supported, due to lack of equality support."); - } - - @Override - public void visit(OWLDisjointDataPropertiesAxiom axiom) { - throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - } - - @Override - public void visit(OWLDisjointObjectPropertiesAxiom axiom) { - // TODO Efficient implementation for lists of disjoint properties needed - - } - - @Override - public void visit(OWLObjectPropertyRangeAxiom axiom) { - startAxiomConversion(); - OWLClassExpression forallPropertyDomain = owlDataFactory.getOWLObjectAllValuesFrom(axiom.getProperty(), - axiom.getRange()); - ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(this.frontierVariable, this); - forallPropertyDomain.accept(headConverter); - addRule(headConverter); - } - - @Override - public void visit(OWLObjectPropertyAssertionAxiom axiom) { - Term subject = OwlToRulesConversionHelper.getIndividualTerm(axiom.getSubject()); - Term object = OwlToRulesConversionHelper.getIndividualTerm(axiom.getObject()); - this.facts.add(OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), subject, object)); - } - - @Override - public void visit(OWLFunctionalObjectPropertyAxiom axiom) { - throw new OwlFeatureNotSupportedException( - "FunctionalObjectProperty currently not supported, due to lack of equality support."); - } - - @Override - public void visit(OWLSubObjectPropertyOfAxiom axiom) { - startAxiomConversion(); - Variable secondVariable = getFreshVariable(); - Atom subRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSubProperty(), this.frontierVariable, - secondVariable); - Atom superRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSuperProperty(), - this.frontierVariable, secondVariable); - this.rules.add(new RuleImpl(new ConjunctionImpl(Arrays.asList(superRole)), - new ConjunctionImpl(Arrays.asList(subRole)))); - } - - @Override - public void visit(OWLDisjointUnionAxiom axiom) { - throw new 
OwlFeatureNotSupportedException( - "OWL DisjointUnion not supported, since the cases where it would be expressible in disjunction-free rules are not useful."); - } - - @Override - public void visit(OWLSymmetricObjectPropertyAxiom axiom) { - startAxiomConversion(); - Variable secondVariable = getFreshVariable(); - Atom atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), this.frontierVariable, - secondVariable); - Atom atom2 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), secondVariable, - this.frontierVariable); - this.rules.add( - new RuleImpl(new ConjunctionImpl(Arrays.asList(atom2)), new ConjunctionImpl(Arrays.asList(atom1)))); - } - - @Override - public void visit(OWLDataPropertyRangeAxiom axiom) { - throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - } - - @Override - public void visit(OWLFunctionalDataPropertyAxiom axiom) { - throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - } - - @Override - public void visit(OWLEquivalentDataPropertiesAxiom axiom) { - throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - } - - @Override - public void visit(OWLClassAssertionAxiom axiom) { - startAxiomConversion(); - Term term = OwlToRulesConversionHelper.getIndividualTerm(axiom.getIndividual()); - ClassToRuleHeadConverter headConverter = new ClassToRuleHeadConverter(term, this); - axiom.getClassExpression().accept(headConverter); - addRule(headConverter); - } - - @Override - public void visit(OWLEquivalentClassesAxiom axiom) { - OWLClassExpression firstClass = null; - OWLClassExpression previousClass = null; - OWLClassExpression currentClass = null; - for (OWLClassExpression owlClassExpression : axiom.getClassExpressions()) { - currentClass = owlClassExpression; - if (previousClass == null) { - firstClass = currentClass; - } else { - addSubClassAxiom(previousClass, currentClass); - } - previousClass = currentClass; - } - - if (currentClass != null) { - addSubClassAxiom(currentClass, firstClass); - } - } - - @Override - public void visit(OWLDataPropertyAssertionAxiom axiom) { - throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - } - - @Override - public void visit(OWLTransitiveObjectPropertyAxiom axiom) { - startAxiomConversion(); - Variable var1 = getFreshVariable(); - Variable var2 = getFreshVariable(); - Atom atom1 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), this.frontierVariable, var1); - Atom atom2 = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), var1, var2); - Atom atomHead = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), this.frontierVariable, - var2); - this.rules.add(new RuleImpl(new ConjunctionImpl(Arrays.asList(atomHead)), - new ConjunctionImpl(Arrays.asList(atom1, atom2)))); - } - - @Override - public void visit(OWLIrreflexiveObjectPropertyAxiom axiom) { - Atom atomSelf = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getProperty(), this.frontierVariable, - this.frontierVariable); - this.rules.add(new RuleImpl( - new ConjunctionImpl(Arrays.asList(OwlToRulesConversionHelper.getBottom(this.frontierVariable))), - new ConjunctionImpl(Arrays.asList(atomSelf)))); - } - - @Override - public void visit(OWLSubDataPropertyOfAxiom axiom) { - throw new OwlFeatureNotSupportedException("OWL datatypes currently not supported in rules."); - } - - @Override - public void 
visit(OWLInverseFunctionalObjectPropertyAxiom axiom) { - throw new OwlFeatureNotSupportedException( - "InverseFunctionalObjectProperty currently not supported, due to lack of equality support."); - } - - @Override - public void visit(OWLSameIndividualAxiom axiom) { - throw new OwlFeatureNotSupportedException( - "SameIndividual currently not supported, due to lack of equality support."); - } - - @Override - public void visit(OWLSubPropertyChainOfAxiom axiom) { - startAxiomConversion(); - Variable previousVariable = this.frontierVariable; - Variable currentVariable = null; - final List<Atom> body = new ArrayList<>(); - - for (OWLObjectPropertyExpression owlObjectPropertyExpression : axiom.getPropertyChain()) { - currentVariable = getFreshVariable(); - body.add(OwlToRulesConversionHelper.getObjectPropertyAtom(owlObjectPropertyExpression, previousVariable, - currentVariable)); - previousVariable = currentVariable; - } - - Atom headAtom = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSuperProperty(), - this.frontierVariable, currentVariable); - - this.rules.add(new RuleImpl(new ConjunctionImpl(Arrays.asList(headAtom)), new ConjunctionImpl(body))); - } - - @Override - public void visit(OWLInverseObjectPropertiesAxiom axiom) { - startAxiomConversion(); - Variable secondVariable = getFreshVariable(); - Atom firstRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getFirstProperty(), - this.frontierVariable, secondVariable); - Atom secondRole = OwlToRulesConversionHelper.getObjectPropertyAtom(axiom.getSecondProperty(), secondVariable, - this.frontierVariable); - Conjunction firstRoleConjunction = new ConjunctionImpl(Arrays.asList(firstRole)); - Conjunction secondRoleConjunction = new ConjunctionImpl(Arrays.asList(secondRole)); - this.rules.add(new RuleImpl(secondRoleConjunction, firstRoleConjunction)); - this.rules.add(new RuleImpl(firstRoleConjunction, secondRoleConjunction)); - } - - @Override - public void visit(OWLHasKeyAxiom axiom) { - throw new OwlFeatureNotSupportedException("HasKey currently not supported, due to lack of equality support."); - } - - @Override - public void visit(SWRLRule rule) { - // TODO support SWRL rules - - } - -} diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java deleted file mode 100644 index 92275766f..000000000 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConversionHelper.java +++ /dev/null @@ -1,160 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -import java.io.UnsupportedEncodingException; -import java.math.BigInteger; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.Arrays; -import java.util.Collection; - -import org.semanticweb.owlapi.model.OWLAnonymousIndividual; - -/*- - * #%L - * VLog4j OWL API Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLNamedIndividual; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.AtomImpl; -import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; -import org.semanticweb.vlog4j.core.model.implementation.ConstantImpl; -import org.semanticweb.vlog4j.core.model.implementation.PredicateImpl; -import org.semanticweb.vlog4j.owlapi.AbstractClassToRuleConverter.SimpleConjunction; - -/** - * Utility class for helper functions that are used to convert OWL API objects - * to rules. - * - * @author Markus Kroetzsch - * - */ -public class OwlToRulesConversionHelper { - - /** - * Returns a {@link Term} to represent an {@link OWLIndividual} in rules. - * - * @param owlIndividual - * the individual to get a term for - * @return a suitable term - */ - public static Term getIndividualTerm(OWLIndividual owlIndividual) { - if (owlIndividual instanceof OWLNamedIndividual) { - return new ConstantImpl(((OWLNamedIndividual) owlIndividual).getIRI().toString()); - } else if (owlIndividual instanceof OWLAnonymousIndividual) { - return new BlankImpl(((OWLAnonymousIndividual) owlIndividual).getID().toString()); - } else { - throw new OwlFeatureNotSupportedException( - "Could not convert OWL individual '" + owlIndividual.toString() + "' to a term."); - } - } - - /** - * Returns a {@link Predicate} to represent an {@link OWLClass} in rules. - * - * @param owlClass - * the atomic class to get a predicate for - * @return a suitable unary predicate - */ - public static Predicate getClassPredicate(OWLClass owlClass) { - return new PredicateImpl(owlClass.getIRI().toString(), 1); - } - - /** - * Returns a {@link Predicate} to represent an {@link OWLObjectProperty} in - * rules. - * - * @param owlObjectProperty - * the atomic property to get a predicate for - * @return a suitable binary predicate - */ - public static Predicate getObjectPropertyPredicate(OWLObjectProperty owlObjectProperty) { - return new PredicateImpl(owlObjectProperty.getIRI().toString(), 2); - } - - public static Predicate getAuxiliaryClassPredicate(Collection owlClassExpressions) { - try { - MessageDigest messageDigest = MessageDigest.getInstance("MD5"); - for (OWLClassExpression owlClassExpression : owlClassExpressions) { - messageDigest.update(owlClassExpression.toString().getBytes("UTF-8")); - } - byte[] digest = messageDigest.digest(); - BigInteger bigInt = new BigInteger(1, digest); - String hashtext = bigInt.toString(16); - return new PredicateImpl("aux-" + hashtext, 1); - } catch (NoSuchAlgorithmException | UnsupportedEncodingException e) { - throw new RuntimeException("We are missing some core functionality of Java here", e); - } - } - - /** - * Adds a binary predicate for a given OWL object property expression to the - * given conjunction. If the expression is an inverse, source and target terms - * are swapped. If the expression is top or bottom, it is handled appropriately. 
- * - * @param owlObjectPropertyExpression - * the property expression - * @param sourceTerm - * the term that should be in the first parameter position of the - * original expression - * @param targetTerm - * the term that should be in the second parameter position of the - * original expression - */ - static void addConjunctForPropertyExpression(OWLObjectPropertyExpression owlObjectPropertyExpression, - Term sourceTerm, Term targetTerm, SimpleConjunction conjuncts) { - if (owlObjectPropertyExpression.isOWLTopObjectProperty()) { - conjuncts.init(); - } else if (owlObjectPropertyExpression.isOWLBottomObjectProperty()) { - conjuncts.makeFalse(); - } else { - conjuncts.add(getObjectPropertyAtom(owlObjectPropertyExpression, sourceTerm, targetTerm)); - } - } - - public static Atom getObjectPropertyAtom(OWLObjectPropertyExpression owlObjectPropertyExpression, Term sourceTerm, - Term targetTerm) { - if (owlObjectPropertyExpression.isAnonymous()) { - Predicate predicate = OwlToRulesConversionHelper - .getObjectPropertyPredicate(owlObjectPropertyExpression.getInverseProperty().asOWLObjectProperty()); - return new AtomImpl(predicate, Arrays.asList(targetTerm, sourceTerm)); - } else { - Predicate predicate = OwlToRulesConversionHelper - .getObjectPropertyPredicate(owlObjectPropertyExpression.asOWLObjectProperty()); - return new AtomImpl(predicate, Arrays.asList(sourceTerm, targetTerm)); - } - } - - public static Atom getBottom(Term term) { - Predicate predicate = new PredicateImpl("http://www.w3.org/2002/07/owl#Nothing", 1); - return new AtomImpl(predicate, Arrays.asList(term)); - } - - public static Atom getTop(Term term) { - Predicate predicate = new PredicateImpl("http://www.w3.org/2002/07/owl#Thing", 1); - return new AtomImpl(predicate, Arrays.asList(term)); - } - -} diff --git a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java b/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java deleted file mode 100644 index 202878318..000000000 --- a/vlog4j-owlapi/src/main/java/org/semanticweb/vlog4j/owlapi/OwlToRulesConverter.java +++ /dev/null @@ -1,75 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -/*- - * #%L - * VLog4j OWL API Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Set; - -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLOntology; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Rule; - -/** - * Class for converting OWL ontologies to rules. - * - * @author Markus Kroetzsch - * - */ -public class OwlToRulesConverter { - - final OwlAxiomToRulesConverter owlAxiomToRulesConverter = new OwlAxiomToRulesConverter(); - - /** - * Converts the given OWL ontology to rules and facts, and adds the result to - * the internal buffer of rules and facts for later retrieval. 
- * - * @param owlOntology - * the ontology - */ - public void addOntology(OWLOntology owlOntology) { - for (OWLAxiom owlAxiom : owlOntology.getAxioms()) { - owlAxiom.accept(this.owlAxiomToRulesConverter); - } - } - - /** - * Returns the set of facts generated by transforming the given OWL ontology. No - * copy is created, so the set should not be modified if its owner is still to - * be used. - * - * @return set of facts - */ - public Set getFacts() { - return this.owlAxiomToRulesConverter.facts; - } - - /** - * Returns the set of rules generated by transforming the given OWL ontology. No - * copy is created, so the set should not be modified if its owner is still to - * be used. - * - * @return set of rules - */ - public Set getRules() { - return this.owlAxiomToRulesConverter.rules; - } - -} diff --git a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java b/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java deleted file mode 100644 index f77b86c84..000000000 --- a/vlog4j-owlapi/src/test/java/org/semanticweb/vlog4j/owlapi/OwlAxiomToRulesConverterTest.java +++ /dev/null @@ -1,663 +0,0 @@ -package org.semanticweb.vlog4j.owlapi; - -/*- - * #%L - * VLog4j OWL API Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.*; - -import java.util.Arrays; -import java.util.Collections; - -import org.junit.Ignore; -import org.junit.Test; -import org.mockito.internal.util.collections.Sets; -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.model.IRI; -import org.semanticweb.owlapi.model.OWLAxiom; -import org.semanticweb.owlapi.model.OWLClass; -import org.semanticweb.owlapi.model.OWLClassExpression; -import org.semanticweb.owlapi.model.OWLDataFactory; -import org.semanticweb.owlapi.model.OWLIndividual; -import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; -import org.semanticweb.owlapi.model.OWLObjectProperty; -import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; -import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; -import org.semanticweb.owlapi.model.OWLObjectUnionOf; -import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.api.Rule; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.api.Variable; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; - -public class OwlAxiomToRulesConverterTest { - - static OWLDataFactory df = OWLManager.getOWLDataFactory(); - - public static IRI getIri(String localName) { - return IRI.create("http://example.org/" + localName); - } - - public static OWLClass getOwlClass(String localName) { - return df.getOWLClass(getIri(localName)); - } - - public static OWLObjectProperty getOwlObjectProperty(String localName) { - return df.getOWLObjectProperty(getIri(localName)); - } - - public static Predicate getClassPredicate(String localName) { - return Expressions.makePredicate("http://example.org/" + localName, 1); - } - - public static Predicate getPropertyPredicate(String localName) { - return Expressions.makePredicate("http://example.org/" + localName, 2); - } - - static OWLClass cA = getOwlClass("A"); - static OWLClass cB = getOwlClass("B"); - static OWLClass cC = getOwlClass("C"); - static OWLClass cD = getOwlClass("D"); - static OWLClass cE = getOwlClass("E"); - static OWLObjectProperty pR = getOwlObjectProperty("R"); - static OWLObjectProperty pS = getOwlObjectProperty("S"); - static OWLObjectProperty pT = getOwlObjectProperty("T"); - static OWLObjectProperty pU = getOwlObjectProperty("U"); - - static Predicate nA = getClassPredicate("A"); - static Predicate nB = getClassPredicate("B"); - static Predicate nC = getClassPredicate("C"); - static Predicate nD = getClassPredicate("D"); - static Predicate nE = getClassPredicate("E"); - static Predicate nR = getPropertyPredicate("R"); - static Predicate nS = getPropertyPredicate("S"); - static Predicate nT = getPropertyPredicate("T"); - static Predicate nU = getPropertyPredicate("U"); - - static OWLIndividual inda = df.getOWLNamedIndividual(getIri("a")); - static OWLIndividual indb = df.getOWLNamedIndividual(getIri("b")); - static OWLIndividual indc = df.getOWLNamedIndividual(getIri("c")); - - @Test - public void testSimpleRule() { - OWLObjectIntersectionOf body = df.getOWLObjectIntersectionOf(cA, cB, cC); - OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(cD, cE); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(body, head); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - 
Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atC = Expressions.makeAtom(nC, Arrays.asList(converter.frontierVariable)); - Atom atD = Expressions.makeAtom(nD, Arrays.asList(converter.frontierVariable)); - Atom atE = Expressions.makeAtom(nE, Arrays.asList(converter.frontierVariable)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atD, atE)), - Expressions.makeConjunction(Arrays.asList(atA, atB, atC))); - - assertEquals(Collections.singleton(rule), converter.rules); - - } - - @Test - public void testTrueBody() { - OWLClassExpression body = df.getOWLObjectIntersectionOf(df.getOWLThing(), - df.getOWLObjectAllValuesFrom(df.getOWLBottomObjectProperty(), cB)); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(body, cA); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - Atom top = OwlToRulesConversionHelper.getTop(converter.frontierVariable); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(top))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testConjunctionTruth() { - OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(cB, df.getOWLThing(), cC); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atC = Expressions.makeAtom(nC, Arrays.asList(converter.frontierVariable)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atB, atC)), - Expressions.makeConjunction(Arrays.asList(atA))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testConjunctionTruthTruth() { - OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(df.getOWLThing(), df.getOWLThing()); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - assertEquals(0, converter.rules.size()); - } - - @Test - public void testConjunctionFalsity() { - OWLClassExpression notSupported = df.getOWLObjectExactCardinality(10, pR); - OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(notSupported, df.getOWLNothing(), cC); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - Atom bot = OwlToRulesConversionHelper.getBottom(converter.frontierVariable); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(bot)), - Expressions.makeConjunction(Arrays.asList(atA))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test(expected = OwlFeatureNotSupportedException.class) - public void testConjunctionException() { - OWLClassExpression notSupported = df.getOWLObjectExactCardinality(10, pR); - OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(notSupported, cC); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cA, head); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); 
- axiom.accept(converter); - } - - @Test - public void testConjunctionNegativeLiterals() { - OWLClassExpression notA = df.getOWLObjectComplementOf(cA); - OWLClassExpression notB = df.getOWLObjectComplementOf(cB); - OWLClassExpression notC = df.getOWLObjectComplementOf(cC); - OWLObjectIntersectionOf head = df.getOWLObjectIntersectionOf(notB, notC); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(notA, head); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Predicate auxPredicate = OwlToRulesConversionHelper.getAuxiliaryClassPredicate(Arrays.asList(notB, notC)); - - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atC = Expressions.makeAtom(nC, Arrays.asList(converter.frontierVariable)); - Atom atAux = Expressions.makeAtom(auxPredicate, Arrays.asList(converter.frontierVariable)); - - Rule rule1 = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atAux)), - Expressions.makeConjunction(Arrays.asList(atB))); - Rule rule2 = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atAux)), - Expressions.makeConjunction(Arrays.asList(atC))); - Rule rule3 = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(atAux))); - - assertEquals(Sets.newSet(rule1, rule2, rule3), converter.rules); - } - - @Test - public void testContrapositive() { - OWLClassExpression notA = df.getOWLObjectComplementOf(cA); - OWLClassExpression notB = df.getOWLObjectComplementOf(cB); - OWLClassExpression notC = df.getOWLObjectComplementOf(cC); - OWLClassExpression notBOrNotC = df.getOWLObjectUnionOf(notB, notC); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(notA, notBOrNotC); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atC = Expressions.makeAtom(nC, Arrays.asList(converter.frontierVariable)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(atB, atC))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testPositiveUniversal() { - OWLClassExpression forallRA = df.getOWLObjectAllValuesFrom(pR, cA); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cB, forallRA); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom atA = Expressions.makeAtom(nA, Arrays.asList(secondVariable)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(atR, atB))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testPositiveExistential() { - OWLClassExpression existsRA = df.getOWLObjectSomeValuesFrom(pR, cA); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(cB, existsRA); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = 
Expressions.makeVariable("Y1"); - Atom atA = Expressions.makeAtom(nA, Arrays.asList(secondVariable)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atR, atA)), - Expressions.makeConjunction(Arrays.asList(atB))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testNegativeUniversal() { - OWLClassExpression forallRA = df.getOWLObjectAllValuesFrom(pR, cA); - OWLClassExpression notB = df.getOWLObjectComplementOf(cB); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(forallRA, notB); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Predicate auxPredicate = OwlToRulesConversionHelper.getAuxiliaryClassPredicate(Arrays.asList(cA)); - Variable secondVariable = Expressions.makeVariable("Y1"); - - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom atAux = Expressions.makeAtom(auxPredicate, Arrays.asList(secondVariable)); - Atom atA = Expressions.makeAtom(nA, Arrays.asList(secondVariable)); - Atom bot = OwlToRulesConversionHelper.getBottom(secondVariable); - - Rule rule1 = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atR, atAux)), - Expressions.makeConjunction(Arrays.asList(atB))); - Rule rule2 = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(bot)), - Expressions.makeConjunction(Arrays.asList(atAux, atA))); - - assertEquals(Sets.newSet(rule1, rule2), converter.rules); - } - - @Test - public void testNegativeExistential() { - OWLClassExpression existRA = df.getOWLObjectSomeValuesFrom(pR, cA); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(existRA, cB); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom atA = Expressions.makeAtom(nA, Arrays.asList(secondVariable)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atB)), - Expressions.makeConjunction(Arrays.asList(atR, atA))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testSelf() { - OWLClassExpression selfR = df.getOWLObjectHasSelf(pR); - OWLClassExpression selfS = df.getOWLObjectHasSelf(pS); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(selfR, selfS); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, converter.frontierVariable)); - Atom atS = Expressions.makeAtom(nS, Arrays.asList(converter.frontierVariable, converter.frontierVariable)); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atS)), - Expressions.makeConjunction(Arrays.asList(atR))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testHasValue() { - OWLClassExpression hasRa = df.getOWLObjectHasValue(pR, inda); - OWLClassExpression hasSb = df.getOWLObjectHasValue(pS, indb); - OWLSubClassOfAxiom axiom = 
df.getOWLSubClassOfAxiom(hasRa, hasSb); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Term consta = Expressions.makeConstant(getIri("a").toString()); - Term constb = Expressions.makeConstant(getIri("b").toString()); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, consta)); - Atom atS = Expressions.makeAtom(nS, Arrays.asList(converter.frontierVariable, constb)); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atS)), - Expressions.makeConjunction(Arrays.asList(atR))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testObjectPropertyAssertions() { - OWLAxiom Rab = df.getOWLObjectPropertyAssertionAxiom(pR, inda, indb); - OWLAxiom invSab = df.getOWLObjectPropertyAssertionAxiom(df.getOWLObjectInverseOf(pS), inda, indb); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - Rab.accept(converter); - invSab.accept(converter); - - Term consta = Expressions.makeConstant(getIri("a").toString()); - Term constb = Expressions.makeConstant(getIri("b").toString()); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(consta, constb)); - Atom atS = Expressions.makeAtom(nS, Arrays.asList(constb, consta)); - - assertEquals(Sets.newSet(atR, atS), converter.facts); - } - - @Test - public void testClassAssertions() { - OWLAxiom Ca = df.getOWLClassAssertionAxiom(cC, indc); - OWLClassExpression BandhasRb = df.getOWLObjectIntersectionOf(cB, df.getOWLObjectHasValue(pR, indb)); - OWLAxiom BandhasRba = df.getOWLClassAssertionAxiom(BandhasRb, inda); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - Ca.accept(converter); - BandhasRba.accept(converter); - - Term consta = Expressions.makeConstant(getIri("a").toString()); - Term constb = Expressions.makeConstant(getIri("b").toString()); - Term constc = Expressions.makeConstant(getIri("c").toString()); - Atom atC = Expressions.makeAtom(nC, Arrays.asList(constc)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(consta)); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(consta, constb)); - - assertEquals(Sets.newSet(atC, atB, atR), converter.facts); - } - - @Test - public void testNegativeObjectPropertyAssertions() { - OWLAxiom Rab = df.getOWLNegativeObjectPropertyAssertionAxiom(pR, inda, indb); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - Rab.accept(converter); - - Term consta = Expressions.makeConstant(getIri("a").toString()); - Term constb = Expressions.makeConstant(getIri("b").toString()); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(consta, constb)); - Atom bot = OwlToRulesConversionHelper.getBottom(consta); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(bot)), - Expressions.makeConjunction(Arrays.asList(atR))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testSubObjectPropertyOf() { - OWLAxiom axiom = df.getOWLSubObjectPropertyOfAxiom(pR, df.getOWLObjectInverseOf(pS)); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom atS = Expressions.makeAtom(nS, Arrays.asList(secondVariable, converter.frontierVariable)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atS)), - 
Expressions.makeConjunction(Arrays.asList(atR))); - - assertEquals(Sets.newSet(rule), converter.rules); - } - - @Test - public void testAsymmetricObjectPropertyOf() { - OWLAxiom axiom = df.getOWLAsymmetricObjectPropertyAxiom(pR); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom at1 = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom at2 = Expressions.makeAtom(nR, Arrays.asList(secondVariable, converter.frontierVariable)); - Rule rule = Expressions.makeRule( - Expressions.makeConjunction( - Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.frontierVariable))), - Expressions.makeConjunction(Arrays.asList(at1, at2))); - - assertEquals(Sets.newSet(rule), converter.rules); - } - - @Test - public void testSymmetricObjectPropertyOf() { - OWLAxiom axiom = df.getOWLSymmetricObjectPropertyAxiom(pR); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom at1 = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom at2 = Expressions.makeAtom(nR, Arrays.asList(secondVariable, converter.frontierVariable)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(at2)), - Expressions.makeConjunction(Arrays.asList(at1))); - - assertEquals(Sets.newSet(rule), converter.rules); - } - - @Test - public void testIrreflexiveObjectPropertyOf() { - OWLAxiom axiom = df.getOWLIrreflexiveObjectPropertyAxiom(pR); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom at1 = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, converter.frontierVariable)); - Rule rule = Expressions.makeRule( - Expressions.makeConjunction( - Arrays.asList(OwlToRulesConversionHelper.getBottom(converter.frontierVariable))), - Expressions.makeConjunction(Arrays.asList(at1))); - - assertEquals(Sets.newSet(rule), converter.rules); - } - - @Test - public void testReflexiveObjectPropertyOf() { - OWLAxiom axiom = df.getOWLReflexiveObjectPropertyAxiom(pR); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom at1 = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, converter.frontierVariable)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(at1)), Expressions - .makeConjunction(Arrays.asList(OwlToRulesConversionHelper.getTop(converter.frontierVariable)))); - - assertEquals(Sets.newSet(rule), converter.rules); - } - - @Test - public void testInverseObjectProperties() { - OWLAxiom axiom = df.getOWLInverseObjectPropertiesAxiom(pR, pS); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom atS = Expressions.makeAtom(nS, Arrays.asList(secondVariable, converter.frontierVariable)); - Rule rule1 = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atS)), - Expressions.makeConjunction(Arrays.asList(atR))); - Rule rule2 = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atR)), - Expressions.makeConjunction(Arrays.asList(atS))); - - assertEquals(Sets.newSet(rule1, rule2), converter.rules); - } - - @Test - public void 
testEquivalentObjectProperties() { - OWLAxiom axiom = df.getOWLEquivalentObjectPropertiesAxiom(pR, df.getOWLObjectInverseOf(pS), pT); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom atS = Expressions.makeAtom(nS, Arrays.asList(secondVariable, converter.frontierVariable)); - Atom atT = Expressions.makeAtom(nT, Arrays.asList(converter.frontierVariable, secondVariable)); - Rule ruleRS = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atS)), - Expressions.makeConjunction(Arrays.asList(atR))); - Rule ruleST = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atT)), - Expressions.makeConjunction(Arrays.asList(atS))); - Rule ruleTR = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atR)), - Expressions.makeConjunction(Arrays.asList(atT))); - Rule ruleRT = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atT)), - Expressions.makeConjunction(Arrays.asList(atR))); - Rule ruleTS = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atS)), - Expressions.makeConjunction(Arrays.asList(atT))); - Rule ruleSR = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atR)), - Expressions.makeConjunction(Arrays.asList(atS))); - - // We have to test against two possible iteration orders, which may occur - // non-deterministically and affect the result: R S T or R T S - // (other orders lead to the same outcome) - assertTrue(converter.rules.equals(Sets.newSet(ruleRS, ruleST, ruleTR)) - || converter.rules.equals(Sets.newSet(ruleRT, ruleTS, ruleSR))); - } - - @Test - public void testSubObjectPropertyChain() { - OWLAxiom axiom = df.getOWLSubPropertyChainOfAxiom(Arrays.asList(pR, df.getOWLObjectInverseOf(pS), pT), pU); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable var1 = Expressions.makeVariable("Y1"); - Variable var2 = Expressions.makeVariable("Y2"); - Variable var3 = Expressions.makeVariable("Y3"); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, var1)); - Atom atS = Expressions.makeAtom(nS, Arrays.asList(var2, var1)); - Atom atT = Expressions.makeAtom(nT, Arrays.asList(var2, var3)); - Atom atU = Expressions.makeAtom(nU, Arrays.asList(converter.frontierVariable, var3)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atU)), - Expressions.makeConjunction(Arrays.asList(atR, atS, atT))); - - assertEquals(Sets.newSet(rule), converter.rules); - } - - public void testTransitiveProperty() { - OWLAxiom axiom = df.getOWLTransitiveObjectPropertyAxiom(pR); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable var1 = Expressions.makeVariable("Y1"); - Variable var2 = Expressions.makeVariable("Y2"); - Atom at1 = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, var1)); - Atom at2 = Expressions.makeAtom(nR, Arrays.asList(var1, var2)); - Atom ath = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, var2)); - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(ath)), - Expressions.makeConjunction(Arrays.asList(at1, at2))); - - assertEquals(Sets.newSet(rule), converter.rules); - } - - @Test - public void testEquivalentClasses() { - OWLAxiom axiom = df.getOWLEquivalentClassesAxiom(cA, cB, cC); - - 
OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - Atom atB = Expressions.makeAtom(nB, Arrays.asList(converter.frontierVariable)); - Atom atC = Expressions.makeAtom(nC, Arrays.asList(converter.frontierVariable)); - Rule ruleAB = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atB)), - Expressions.makeConjunction(Arrays.asList(atA))); - Rule ruleBC = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atC)), - Expressions.makeConjunction(Arrays.asList(atB))); - Rule ruleCA = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(atC))); - Rule ruleAC = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atC)), - Expressions.makeConjunction(Arrays.asList(atA))); - Rule ruleCB = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atB)), - Expressions.makeConjunction(Arrays.asList(atC))); - Rule ruleBA = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(atB))); - - // We have to test against two possible iteration orders, which may occur - // non-deterministically and affect the result: A B C or A C B - // (other orders lead to the same outcome) - assertTrue(converter.rules.equals(Sets.newSet(ruleAB, ruleBC, ruleCA)) - || converter.rules.equals(Sets.newSet(ruleAC, ruleCB, ruleBA))); - } - - @Test - public void testObjectPropertyDomain() { - OWLAxiom axiom = df.getOWLObjectPropertyDomainAxiom(pR, cA); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom atA = Expressions.makeAtom(nA, Arrays.asList(converter.frontierVariable)); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(atR))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Test - public void testObjectPropertyRange() { - OWLAxiom axiom = df.getOWLObjectPropertyRangeAxiom(pR, cA); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - Variable secondVariable = Expressions.makeVariable("Y1"); - Atom atR = Expressions.makeAtom(nR, Arrays.asList(converter.frontierVariable, secondVariable)); - Atom atA = Expressions.makeAtom(nA, Arrays.asList(secondVariable)); - - Rule rule = Expressions.makeRule(Expressions.makeConjunction(Arrays.asList(atA)), - Expressions.makeConjunction(Arrays.asList(atR))); - - assertEquals(Collections.singleton(rule), converter.rules); - } - - @Ignore - public void test() { - OWLObjectPropertyExpression Sinv = df.getOWLObjectInverseOf(pS); - OWLObjectSomeValuesFrom SomeSinvE = df.getOWLObjectSomeValuesFrom(Sinv, cE); - OWLObjectSomeValuesFrom SomeRSomeSinvE = df.getOWLObjectSomeValuesFrom(pR, SomeSinvE); - OWLObjectUnionOf AorB = df.getOWLObjectUnionOf(cA, cB); - OWLObjectIntersectionOf AorBandCandSomeRSomeSinvE = df.getOWLObjectIntersectionOf(AorB, cC, SomeRSomeSinvE); - OWLSubClassOfAxiom axiom = df.getOWLSubClassOfAxiom(AorBandCandSomeRSomeSinvE, cD); - - OwlAxiomToRulesConverter converter = new OwlAxiomToRulesConverter(); - axiom.accept(converter); - - for (Rule rule : converter.rules) { - System.out.println(rule); - } - } - -} diff --git 
a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RDFModelToAtomsConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RDFModelToAtomsConverter.java deleted file mode 100644 index 620f5b113..000000000 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RDFModelToAtomsConverter.java +++ /dev/null @@ -1,109 +0,0 @@ -package org.semanticweb.vlog4j.rdf; - -import static org.semanticweb.vlog4j.rdf.RDFValueToTermConverter.rdfValueToTerm; - -/*- - * #%L - * VLog4j RDF Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import java.util.Set; -import java.util.stream.Collectors; - -import org.openrdf.model.BNode; -import org.openrdf.model.Literal; -import org.openrdf.model.Model; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Blank; -import org.semanticweb.vlog4j.core.model.api.Constant; -import org.semanticweb.vlog4j.core.model.api.Predicate; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; - -/** - * Class for converting RDF {@link Model}s to {@link Atom} sets. Converts each - * {@code } triple statement of the given - * {@code rdfModel} into an {@link Atom} of the form - * {@code TRIPLE(subject, predicate, object)}. The ternary predicate used for - * all atoms generated from RDF triples is - * {@link RDFModelToAtomsConverter#RDF_TRIPLE_PREDICATE}. Subject, predicate and - * object {@link Value}s are converted to corresponding {@link Term}s: - *
* <ul> - * <li>{@link URI}s are converted to {@link Constant}s with the escaped URI - * String as name.</li> - * <li>{@link Literal}s are converted to {@link Constant}s with names containing - * the canonical form of the literal label, the data type and the language.</li> - * <li>{@link BNode}s are converted to {@link Blank}s with the generated blank - * ID as name. {@link BNode}s have unique generated IDs in the context of a - * {@link Model}. Blanks with the same name loaded from different models will - * have different ids.</li> - * </ul>
- * - * @author Irina Dragoste - * - */ -public final class RDFModelToAtomsConverter { - - /** - * The name of the ternary predicate of atoms generated from RDF triples: - * "TRIPLE". - */ - public static final String RDF_TRIPLE_PREDICATE_NAME = "TRIPLE"; - - /** - * The ternary predicate of atoms generated from RDF triples. It has - * {@code name}({@link Predicate#getName()}) "TRIPLE" and - * {@code arity}({@link Predicate#getArity()}) 3. - */ - public static final Predicate RDF_TRIPLE_PREDICATE = Expressions.makePredicate(RDF_TRIPLE_PREDICATE_NAME, 3); - - private RDFModelToAtomsConverter() { - } - - /** - * Converts each {@code } triple statement of the - * given {@code rdfModel} into an {@link Atom} of the form - * {@code TRIPLE(subject, predicate, object)}. See - * {@link RDFModelToAtomsConverter#RDF_TRIPLE_PREDICATE}, the ternary predicate - * used for all atoms generated from RDF triples. - * - * @param rdfModel - * a {@link Model} of an RDF document, containing triple statements - * that will be converter to facts. - * @return a set of atoms corresponding to the statements of given - * {@code rdfModel}. - */ - public static Set rdfModelToAtoms(Model rdfModel) { - return rdfModel.stream().map(RDFModelToAtomsConverter::rdfStatementToAtom).collect(Collectors.toSet()); - } - - static Atom rdfStatementToAtom(final Statement statement) { - final Resource subject = statement.getSubject(); - - final URI predicate = statement.getPredicate(); - - final Value object = statement.getObject(); - - return Expressions.makeAtom(RDF_TRIPLE_PREDICATE, rdfValueToTerm(subject), rdfValueToTerm(predicate), - rdfValueToTerm(object)); - } - -} diff --git a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RDFValueToTermConverter.java b/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RDFValueToTermConverter.java deleted file mode 100644 index 1e2dd053e..000000000 --- a/vlog4j-rdf/src/main/java/org/semanticweb/vlog4j/rdf/RDFValueToTermConverter.java +++ /dev/null @@ -1,97 +0,0 @@ -package org.semanticweb.vlog4j.rdf; - -/*- - * #%L - * VLog4j RDF Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.openrdf.model.BNode; -import org.openrdf.model.Literal; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.datatypes.XMLDatatypeUtil; -import org.openrdf.rio.ntriples.NTriplesUtil; -import org.semanticweb.vlog4j.core.model.api.Term; -import org.semanticweb.vlog4j.core.model.implementation.BlankImpl; -import org.semanticweb.vlog4j.core.model.implementation.ConstantImpl; - -final class RDFValueToTermConverter { - - private RDFValueToTermConverter() { - } - - static Term rdfValueToTerm(Value value) { - if (value instanceof BNode) { - return rdfBlankNodeToBlank((BNode) value); - } else if (value instanceof Literal) { - return rdfLiteralToConstant((Literal) value); - } else if (value instanceof URI) { - return rdfURItoConstant((URI) value); - } else - throw new RuntimeException("Unown Value type: " + value.getClass()); - } - - static Term rdfBlankNodeToBlank(BNode bNode) { - // IDs are generated to be unique in every Model. - return new BlankImpl(bNode.getID()); - } - - static Term rdfURItoConstant(URI uri) { - final String escapedURIString = NTriplesUtil.escapeString(uri.toString()); - return new ConstantImpl(escapedURIString); - } - - static Term rdfLiteralToConstant(Literal literal) { - final String normalizedStringValueLiteral = buildNormalizedStringValue(literal); - return new ConstantImpl(normalizedStringValueLiteral); - } - - /** - * Serializes the given {@code literal} to the the NTriples format for - * {@link Literal}s, using a canonical representation. - * - * @param literal - * @return a unique string representation of given {@code literal} in canonical - * form. - */ - static String buildNormalizedStringValue(Literal literal) { - final URI datatype = literal.getDatatype(); - - final StringBuilder sb = new StringBuilder(); - // Do some character escaping on the label: - sb.append("\""); - final String normalizedLabel = (datatype != null) ? XMLDatatypeUtil.normalize(literal.getLabel(), datatype) - : literal.getLabel(); - sb.append(NTriplesUtil.escapeString(normalizedLabel)); - sb.append("\""); - - if (literal.getLanguage() != null) { - // Append the literal's language - sb.append("@"); - sb.append(literal.getLanguage()); - } else { - if (datatype != null) { - // Append the literal's datatype - sb.append("^^"); - sb.append(NTriplesUtil.toNTriplesString(datatype)); - } - } - return sb.toString(); - } - -} diff --git a/vlog4j-rdf/src/test/data/blanks_context1.ttl b/vlog4j-rdf/src/test/data/blanks_context1.ttl deleted file mode 100644 index e97ea71bf..000000000 --- a/vlog4j-rdf/src/test/data/blanks_context1.ttl +++ /dev/null @@ -1,5 +0,0 @@ -_:genid1 . -_:genid1 _:genid2 . -_:genid2 _:genid1 . -_:genid1 _:genid1 . -_:genid2 . diff --git a/vlog4j-rdf/src/test/data/blanks_context2.ttl b/vlog4j-rdf/src/test/data/blanks_context2.ttl deleted file mode 100644 index e97ea71bf..000000000 --- a/vlog4j-rdf/src/test/data/blanks_context2.ttl +++ /dev/null @@ -1,5 +0,0 @@ -_:genid1 . -_:genid1 _:genid2 . -_:genid2 _:genid1 . -_:genid1 _:genid1 . -_:genid2 . diff --git a/vlog4j-rdf/src/test/data/exampleFacts.ttl b/vlog4j-rdf/src/test/data/exampleFacts.ttl deleted file mode 100644 index 184bbd914..000000000 --- a/vlog4j-rdf/src/test/data/exampleFacts.ttl +++ /dev/null @@ -1,19 +0,0 @@ - "student" . -_:genid15 . - . -_:genid17 _:genid16 . -_:genid15 _:genid17 . -_:genid16 . -_:genid16 . -_:genid16 . -_:genid17 . - _:genid15 . - "systems staff worker" . - . -_:genid18 . - "is age" . - "-005"^^xsd:integer . - "-05.0"^^xsd:decimal . 
- "04.2E9"^^xsd:double . - false . - true . \ No newline at end of file diff --git a/vlog4j-rdf/src/test/data/exampleFactsNoBlanks.ttl b/vlog4j-rdf/src/test/data/exampleFactsNoBlanks.ttl deleted file mode 100644 index 216ce7a1e..000000000 --- a/vlog4j-rdf/src/test/data/exampleFactsNoBlanks.ttl +++ /dev/null @@ -1,10 +0,0 @@ - "student" . - . - "systems staff worker" . - . - "is age" . - "-005"^^xsd:integer . - "-05.0"^^xsd:decimal . - "04.2E9"^^xsd:double . - false . - true . \ No newline at end of file diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/BlankIdsInDifferentModelsTest.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/BlankIdsInDifferentModelsTest.java deleted file mode 100644 index f1ace8293..000000000 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/BlankIdsInDifferentModelsTest.java +++ /dev/null @@ -1,95 +0,0 @@ -package org.semanticweb.vlog4j.rdf; - -/*- - * #%L - * VLog4j RDF Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.io.IOException; -import java.util.HashSet; -import java.util.Set; - -import org.junit.Test; -import org.openrdf.model.BNode; -import org.openrdf.model.Model; -import org.openrdf.model.Resource; -import org.openrdf.model.Value; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; - -public class BlankIdsInDifferentModelsTest { - - @Test - public void testBlanksHaveDifferentIdsInDifferentModelContexts() - throws RDFParseException, RDFHandlerException, IOException { - - final String blanksTurtleFile1 = TestingUtils.TURTLE_TEST_FILES_PATH + "blanks_context1.ttl"; - final Model model1 = TestingUtils.parseFile(new File(blanksTurtleFile1), RDFFormat.TURTLE); - final Set blankNodeIdsForModel1File1 = collectBlankNodeIds(model1); - assertEquals(2, blankNodeIdsForModel1File1.size()); - - final Model model2 = TestingUtils.parseFile(new File(blanksTurtleFile1), RDFFormat.TURTLE); - final Set blankNodeIdsForModel2File1 = collectBlankNodeIds(model2); - assertEquals(2, blankNodeIdsForModel2File1.size()); - - // assert that there is no common Blank in two different models (even if they - // have been - // loaded from the same file) - final Set intersectionModel1Model2 = new HashSet<>(blankNodeIdsForModel1File1); - intersectionModel1Model2.retainAll(blankNodeIdsForModel2File1); - assertTrue(intersectionModel1Model2.isEmpty()); - - final String blanksTurtleFile2SameContentAsFile1 = TestingUtils.TURTLE_TEST_FILES_PATH + "blanks_context2.ttl"; - final Model model3 = TestingUtils.parseFile(new File(blanksTurtleFile2SameContentAsFile1), RDFFormat.TURTLE); - final Set blankNodeIdsForModel3File2 = collectBlankNodeIds(model3); - assertEquals(2, blankNodeIdsForModel3File2.size()); - - // assert that there is no common Blank in two different models, even if the - // files 
contain the same blank names - final Set intersectionModel1Model3 = new HashSet<>(blankNodeIdsForModel1File1); - intersectionModel1Model3.retainAll(blankNodeIdsForModel3File2); - assertTrue(intersectionModel1Model3.isEmpty()); - - // assert that there is no common Blank in two different models, even if the - // files contain the same blank names - final Set intersectionModel2Model3 = new HashSet<>(blankNodeIdsForModel2File1); - intersectionModel2Model3.retainAll(blankNodeIdsForModel3File2); - assertTrue(intersectionModel2Model3.isEmpty()); - } - - private Set collectBlankNodeIds(Model model) { - final HashSet blankNodeIds = new HashSet<>(); - model.forEach(statement -> { - final Resource subject = statement.getSubject(); - if (subject instanceof BNode) { - blankNodeIds.add(((BNode) subject).getID()); - } - final Value object = statement.getObject(); - if (object instanceof BNode) { - blankNodeIds.add(((BNode) object).getID()); - } - }); - return blankNodeIds; - } - -} diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestLoadFactsFromRDFToVLogReasoner.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestLoadFactsFromRDFToVLogReasoner.java deleted file mode 100644 index 52ba6a4ab..000000000 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestLoadFactsFromRDFToVLogReasoner.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.semanticweb.vlog4j.rdf; - -import static org.junit.Assert.assertTrue; - -/*- - * #%L - * VLog4j RDF Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import java.io.File; -import java.io.IOException; -import java.util.Set; - -import org.junit.Test; -import org.openrdf.model.Model; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.implementation.Expressions; -import org.semanticweb.vlog4j.core.reasoner.Reasoner; -import org.semanticweb.vlog4j.core.reasoner.exceptions.EdbIdbSeparationException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.IncompatiblePredicateArityException; -import org.semanticweb.vlog4j.core.reasoner.exceptions.ReasonerStateException; -import org.semanticweb.vlog4j.core.reasoner.implementation.QueryResultIterator; - -public class TestLoadFactsFromRDFToVLogReasoner { - - // TODO add rules, reason - // TODO add data of each type - - @Test - public void testLoadFactsFromRDF() throws RDFParseException, RDFHandlerException, IOException, - ReasonerStateException, EdbIdbSeparationException, IncompatiblePredicateArityException { - final Model model = TestingUtils.parseFile( - new File(TestingUtils.TURTLE_TEST_FILES_PATH + "exampleFactsNoBlanks.ttl"), RDFFormat.TURTLE); - final Set facts = RDFModelToAtomsConverter.rdfModelToAtoms(model); - try (final Reasoner reasoner = Reasoner.getInstance()) { - reasoner.addFacts(facts); - reasoner.load(); - final QueryResultIterator answerQuery = reasoner.answerQuery( - Expressions.makeAtom(RDFModelToAtomsConverter.RDF_TRIPLE_PREDICATE, Expressions.makeVariable("x"), - Expressions.makeVariable("y"), Expressions.makeVariable("z")), - true); - assertTrue(answerQuery.hasNext()); - answerQuery.close(); - } - - } -} diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestTurtleToFacts.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestTurtleToFacts.java deleted file mode 100644 index c23c10df8..000000000 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestTurtleToFacts.java +++ /dev/null @@ -1,90 +0,0 @@ -package org.semanticweb.vlog4j.rdf; - -/*- - * #%L - * VLog4j RDF Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.io.IOException; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import org.junit.Test; -import org.openrdf.model.Model; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParseException; -import org.semanticweb.vlog4j.core.model.api.Atom; -import org.semanticweb.vlog4j.core.model.api.Blank; - -public class TestTurtleToFacts { - - @Test - public void testRDFFileToAtomsConverter() throws RDFParseException, RDFHandlerException, IOException { - - final Model model = TestingUtils.parseFile(new File(TestingUtils.TURTLE_TEST_FILES_PATH + "exampleFacts.ttl"), - RDFFormat.TURTLE); - final Set facts = RDFModelToAtomsConverter.rdfModelToAtoms(model); - - System.out.println(facts); - // TODO asserts: url long name for constants and literal datatypes - // TODO asserts: normalized literal label - // TODO asserts: escaped " characters in literal - // FIXME test builtin datatypes? - // TODO test literal of all literal datatypes - // TODO test with/without language - // TODO test if reasoning is possible with this predicate / fact names - } - - @Test - public void testBlanksWithSameRDFNameAreDifferentInDifferentModelContexts() - throws RDFParseException, RDFHandlerException, IOException { - final String blanksTurtleFile1 = TestingUtils.TURTLE_TEST_FILES_PATH + "blanks_context1.ttl"; - - final Model model1File1 = TestingUtils.parseFile(new File(blanksTurtleFile1), RDFFormat.TURTLE); - final Set atomsFromModel1 = RDFModelToAtomsConverter.rdfModelToAtoms(model1File1); - final Set blanksFromModel1 = extractBlanks(atomsFromModel1); - assertEquals(2, blanksFromModel1.size()); - - final Model model2File1 = TestingUtils.parseFile(new File(blanksTurtleFile1), RDFFormat.TURTLE); - final Set atomsFromModel2 = RDFModelToAtomsConverter.rdfModelToAtoms(model2File1); - final Set blanksFromModel2 = extractBlanks(atomsFromModel2); - assertEquals(2, blanksFromModel2.size()); - - // assert that there is no common Blank in two different models (even if they - // have been - // loaded from the same file) - final Set intersection = new HashSet<>(blanksFromModel1); - intersection.retainAll(blanksFromModel2); - assertTrue(intersection.isEmpty()); - - } - - private Set extractBlanks(Collection atoms) { - final Set blanks = new HashSet<>(); - atoms.forEach(atom -> blanks.addAll(atom.getBlanks())); - return blanks; - - } - -} diff --git a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestingUtils.java b/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestingUtils.java deleted file mode 100644 index 479ac75f7..000000000 --- a/vlog4j-rdf/src/test/java/org/semanticweb/vlog4j/rdf/TestingUtils.java +++ /dev/null @@ -1,58 +0,0 @@ -package org.semanticweb.vlog4j.rdf; - -/*- - * #%L - * VLog4j RDF Support - * %% - * Copyright (C) 2018 VLog4j Developers - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-
-import org.openrdf.model.Model;
-import org.openrdf.model.impl.LinkedHashModel;
-import org.openrdf.rio.RDFFormat;
-import org.openrdf.rio.RDFHandlerException;
-import org.openrdf.rio.RDFParseException;
-import org.openrdf.rio.RDFParser;
-import org.openrdf.rio.Rio;
-import org.openrdf.rio.helpers.StatementCollector;
-
-final class TestingUtils {
-
-	private TestingUtils() {
-	}
-
-	static final String TURTLE_TEST_FILES_PATH = "src/test/data/";
-
-	static Model parseFile(File file, RDFFormat rdfFormat) throws RDFParseException, RDFHandlerException, IOException {
-
-		final URI baseURI = file.toURI();
-		final InputStream inputStream = new FileInputStream(file);
-		final RDFParser rdfParser = Rio.createParser(rdfFormat);
-
-		final Model model = new LinkedHashModel();
-		rdfParser.setRDFHandler(new StatementCollector(model));
-		rdfParser.parse(inputStream, baseURI.toString());
-
-		return model;
-	}
-
-}

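For comparison with the deleted vlog4j tests above: under the Rulewerk API used by the new examples below, the same load-RDF-and-query round trip needs no manual parse-and-convert step, because an RDF file can be declared directly as a fact source. The following is a minimal sketch against that API; the input file name is a placeholder (no such file ships with this change), and exception handling is collapsed into a broad throws clause.

import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
import org.semanticweb.rulewerk.core.reasoner.Reasoner;
import org.semanticweb.rulewerk.parser.RuleParser;
import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;

public class LoadRdfTriplesSketch {

	public static void main(final String[] args) throws Exception {
		// Declare a ternary "triple" predicate that is filled directly from an
		// RDF file, replacing the openrdf Model-to-atoms conversion of the
		// deleted tests. "exampleFacts.nt.gz" is a placeholder file name.
		final KnowledgeBase kb = RuleParser.parse("@source triple[3] : load-rdf(\"exampleFacts.nt.gz\") .");

		try (final Reasoner reasoner = new VLogReasoner(kb)) {
			reasoner.reason();
			// Ask for all loaded triples; as in the deleted test, passing
			// 'true' also returns answers that contain blank nodes.
			final PositiveLiteral query = RuleParser.parsePositiveLiteral("triple(?s, ?p, ?o)");
			try (final QueryResultIterator answers = reasoner.answerQuery(query, true)) {
				answers.forEachRemaining(System.out::println);
			}
		}
	}
}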
+ * Exception handling is omitted for simplicity.
+ *
+ * @author Christian Lewe
+ * @author Markus Kroetzsch
+ *
+ */
+public class AddDataFromRdfFile {
+
+	public static void main(final String[] args) throws IOException, ParsingException {
+		ExamplesUtils.configureLogging();
+
+		/* 1. Prepare rules and create some related vocabulary objects used later. */
+
+		final String rules = "" // first define some namespaces and abbreviations:
+				+ "@prefix ex: <https://example.org/> ."
+				+ "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ."
+				// specify data sources:
+				+ "@source triple[3] : load-rdf(\"" + ExamplesUtils.INPUT_FOLDER + "ternaryBicycleEDB.nt.gz\") ."
+				// every bicycle has some part that is a wheel:
+				+ "triple(?S, ex:hasPart, !X), triple(!X, rdf:type, ex:wheel) :- triple(?S, rdf:type, ex:bicycle) ."
+				// every wheel is part of some bicycle:
+				+ "triple(?S, ex:isPartOf, !X) :- triple(?S, rdf:type, ex:wheel) ."
+				// hasPart and isPartOf are mutually inverse relations:
+				+ "triple(?S, ex:isPartOf, ?O) :- triple(?O, ex:hasPart, ?S) ."
+				+ "triple(?S, ex:hasPart, ?O) :- triple(?O, ex:isPartOf, ?S) .";
+
+		final KnowledgeBase kb = RuleParser.parse(rules);
+
+		/*
+		 * 2. Reasoning, querying and exporting, while using try-with-resources to close
+		 * the reasoner automatically.
+		 */
+
+		try (final Reasoner reasoner = new VLogReasoner(kb)) {
+			/* The reasoner will use the Restricted Chase by default. */
+			reasoner.reason();
+			System.out.println("After materialisation:");
+			final PositiveLiteral hasPartIDB = RuleParser
+					.parsePositiveLiteral("triple(?X, <https://example.org/hasPart>, ?Y)");
+			ExamplesUtils.printOutQueryAnswers(hasPartIDB, reasoner);
+
+			/* Exporting query answers to {@code .csv} files. */
+			reasoner.exportQueryAnswersToCsv(hasPartIDB, ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartWithBlanks.csv",
+					true);
+			reasoner.exportQueryAnswersToCsv(hasPartIDB,
+					ExamplesUtils.OUTPUT_FOLDER + "ternaryHasPartWithoutBlanks.csv", false);
+
+			final PositiveLiteral existsHasPartRedBike = RuleParser
+					.parsePositiveLiteral("triple(<https://example.org/redBike>, <https://example.org/hasPart>, ?X)");
+			reasoner.exportQueryAnswersToCsv(existsHasPartRedBike,
+					ExamplesUtils.OUTPUT_FOLDER + "existsHasPartRedBikeWithBlanks.csv", true);
+		}
+	}
+
+}
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java
new file mode 100644
index 000000000..b32c784ea
--- /dev/null
+++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/AddDataFromSparqlQueryResults.java
@@ -0,0 +1,208 @@
+package org.semanticweb.rulewerk.examples.core;
+
+/*-
+ * #%L
+ * Rulewerk Examples
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.LinkedHashSet;
+import java.util.List;
+
+import org.semanticweb.rulewerk.core.model.api.Conjunction;
+import org.semanticweb.rulewerk.core.model.api.DataSource;
+import org.semanticweb.rulewerk.core.model.api.PositiveLiteral;
+import org.semanticweb.rulewerk.core.model.api.Predicate;
+import org.semanticweb.rulewerk.core.model.api.Rule;
+import org.semanticweb.rulewerk.core.model.api.Term;
+import org.semanticweb.rulewerk.core.model.api.Variable;
+import org.semanticweb.rulewerk.core.model.implementation.DataSourceDeclarationImpl;
+import org.semanticweb.rulewerk.core.model.implementation.Expressions;
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.QueryResultIterator;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.core.reasoner.implementation.SparqlQueryResultDataSource;
+import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;
+import org.semanticweb.rulewerk.examples.ExamplesUtils;
+
+/**
+ * This is a simple example of adding data from the result of a SPARQL query on
+ * a remote database endpoint, using {@link SparqlQueryResultDataSource}. In
+ * this example, we will query Wikidata for titles of publications that have
+ * authors who have children together.
+ *
+ * @author Irina Dragoste
+ *
+ */
+public class AddDataFromSparqlQueryResults {
+
+	/**
+	 * Wikidata author
+	 * property id.
+	 */
+	private static final String WIKIDATA_AUTHOR_PROPERTY = "wdt:P50";
+	/**
+	 * Wikidata title
+	 * property id. Published title of a work, such as a newspaper article, a
+	 * literary work, a website, or a performance work.
+	 */
+	private static final String WIKIDATA_TITLE_PROPERTY = "wdt:P1476";
+	/**
+	 * Wikidata mother
+	 * property id.
+	 */
+	private static final String WIKIDATA_MOTHER_PROPERTY = "wdt:P25";
+	/**
+	 * Wikidata father
+	 * property id.
+	 */
+	private static final String WIKIDATA_FATHER_PROPERTY = "wdt:P22";
+
+	public static void main(final String[] args) throws IOException {
+
+		ExamplesUtils.configureLogging();
+
+		/*
+		 * The Wikidata SPARQL query endpoint.
+		 */
+		final URL wikidataSparqlEndpoint = new URL("https://query.wikidata.org/sparql");
+
+		/*
+		 * SPARQL query body that looks for publications where two authors of the
+		 * publication are, respectively, the mother and the father of the same child.
+		 */
+		final String queryBody = " ?publication " + WIKIDATA_TITLE_PROPERTY + " ?title ." + " ?publication "
+				+ WIKIDATA_AUTHOR_PROPERTY + " ?mother ." + " ?publication " + WIKIDATA_AUTHOR_PROPERTY + " ?father ."
+				+ " ?child " + WIKIDATA_MOTHER_PROPERTY + " ?mother ." + " ?child " + WIKIDATA_FATHER_PROPERTY
+				+ " ?father .";
+
+		final Variable titleVariable = Expressions.makeUniversalVariable("title");
+		final Variable motherVariable = Expressions.makeUniversalVariable("mother");
+		final Variable fatherVariable = Expressions.makeUniversalVariable("father");
+
+		/*
+		 * The query variables are the variables from the query body which will appear
+		 * in the query result, in the given order. Facts resulting from this query will
+		 * have as terms the title of the publication, the author who is the mother, and
+		 * the author who is the father.
+		 */
+		final LinkedHashSet<Variable> queryVariables = new LinkedHashSet<>(
+				Arrays.asList(titleVariable, motherVariable, fatherVariable));
+
+		/*
+		 * We query Wikidata with the SPARQL query composed of the query variables and
+		 * query body. The query result is a DataSource that we will associate with a
+		 * predicate.
+		 */
+		final DataSource sparqlQueryResultDataSource = new SparqlQueryResultDataSource(wikidataSparqlEndpoint,
+				queryVariables, queryBody);
+
+		/*
+		 * Predicate that will be mapped to the SPARQL query result. It must have the
+		 * same arity as the number of query variables. In this case, we have 3 query
+		 * variables (title, mother and father).
+		 */
+		final Predicate queryPredicate = Expressions.makePredicate("publicationParents", 3);
+
+		try (Reasoner reasoner = new VLogReasoner(new KnowledgeBase())) {
+
+			final KnowledgeBase kb = reasoner.getKnowledgeBase();
+			/*
+			 * The SPARQL query results will be added to the reasoner knowledge base, as
+			 * facts associated with the predicate publicationParents.
+			 */
+
+			kb.addStatement(new DataSourceDeclarationImpl(queryPredicate, sparqlQueryResultDataSource));
+			reasoner.reason();
+
+			/*
+			 * We construct a query PositiveLiteral for the predicate associated with the
+			 * SPARQL query result.
+			 */
+			final PositiveLiteral query = Expressions.makePositiveLiteral(queryPredicate,
+					Expressions.makeUniversalVariable("x"), Expressions.makeUniversalVariable("y"),
+					Expressions.makeUniversalVariable("z"));
+
+			/* We query the reasoner for facts of the SPARQL query result predicate. */
+			System.out.println("Titles of publications by co-authors who have a child together:");
+			try (QueryResultIterator queryResultIterator = reasoner.answerQuery(query, false)) {
+				queryResultIterator.forEachRemaining(queryResult -> {
+					final List<Term> queryResultTerms = queryResult.getTerms();
+
+					System.out.println("- title: " + queryResultTerms.get(0) + ", mother author: "
+							+ queryResultTerms.get(1) + ", father author: " + queryResultTerms.get(2));
+				});
+			}
+
+			/*
+			 * To do some basic reasoning, we would now like to add the following rule that
+			 * extracts (unique) mothers, fathers, and pairs from the queried data:
+			 * haveChildrenTogether(?y, ?z), isMother(?y), isFather(?z) :-
+			 * publicationParents(?x, ?y, ?z) .
+			 */
+			final PositiveLiteral haveChildrenTogether = Expressions.makePositiveLiteral("haveChildrenTogether",
+					Expressions.makeUniversalVariable("y"), Expressions.makeUniversalVariable("z"));
+			final PositiveLiteral isMother = Expressions.makePositiveLiteral("isMother",
+					Expressions.makeUniversalVariable("y"));
+			final PositiveLiteral isFather = Expressions.makePositiveLiteral("isFather",
+					Expressions.makeUniversalVariable("z"));
+			final Conjunction<PositiveLiteral> ruleHeadConjunction = Expressions
+					.makePositiveConjunction(haveChildrenTogether, isMother, isFather);
+			final Rule rule = Expressions.makeRule(ruleHeadConjunction, Expressions.makeConjunction(query));
+
+			/*
+			 * We add the created rule, and reason on the data added from the Wikidata
+			 * SPARQL query result.
+			 */
+			kb.addStatement(rule);
+			reasoner.reason();
+
+			/* We query the reasoner for facts of the haveChildrenTogether predicate. */
+			System.out.println("Co-authors who have a child together:");
+			try (QueryResultIterator queryResultIterator = reasoner.answerQuery(haveChildrenTogether, false)) {
+				queryResultIterator.forEachRemaining(queryResult -> {
+					final List<Term> queryResultTerms = queryResult.getTerms();
+
+					System.out
+							.println("- author1: " + queryResultTerms.get(0) + ", author2: " + queryResultTerms.get(1));
+				});
+			}
+
+			/* We query the reasoner for facts of the isMother predicate. */
+			System.out.println("Mothers:");
+			try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isMother, false)) {
+				queryResultIterator.forEachRemaining(queryResult -> {
+					final List<Term> queryResultTerms = queryResult.getTerms();
+
+					System.out.println("- mother: " + queryResultTerms.get(0));
+				});
+			}
+
+			/* We query the reasoner for facts of the isFather predicate. */
+			System.out.println("Fathers:");
+			try (QueryResultIterator queryResultIterator = reasoner.answerQuery(isFather, false)) {
+				queryResultIterator.forEachRemaining(queryResult -> {
+					final List<Term> queryResultTerms = queryResult.getTerms();
+
+					System.out.println("- father: " + queryResultTerms.get(0));
+				});
+			}
+
+		}
+	}
+
+}
diff --git a/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java
new file mode 100644
index 000000000..6eeb04d60
--- /dev/null
+++ b/rulewerk-examples/src/main/java/org/semanticweb/rulewerk/examples/core/ConfigureReasonerLogging.java
@@ -0,0 +1,120 @@
+package org.semanticweb.rulewerk.examples.core;
+
+/*-
+ * #%L
+ * Rulewerk Examples
+ * %%
+ * Copyright (C) 2018 - 2020 Rulewerk Developers
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import java.io.IOException;
+
+import org.semanticweb.rulewerk.core.reasoner.KnowledgeBase;
+import org.semanticweb.rulewerk.core.reasoner.LogLevel;
+import org.semanticweb.rulewerk.core.reasoner.Reasoner;
+import org.semanticweb.rulewerk.reasoner.vlog.VLogReasoner;
+import org.semanticweb.rulewerk.parser.ParsingException;
+import org.semanticweb.rulewerk.parser.RuleParser;
+
+/**
+ * This class exemplifies setting a log file and log level for VLog reasoner
+ * logging information (like materialisation duration, number of iterations,
+ * number of derivations).
+ *

- * <ul>
- * <li>If the reasoner is loaded (see {@link #load()}), but has not
- * reasoned yet, the query will be evaluated on the explicit set of facts.</li>
- * <li>Otherwise, if this method is called after reasoning (see
- * {@link #reason()}), the query will be evaluated on the explicit and implicit
- * facts inferred through reasoning.</li>
- * </ul>
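A minimal sketch of the two cases in this list, written against the legacy vlog4j API shown in the deleted TestLoadFactsFromRDFToVLogReasoner further up (imports as in that test). The helper method and its facts and query arguments are illustrative placeholders, not part of the removed code:

	// Illustrative only: the two answerQuery() stages described above.
	static void showAnswerQueryStages(final Set<Atom> facts, final Atom query) throws Exception {
		try (final Reasoner reasoner = Reasoner.getInstance()) {
			reasoner.addFacts(facts);
			reasoner.load();

			// Case 1: loaded but not yet reasoned -- answers are computed over
			// the explicitly added facts only.
			final QueryResultIterator beforeReasoning = reasoner.answerQuery(query, true);
			beforeReasoning.forEachRemaining(System.out::println);
			beforeReasoning.close();

			reasoner.reason();

			// Case 2: after reason() -- answers also include the implicit
			// facts inferred through reasoning.
			final QueryResultIterator afterReasoning = reasoner.answerQuery(query, true);
			afterReasoning.forEachRemaining(System.out::println);
			afterReasoning.close();
		}
	}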