From 9eaed8ed22ed2b999c490e39bd527fe3c28d4166 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Mon, 2 Aug 2021 14:03:07 +0200 Subject: [PATCH 01/32] Update docusaurus + minor refactoring. --- build.sbt | 67 +- .../rdfshape/server/wikibase/Wikibase.scala | 44 +- .../rdfshape/logging/LoggingManager.scala | 28 +- website/docusaurus.config.js | 28 +- website/package.json | 6 +- website/src/components/HomepageFeatures.js | 111 +- website/yarn.lock | 982 +++++++++--------- 7 files changed, 660 insertions(+), 606 deletions(-) diff --git a/build.sbt b/build.sbt index 497f1d7d..8308a9aa 100644 --- a/build.sbt +++ b/build.sbt @@ -21,19 +21,9 @@ Global / excludeLintKeys ++= Set( /* ------------------------------------------------------------------------- */ /* GITHUB INTEGRATION settings */ - -// "sbt-github-actions" plugin settings -val JavaCIVersion = "adopt@1.11" -val ScalaCIVersion = "2.13.6" -ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) -ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) - -/* ------------------------------------------------------------------------- */ - /* GROUPED SETTINGS */ // Shared dependencies for all modules. lazy val sharedDependencies = Seq() - // Shared packaging settings for all modules. lazy val packagingSettings = Seq( Compile / mainClass := Some("es.weso.rdfshape.Main"), @@ -43,7 +33,10 @@ lazy val packagingSettings = Seq( // Output filename on "sbt-native-packager" tasks Universal / packageName := (Global / packageName).value ) +ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) +ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) +/* ------------------------------------------------------------------------- */ // Shared compilation settings for all modules. // https://docs.scala-lang.org/overviews/compiler-options/index.html lazy val compilationSettings = Seq( @@ -56,7 +49,6 @@ lazy val compilationSettings = Seq( "-Yrangepos" ) ) - // Scaladoc settings for docs generation. 
Run task "doc" or "server / doc". // https://www.scala-sbt.org/1.x/docs/Howto-Scaladoc.html /* https://github.com/scala/scala/blob/2.13.x/src/scaladoc/scala/tools/nsc/doc/Settings.scala */ @@ -90,7 +82,6 @@ lazy val scaladocSettings: Seq[Def.Setting[_]] = Seq( // Need to generate docs to publish to oss Compile / packageDoc / publishArtifact := true ) - // Setup Mdoc + Docusaurus settings lazy val mdocSettings = Seq( mdocVariables := Map( @@ -105,6 +96,7 @@ lazy val mdocSettings = Seq( "CLIENT_URL" -> "https://rdfshape.weso.es/", "WESOLOCAL_URL" -> "https://github.com/weso/wesolocal/wiki/RDFShape" ), + mdocExtraArguments := Seq("--no-link-hygiene"), /* When creating/publishing the docusaurus site, update the dynamic mdoc and * the static scaladoc first */ docusaurusCreateSite := docusaurusCreateSite @@ -115,7 +107,6 @@ lazy val mdocSettings = Seq( .dependsOn(Compile / unidoc) .value ) - // Unidoc settings, mirroring scaladoc settings lazy val unidocSettings: Seq[Def.Setting[_]] = Seq( // Generate docs for the root project and the server module @@ -150,7 +141,6 @@ lazy val unidocSettings: Seq[Def.Setting[_]] = Seq( "-private" ) ) - // Shared publish settings for all modules. 
lazy val publishSettings = Seq( organization := "es.weso", @@ -187,7 +177,6 @@ lazy val publishSettings = Seq( , publishMavenStyle := true // generate POM, not ivy ) - // Aggregate resolver settings passed down to modules to resolve dependencies // Helper to resolve dependencies from GitHub packages lazy val resolverSettings = Seq( @@ -196,7 +185,6 @@ lazy val resolverSettings = Seq( Resolver.sonatypeRepo("snapshots") ) ) - // Shared settings for the BuildInfo Plugin // See https://github.com/sbt/sbt-buildinfo lazy val buildInfoSettings = Seq( @@ -211,11 +199,7 @@ lazy val buildInfoSettings = Seq( buildInfoPackage := "buildinfo", buildInfoObject := "BuildInfo" ) - lazy val noPublishSettings = publish / skip := true - -/* ------------------------------------------------------------------------- */ - /* PROJECT and MODULE settings */ // Root project: rdfshape lazy val rdfshape = project @@ -250,7 +234,6 @@ lazy val rdfshape = project groovy ) ) - // Server project in /modules: server lazy val server = project .in(file("modules/server")) @@ -289,6 +272,7 @@ lazy val server = project ) ) +/* ------------------------------------------------------------------------- */ // Documentation project, for MDoc + Docusaurus documentation lazy val docs = project .in(file("rdfshape-docs")) @@ -303,14 +287,12 @@ lazy val docs = project name := s"${(Global / packageName).value}-api-docs", moduleName := s"${(Global / packageName).value}-api-docs" ) - lazy val MUnitFramework = new TestFramework("munit.Framework") +/* DEPENDENCY versions */ +lazy val http4sVersion = "1.0.0-M21" +lazy val catsVersion = "2.5.0" /* ------------------------------------------------------------------------- */ - -/* DEPENDENCY versions */ -lazy val http4sVersion = "1.0.0-M21" -lazy val catsVersion = "2.5.0" lazy val mongodbVersion = "4.1.1" lazy val any23Version = "2.2" lazy val rdf4jVersion = "2.2.4" @@ -327,7 +309,6 @@ lazy val scalatagsVersion = "0.7.0" lazy val shaclexVersion = "0.1.91" 
lazy val umlShaclexVersion = "0.0.82" lazy val wesoUtilsVersion = "0.1.98" - // Dependency modules lazy val http4sDsl = "org.http4s" %% "http4s-dsl" % http4sVersion lazy val http4sBlazeServer = @@ -336,34 +317,30 @@ lazy val http4sBlazeClient = "org.http4s" %% "http4s-blaze-client" % http4sVersion lazy val http4sEmberClient = "org.http4s" %% "http4s-ember-client" % http4sVersion -lazy val http4sCirce = "org.http4s" %% "http4s-circe" % http4sVersion - -lazy val catsCore = "org.typelevel" %% "cats-core" % catsVersion -lazy val catsKernel = "org.typelevel" %% "cats-kernel" % catsVersion - -lazy val mongodb = "org.mongodb.scala" %% "mongo-scala-driver" % mongodbVersion - -lazy val any23_core = "org.apache.any23" % "apache-any23-core" % any23Version -lazy val any23_api = "org.apache.any23" % "apache-any23-api" % any23Version +lazy val http4sCirce = "org.http4s" %% "http4s-circe" % http4sVersion +lazy val catsCore = "org.typelevel" %% "cats-core" % catsVersion +lazy val catsKernel = "org.typelevel" %% "cats-kernel" % catsVersion +lazy val mongodb = "org.mongodb.scala" %% "mongo-scala-driver" % mongodbVersion +lazy val any23_core = "org.apache.any23" % "apache-any23-core" % any23Version +lazy val any23_api = "org.apache.any23" % "apache-any23-api" % any23Version lazy val any23_scraper = "org.apache.any23.plugins" % "apache-any23-html-scraper" % "2.2" - -lazy val rdf4j_runtime = "org.eclipse.rdf4j" % "rdf4j-runtime" % rdf4jVersion -lazy val graphvizJava = "guru.nidi" % "graphviz-java" % graphvizJavaVersion -lazy val plantuml = "net.sourceforge.plantuml" % "plantuml" % plantumlVersion - -lazy val logbackClassic = "ch.qos.logback" % "logback-classic" % logbackVersion +lazy val rdf4j_runtime = "org.eclipse.rdf4j" % "rdf4j-runtime" % rdf4jVersion +lazy val graphvizJava = "guru.nidi" % "graphviz-java" % graphvizJavaVersion +lazy val plantuml = "net.sourceforge.plantuml" % "plantuml" % plantumlVersion +lazy val logbackClassic = "ch.qos.logback" % "logback-classic" % 
logbackVersion lazy val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % loggingVersion lazy val groovy = "org.codehaus.groovy" % "groovy" % groovyVersion - -lazy val munit = "org.scalameta" %% "munit" % munitVersion +lazy val munit = "org.scalameta" %% "munit" % munitVersion lazy val munitEffect = "org.typelevel" %% "munit-cats-effect-3" % munitEffectVersion - lazy val scalaj = "org.scalaj" %% "scalaj-http" % scalajVersion lazy val scalatags = "com.lihaoyi" %% "scalatags" % scalatagsVersion // WESO dependencies lazy val shaclex = "es.weso" %% "shexs" % shaclexVersion lazy val umlShaclex = "es.weso" %% "umlshaclex" % umlShaclexVersion lazy val wesoUtils = "es.weso" %% "utilstest" % wesoUtilsVersion +// "sbt-github-actions" plugin settings +val JavaCIVersion = "adopt@1.11" +val ScalaCIVersion = "2.13.6" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala index 7f8055e6..70f65e2e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala @@ -1,24 +1,44 @@ package es.weso.rdfshape.server.wikibase + import es.weso.rdf.nodes._ import org.http4s._ import org.http4s.implicits._ -abstract class Wikibase { - def name: String - def endpointUrl: IRI - def schemaEntityUri(str: String): Uri +/** Abstract representation of a wikibase instance + * + * @param name Given name of the wikibase instance + * @param baseUrl Base URL where the instance is deployed (e.g. 
[[https://www.wikidata.org/]]) + * @param endpointUrl API endpoint of the wikibase instance, where queries are usually made + */ +abstract sealed class Wikibase( + val name: String, + val baseUrl: Uri, + val endpointUrl: IRI +) { + + /** Given a schema identifier, return it's location inside the wikibase instance + * + * @param schema String representation of the schema identifier + * @return Uri where the schema can be accessed + */ + def schemaEntityUri(schema: String): Uri } -case object Wikidata extends Wikibase { - def name = "wikidata" - def url = uri"https://www.wikidata.org" - def endpointUrl: IRI = IRI("https://query.wikidata.org/sparql") +/** A sub-instance of the more general Wikibase class, containing the data required to access [[https://www.wikidata.org/ Wikidata]] + * + * @see {@link es.weso.rdfshape.server.wikibase.Wikibase} + */ +case object Wikidata + extends Wikibase( + name = "wikidata", + baseUrl = uri"https://www.wikidata.org", + endpointUrl = IRI("https://query.wikidata.org/sparql") + ) { - def schemaEntityUri(wdSchema: String): Uri = { - val uri = uri"https://www.wikidata.org".withPath( - Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/${wdSchema}") + def schemaEntityUri(schema: String): Uri = { + val uri = baseUrl.withPath( + Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$schema") ) uri } - } diff --git a/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala b/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala index 5d189184..d5eae5a0 100644 --- a/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala +++ b/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala @@ -18,20 +18,7 @@ object LoggingManager { private val defaultLogbackConfigurationFile = "logback-configurations/logback.groovy" - /** Given a verbosity numeric value, map it to its corresponding logging level - * @param verbosity Verbosity numeric value - * @return A string representing the minimum level of the logs to be shown on 
console - */ - def mapVerbosityValueToLogLevel(verbosity: Int): String = { - verbosity match { - case 0 => LoggingLevel.ERROR // No verbose argument. Show errors. - case 1 => LoggingLevel.WARN // -v. Show warnings. - case 2 => LoggingLevel.INFO // -vv. Show info. - case _ => LoggingLevel.DEBUG // -vvv and forth. Show debug information. - } - } - - /** Set the System Properties that will be read in logback's configuration file to define logback's behavior. + /** Set the System Properties that will be read in logback's configuration file to define logback's behavior * @see setUpLogbackConfiguration * @see setUpLogbackLogLevel */ @@ -63,6 +50,19 @@ object LoggingManager { mapVerbosityValueToLogLevel(verbosity) ) } + + /** Given a verbosity numeric value, map it to its corresponding logging level + * @param verbosity Verbosity numeric value + * @return A string representing the minimum level of the logs to be shown on console + */ + def mapVerbosityValueToLogLevel(verbosity: Int): String = { + verbosity match { + case 0 => LoggingLevel.ERROR // No verbose argument. Show errors. + case 1 => LoggingLevel.WARN // -v. Show warnings. + case 2 => LoggingLevel.INFO // -vv. Show info. + case _ => LoggingLevel.DEBUG // -vvv and forth. Show debug information. + } + } } /** Enum classifying the accepted logging levels by their String representation. 
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index cfddc48e..9de88762 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -13,6 +13,10 @@ module.exports = { onBrokenLinks: "throw", onBrokenMarkdownLinks: "warn", favicon: "favicon.ico", + trailingSlash: true, + customFields: { + docsUrl, + }, themeConfig: { image: "img/preview.png", hideableSidebar: false, @@ -23,8 +27,8 @@ module.exports = { switchConfig: { darkIcon: "🌙", lightIcon: "\u2600", - darkIconStyle: { marginLeft: "2px" }, - lightIconStyle: { marginLeft: "1px" }, + darkIconStyle: {marginLeft: "2px"}, + lightIconStyle: {marginLeft: "1px"}, }, }, navbar: { @@ -37,14 +41,20 @@ module.exports = { // Web docs { to: "/docs", - position: "left", label: "Web docs", + position: "left", + }, + // Scaladoc + { + to: docsUrl, + label: "Scaladoc", + position: "left", }, // Link to repo { href: "https://github.com/weso/rdfshape-api", label: "GitHub", - position: "left", + position: "right", }, ], }, @@ -74,11 +84,11 @@ module.exports = { items: [ { label: "GitHub", - href: "https://github.com/weso", + to: "https://github.com/weso", }, { label: "Twitter", - href: "https://twitter.com/wesoviedo", + to: "https://twitter.com/wesoviedo", }, ], }, @@ -91,7 +101,7 @@ module.exports = { }, { label: "More software by WESO", - href: "https://www.weso.es/#software", + to: "https://www.weso.es/#software", }, ], }, @@ -111,9 +121,7 @@ module.exports = { theme: { customCss: require.resolve("./src/css/custom.css"), }, - sitemap: { - trailingSlash: true, - }, + sitemap: {}, }, ], ], diff --git a/website/package.json b/website/package.json index 9058504d..9b928c4a 100644 --- a/website/package.json +++ b/website/package.json @@ -15,8 +15,8 @@ "write-heading-ids": "docusaurus write-heading-ids" }, "dependencies": { - "@docusaurus/core": "2.0.0-beta.0", - "@docusaurus/preset-classic": "2.0.0-beta.0", + "@docusaurus/core": "^2.0.0-beta.4", + 
"@docusaurus/preset-classic": "^2.0.0-beta.4", "@mdx-js/react": "^1.6.21", "@svgr/webpack": "^5.5.0", "clsx": "^1.1.1", @@ -37,4 +37,4 @@ "last 1 safari version" ] } -} \ No newline at end of file +} diff --git a/website/src/components/HomepageFeatures.js b/website/src/components/HomepageFeatures.js index 934a09d6..c45c7c3f 100644 --- a/website/src/components/HomepageFeatures.js +++ b/website/src/components/HomepageFeatures.js @@ -2,69 +2,70 @@ import React from "react"; import clsx from "clsx"; import styles from "./HomepageFeatures.module.css"; import Link from "@docusaurus/core/lib/client/exports/Link"; +import {customFields} from "../../docusaurus.config" -const docsUrl = "https://weso.github.io/rdfshape-api/api/es/weso/rdfshape/" +const docsUrl = customFields.docsUrl const apiDocsUrl = "https://app.swaggerhub.com/apis/weso/RDFShape" const FeatureList = [ - { - title: "Scaladoc", - Svg: require("../../static/img/scala-icon.svg").default, - description: ( - <> - Check out the automatically generated Scaladoc, up to date with our latest stable - build - - ), - link: docsUrl - }, - { - title: "Web documentation", - Svg: require("../../static/img/webdocs.svg").default, - description: ( - <> - Friendly guides and short articles related to the project and the usage of the API - - ), - link: "/docs" - }, - { - title: "API Docs", - Svg: require("../../static/img/rocket.svg").default, - description: ( - <> - Browse the API Docs and test the API directly in Swagger Hub without having to learn about the - underlying infrastructure - - ), - link: apiDocsUrl - }, + { + title: "Scaladoc", + Svg: require("../../static/img/scala-icon.svg").default, + description: ( + <> + Check out the automatically generated Scaladoc, up to date with our latest stable + build + + ), + link: docsUrl + }, + { + title: "Web documentation", + Svg: require("../../static/img/webdocs.svg").default, + description: ( + <> + Friendly guides and short articles related to the project and the usage of the 
API + + ), + link: "/docs" + }, + { + title: "API Docs", + Svg: require("../../static/img/rocket.svg").default, + description: ( + <> + Browse the API Docs and test the API directly in Swagger Hub without having to learn about the + underlying infrastructure + + ), + link: apiDocsUrl + }, ]; function Feature({Svg, title, description, link}) { - return ( -
-
- -
-
-

{title}

-

{description}

-
-
- ); + return ( +
+
+ +
+
+

{title}

+

{description}

+
+
+ ); } export default function HomepageFeatures() { - return ( -
-
-
- {FeatureList.map((props, idx) => ( - - ))} -
-
-
- ); + return ( +
+
+
+ {FeatureList.map((props, idx) => ( + + ))} +
+
+
+ ); } diff --git a/website/yarn.lock b/website/yarn.lock index fadfadda..9e875a2e 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2,24 +2,24 @@ # yarn lockfile v1 -"@algolia/autocomplete-core@1.0.0-alpha.44": - version "1.0.0-alpha.44" - resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.0.0-alpha.44.tgz#e626dba45f5f3950d6beb0ab055395ef0f7e8bb2" - integrity sha512-2iMXthldMIDXtlbg9omRKLgg1bLo2ZzINAEqwhNjUeyj1ceEyL1ck6FY0VnJpf2LsjmNthHCz2BuFk+nYUeDNA== +"@algolia/autocomplete-core@1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.2.1.tgz#95fc07cfa40b5a38e3f80acd75d1fb94968215a8" + integrity sha512-/SLS6636Wpl7eFiX7eEy0E3wBo60sUm1qRYybJBDt1fs8reiJ1+OSy+dZgrLBfLL4mSFqRIIUHXbVp25QdZ+iw== dependencies: - "@algolia/autocomplete-shared" "1.0.0-alpha.44" + "@algolia/autocomplete-shared" "1.2.1" -"@algolia/autocomplete-preset-algolia@1.0.0-alpha.44": - version "1.0.0-alpha.44" - resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.0.0-alpha.44.tgz#0ea0b255d0be10fbe262e281472dd6e4619b62ba" - integrity sha512-DCHwo5ovzg9k2ejUolGNTLFnIA7GpsrkbNJTy1sFbMnYfBmeK8egZPZnEl7lBTr27OaZu7IkWpTepLVSztZyng== +"@algolia/autocomplete-preset-algolia@1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.2.1.tgz#bda1741823268ff76ba78306259036f000198e01" + integrity sha512-Lf4PpPVgHNXm1ytrnVdrZYV7hAYSCpAI/TrebF8UC6xflPY6sKb1RL/2OfrO9On7SDjPBtNd+6MArSar5JmK0g== dependencies: - "@algolia/autocomplete-shared" "1.0.0-alpha.44" + "@algolia/autocomplete-shared" "1.2.1" -"@algolia/autocomplete-shared@1.0.0-alpha.44": - version "1.0.0-alpha.44" - resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.0.0-alpha.44.tgz#db13902ad1667e455711b77d08cae1a0feafaa48" - integrity 
sha512-2oQZPERYV+yNx/yoVWYjZZdOqsitJ5dfxXJjL18yczOXH6ujnsq+DTczSrX+RjzjQdVeJ1UAG053EJQF/FOiMg== +"@algolia/autocomplete-shared@1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.2.1.tgz#96f869fb2285ed6a34a5ac2509722c065df93016" + integrity sha512-RHCwcXAYFwDXTlomstjWRFIzOfyxtQ9KmViacPE5P5hxUSSjkmG3dAb77xdydift1PaZNbho5TNTCi5UZe0RpA== "@algolia/cache-browser-local-storage@4.9.3": version "4.9.3" @@ -1145,25 +1145,25 @@ "@babel/helper-validator-identifier" "^7.14.5" to-fast-properties "^2.0.0" -"@docsearch/css@3.0.0-alpha.36": - version "3.0.0-alpha.36" - resolved "https://registry.yarnpkg.com/@docsearch/css/-/css-3.0.0-alpha.36.tgz#0af69a86b845974d0f8cab62db0218f66b6ad2d6" - integrity sha512-zSN2SXuZPDqQaSFzYa1kOwToukqzhLHG7c66iO+/PlmWb6/RZ5cjTkG6VCJynlohRWea7AqZKWS/ptm8kM2Dmg== +"@docsearch/css@3.0.0-alpha.39": + version "3.0.0-alpha.39" + resolved "https://registry.yarnpkg.com/@docsearch/css/-/css-3.0.0-alpha.39.tgz#1ebd390d93e06aad830492f5ffdc8e05d058813f" + integrity sha512-lr10MFTgcR3NRea/FtJ7uNtIpQz0XVwYxbpO5wxykgfHu1sxZTr6zwkuPquRgFYXnccxsTvfoIiK3rMH0fLr/w== -"@docsearch/react@^3.0.0-alpha.33": - version "3.0.0-alpha.36" - resolved "https://registry.yarnpkg.com/@docsearch/react/-/react-3.0.0-alpha.36.tgz#f2dbd53ba9c389bc19aea89a3ad21782fa6b4bb5" - integrity sha512-synYZDHalvMzesFiy7kK+uoz4oTdWSTbe2cU+iiUjwFMyQ+WWjWwGVnvcvk+cjj9pRCVaZo5y5WpqNXq1j8k9Q== +"@docsearch/react@^3.0.0-alpha.39": + version "3.0.0-alpha.39" + resolved "https://registry.yarnpkg.com/@docsearch/react/-/react-3.0.0-alpha.39.tgz#bbd253f6fc591f63c1a171e7ef2da26b253164d9" + integrity sha512-urTIt82tan6CU+D2kO6xXpWQom/r1DA7L/55m2JiCIK/3SLh2z15FJFVN2abeK7B4wl8pCfWunYOwCsSHhWDLA== dependencies: - "@algolia/autocomplete-core" "1.0.0-alpha.44" - "@algolia/autocomplete-preset-algolia" "1.0.0-alpha.44" - "@docsearch/css" "3.0.0-alpha.36" + "@algolia/autocomplete-core" "1.2.1" + "@algolia/autocomplete-preset-algolia" "1.2.1" + 
"@docsearch/css" "3.0.0-alpha.39" algoliasearch "^4.0.0" -"@docusaurus/core@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-2.0.0-beta.0.tgz#05506ee02e7d40e9f4c8d7b4f918d26d3b191159" - integrity sha512-xWwpuEwFRKJmZvNGOpr/dyRDnx/psckLPsozQTg2hu3u81Wqu9gigWgYK/C2fPlEjxMcVw0/2WH+zwpbyWmF2Q== +"@docusaurus/core@2.0.0-beta.4", "@docusaurus/core@^2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-2.0.0-beta.4.tgz#b41c5064c8737405cfceb1a373c9c5aa3410fd95" + integrity sha512-ITa976MPFl9KbYchMOWCCX6SU6EFDSdGeGOHtpaNcrJ9e9Sj7o77fKmMH/ciShwz1g8brTm3VxZ0FwleU8lTig== dependencies: "@babel/core" "^7.12.16" "@babel/generator" "^7.12.15" @@ -1175,47 +1175,49 @@ "@babel/runtime" "^7.12.5" "@babel/runtime-corejs3" "^7.12.13" "@babel/traverse" "^7.12.13" - "@docusaurus/cssnano-preset" "2.0.0-beta.0" + "@docusaurus/cssnano-preset" "2.0.0-beta.4" "@docusaurus/react-loadable" "5.5.0" - "@docusaurus/types" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - "@docusaurus/utils-validation" "2.0.0-beta.0" - "@endiliey/static-site-generator-webpack-plugin" "^4.0.0" + "@docusaurus/types" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + "@docusaurus/utils-common" "2.0.0-beta.4" + "@docusaurus/utils-validation" "2.0.0-beta.4" + "@slorber/static-site-generator-webpack-plugin" "^4.0.0" "@svgr/webpack" "^5.5.0" autoprefixer "^10.2.5" babel-loader "^8.2.2" babel-plugin-dynamic-import-node "2.3.0" - boxen "^5.0.0" - chalk "^4.1.0" + boxen "^5.0.1" + chalk "^4.1.1" chokidar "^3.5.1" - clean-css "^5.1.1" + clean-css "^5.1.2" commander "^5.1.0" - copy-webpack-plugin "^8.1.0" + copy-webpack-plugin "^9.0.0" core-js "^3.9.1" css-loader "^5.1.1" - css-minimizer-webpack-plugin "^2.0.0" - cssnano "^5.0.1" + css-minimizer-webpack-plugin "^3.0.1" + cssnano "^5.0.4" del "^6.0.0" detect-port "^1.3.0" + escape-html "^1.0.3" eta "^1.12.1" express "^4.17.1" file-loader "^6.2.0" - fs-extra "^9.1.0" 
+ fs-extra "^10.0.0" github-slugger "^1.3.0" globby "^11.0.2" html-minifier-terser "^5.1.1" html-tags "^3.1.0" - html-webpack-plugin "^5.2.0" + html-webpack-plugin "^5.3.2" import-fresh "^3.3.0" is-root "^2.1.0" leven "^3.1.0" lodash "^4.17.20" - mini-css-extract-plugin "^1.4.0" + mini-css-extract-plugin "^1.6.0" module-alias "^2.2.2" nprogress "^0.2.0" - postcss "^8.2.10" - postcss-loader "^5.2.0" - prompts "^2.4.0" + postcss "^8.2.15" + postcss-loader "^5.3.0" + prompts "^2.4.1" react-dev-utils "^11.0.1" react-error-overlay "^6.0.9" react-helmet "^6.1.0" @@ -1225,90 +1227,93 @@ react-router-config "^5.1.1" react-router-dom "^5.2.0" resolve-pathname "^3.0.0" - rtl-detect "^1.0.2" + rtl-detect "^1.0.3" semver "^7.3.4" serve-handler "^6.1.3" shelljs "^0.8.4" std-env "^2.2.1" strip-ansi "^6.0.0" - terser-webpack-plugin "^5.1.1" - tslib "^2.1.0" + terser-webpack-plugin "^5.1.3" + tslib "^2.2.0" update-notifier "^5.1.0" url-loader "^4.1.1" - wait-on "^5.2.1" - webpack "^5.28.0" - webpack-bundle-analyzer "^4.4.0" + wait-on "^5.3.0" + webpack "^5.40.0" + webpack-bundle-analyzer "^4.4.2" webpack-dev-server "^3.11.2" - webpack-merge "^5.7.3" + webpack-merge "^5.8.0" webpackbar "^5.0.0-3" -"@docusaurus/cssnano-preset@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-beta.0.tgz#a79223479666059565d60a505bed2bbcac770384" - integrity sha512-gqQHeQCDHZDd5NaiKZwDiyg75sBCqDyAsvmFukkDAty8xE7u9IhzbOQKvCAtwseuvzu2BNN41gnJ8bz7vZzQiw== +"@docusaurus/cssnano-preset@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-beta.4.tgz#a40c0bee39143a531ca4dde05bb3a84bec416668" + integrity sha512-KsmFEob0ElffnFFbz93wcYH4IncU4LDnKBerdomU0Wdg/vXTLo3Q7no8df9yjbcBXVRaSX+/tNFapY9Iu/4Cew== dependencies: - cssnano-preset-advanced "^5.0.0" - postcss "^8.2.10" - postcss-sort-media-queries "^3.8.9" + cssnano-preset-advanced "^5.1.1" + postcss 
"^8.2.15" + postcss-sort-media-queries "^3.10.11" -"@docusaurus/mdx-loader@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-beta.0.tgz#7a58933994b2face62e34698db2f9c88c53c6d61" - integrity sha512-oQLS2ZeUnqw79CV37glglZpaYgFfA5Az5lT83m5tJfMUZjoK4ehG1XWBeUzWy8QQNI452yAID8jz8jihEQeCcw== +"@docusaurus/mdx-loader@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-beta.4.tgz#cc1a88d693078be56c82571d1d88004dad0f18f4" + integrity sha512-dwYKFKcsgiMB/TECoieKnwQemBAozd2a+cm4xzrWhDzElvwlQPo/j45OOUb6U/H8NJp7DnAynLBqSyKJ3YZb4g== dependencies: "@babel/parser" "^7.12.16" "@babel/traverse" "^7.12.13" - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" "@mdx-js/mdx" "^1.6.21" "@mdx-js/react" "^1.6.21" + chalk "^4.1.1" escape-html "^1.0.3" file-loader "^6.2.0" - fs-extra "^9.1.0" + fs-extra "^10.0.0" github-slugger "^1.3.0" - gray-matter "^4.0.2" + gray-matter "^4.0.3" mdast-util-to-string "^2.0.0" remark-emoji "^2.1.0" stringify-object "^3.3.0" unist-util-visit "^2.0.2" url-loader "^4.1.1" - webpack "^5.28.0" - -"@docusaurus/plugin-content-blog@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-beta.0.tgz#ea7d3679ab252e8f0e58aaf80f1fc6001c72c755" - integrity sha512-lz63i5k/23RJ3Rk/2fIsYAoD8Wua3b5b0AbH2JoOhQu1iAIQiV8m91Z3XALBSzA3nBtAOIweNI7yzWL+JFSTvw== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/mdx-loader" "2.0.0-beta.0" - "@docusaurus/types" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - "@docusaurus/utils-validation" "2.0.0-beta.0" - chalk "^4.1.0" + webpack "^5.40.0" + +"@docusaurus/plugin-content-blog@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved 
"https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-beta.4.tgz#dfa70cc364debd77e28683b733b254d6abec197c" + integrity sha512-NyLqoem/r/m8mNO3H1PbbPayA5KjgRTeB5T7j949uvGwlK34c+W6bSvr3OSRJdmFXqhFL4CG8E8wbSq7h+8WEA== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/mdx-loader" "2.0.0-beta.4" + "@docusaurus/types" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + "@docusaurus/utils-validation" "2.0.0-beta.4" + chalk "^4.1.1" + escape-string-regexp "^4.0.0" feed "^4.2.2" - fs-extra "^9.1.0" + fs-extra "^10.0.0" globby "^11.0.2" loader-utils "^2.0.0" lodash "^4.17.20" reading-time "^1.3.0" remark-admonitions "^1.2.1" - tslib "^2.1.0" - webpack "^5.28.0" - -"@docusaurus/plugin-content-docs@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-beta.0.tgz#a5a1e0e95e499eefee53e4f61aeb99ac4a669648" - integrity sha512-WdDQUh2rRCbfJswVc0vY9EaAspxgziqpVEZja8+BmQR/TZh7HuLplT6GJbiFbE4RvwM3+PwG/jHMPglYDK60kw== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/mdx-loader" "2.0.0-beta.0" - "@docusaurus/types" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - "@docusaurus/utils-validation" "2.0.0-beta.0" - chalk "^4.1.0" + tslib "^2.2.0" + webpack "^5.40.0" + +"@docusaurus/plugin-content-docs@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-beta.4.tgz#6322bf44fd43ba2f1e79711d2651e1143c7b725a" + integrity sha512-aVYycpOvtgPQ78a10jakCtrI7DEAffw+zVdZT6tgO8QIn5hNPcr5NB7Ms3kSZw83fMZwJqStHHGp0y13zt/gLw== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/mdx-loader" "2.0.0-beta.4" + "@docusaurus/types" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + "@docusaurus/utils-validation" "2.0.0-beta.4" + chalk "^4.1.1" combine-promises "^1.1.0" + escape-string-regexp "^4.0.0" execa "^5.0.0" - fs-extra "^9.1.0" + fs-extra 
"^10.0.0" globby "^11.0.2" import-fresh "^3.2.2" js-yaml "^4.0.0" @@ -1316,81 +1321,80 @@ lodash "^4.17.20" remark-admonitions "^1.2.1" shelljs "^0.8.4" - tslib "^2.1.0" + tslib "^2.2.0" utility-types "^3.10.0" - webpack "^5.28.0" - -"@docusaurus/plugin-content-pages@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-beta.0.tgz#1cab3ebe0a08be74576f10c95675291bf84f848e" - integrity sha512-mk5LVVSvn+HJPKBaAs/Pceq/hTGxF2LVBvJEquuQz0NMAW3QdBWaYRRpOrL9CO8v+ygn5RuLslXsyZBsDNuhww== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/mdx-loader" "2.0.0-beta.0" - "@docusaurus/types" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - "@docusaurus/utils-validation" "2.0.0-beta.0" + webpack "^5.40.0" + +"@docusaurus/plugin-content-pages@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-beta.4.tgz#47bdaa8d8711502f6ba75ba036ebd64a3991034e" + integrity sha512-VZ/iuxT1kgBh/1+W3Li88UZVjqHtHOt4TyFoVwHmf2p91BPHiF7zpiLb4hYL8s694/V+AdfWf4ostSyEoeMx8A== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/mdx-loader" "2.0.0-beta.4" + "@docusaurus/types" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + "@docusaurus/utils-validation" "2.0.0-beta.4" globby "^11.0.2" lodash "^4.17.20" - minimatch "^3.0.4" remark-admonitions "^1.2.1" - slash "^3.0.0" - tslib "^2.1.0" - webpack "^5.28.0" - -"@docusaurus/plugin-debug@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-beta.0.tgz#bee672b8858d88bdb229d4301785ff4692ebd17f" - integrity sha512-m75sZdF8Yccxfih3qfdQg9DucMTrYBnmeTA8GNmdVaK701Ip8t50d1pDJchtu0FSEh6vzVB9C6D2YD5YgVFp8A== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/types" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - react-json-view "^1.21.1" tslib "^2.1.0" + webpack 
"^5.40.0" -"@docusaurus/plugin-google-analytics@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-beta.0.tgz#ee287fb991202d8e9b792129dcc5542ef3ccd6c9" - integrity sha512-7lHrg1L+adc8VbiaLexa15i4fdq4MRPUTLMxRPAWz+QskhisW89Ryi2/gDmfMNqLblX84Qg2RASa+2gqO4wepw== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - -"@docusaurus/plugin-google-gtag@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-beta.0.tgz#4836770130cf54ff2cd83affbff9644ee7293e9e" - integrity sha512-V7zaYbhAMv0jexm5H/5sAnoM1GHibcn9QQk5UWC++x1kE0KRuLDZHV+9OyvW5wr0wWFajod/b88SpUpSMF5u+g== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - -"@docusaurus/plugin-sitemap@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-beta.0.tgz#985d4cc3af86499f616ced024ba1fab8329e601d" - integrity sha512-dvmk8Sr+6pBkiKDb7Rjdp0GeFDWPUlayoJWK3fN3g0Fno6uxFfYhNZyXJ+ObyCA7HoW5rzeBMiO+uAja19JXTg== +"@docusaurus/plugin-debug@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-beta.4.tgz#7a69fee980a352cd338dba24d8e0d67f6f64ef0b" + integrity sha512-jc9o45NUuhVnFcoq6/6juxJQGgD2Q71IUokoOgw3sytHHOv1jv+eLWP1LDX71MHA1ElZ1MZTlz5mCd1wlzdCOw== dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/types" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - "@docusaurus/utils-validation" "2.0.0-beta.0" - fs-extra "^9.1.0" - sitemap "^6.3.6" + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/types" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + react-json-view "^1.21.3" tslib "^2.1.0" -"@docusaurus/preset-classic@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved 
"https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-2.0.0-beta.0.tgz#79eb4366e6b5eb7061370019127e40172432d770" - integrity sha512-cFpR0UaAeUt5qVx1bpidhlar6tiRNITIQlxP4bOVsjbxVTZhZ/cNuIz7C+2zFPCuKIflGXdTIQOrucPmd7z51Q== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/plugin-content-blog" "2.0.0-beta.0" - "@docusaurus/plugin-content-docs" "2.0.0-beta.0" - "@docusaurus/plugin-content-pages" "2.0.0-beta.0" - "@docusaurus/plugin-debug" "2.0.0-beta.0" - "@docusaurus/plugin-google-analytics" "2.0.0-beta.0" - "@docusaurus/plugin-google-gtag" "2.0.0-beta.0" - "@docusaurus/plugin-sitemap" "2.0.0-beta.0" - "@docusaurus/theme-classic" "2.0.0-beta.0" - "@docusaurus/theme-search-algolia" "2.0.0-beta.0" +"@docusaurus/plugin-google-analytics@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-beta.4.tgz#88d17bd1a2b5da35fe625fae43d32430595c087e" + integrity sha512-mqMEnfMKIoR1UfIX+jiAcUolwYntqSNaW8Gg2tg8dlGvC3payT1gpNJaew6TWyrtE29vuZz6a830bIXBYm4uAA== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + +"@docusaurus/plugin-google-gtag@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-beta.4.tgz#52f5922857680dfb2acc2099f08ac97d3edcb725" + integrity sha512-MZ0Rr6LBZLKMVFXxV7Kr+l0U3Yz/Yn8L2E5z9DbgVi+9tyLn4xlMzuMPG3gN9TZ8kPcQ1ZWwv9crA+138UzIkw== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + +"@docusaurus/plugin-sitemap@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-beta.4.tgz#60b189107af772ef2bbc94b83055eff8a3013da3" + integrity sha512-0sU1aMQmMN7fE3TlSM2wBZN/gFsuvo79DYxw8TIVtNakA84oDxurH/rhDQHwJ34JQufm5CuWNC1ICHtyI3qyWw== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/types" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + 
"@docusaurus/utils-common" "2.0.0-beta.4" + "@docusaurus/utils-validation" "2.0.0-beta.4" + fs-extra "^10.0.0" + sitemap "^7.0.0" + tslib "^2.2.0" + +"@docusaurus/preset-classic@^2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-2.0.0-beta.4.tgz#7f57be3368ed645ab634928d8564fe29b45136cd" + integrity sha512-fW8/iyGLJfBTtbCBQtnRcbDa+ZZMq6Ak20+8+ORB8mzjK4BNYmt9wIbfq0oV9/QBLyryQBYcsRimJoXpLZmWOg== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/plugin-content-blog" "2.0.0-beta.4" + "@docusaurus/plugin-content-docs" "2.0.0-beta.4" + "@docusaurus/plugin-content-pages" "2.0.0-beta.4" + "@docusaurus/plugin-debug" "2.0.0-beta.4" + "@docusaurus/plugin-google-analytics" "2.0.0-beta.4" + "@docusaurus/plugin-google-gtag" "2.0.0-beta.4" + "@docusaurus/plugin-sitemap" "2.0.0-beta.4" + "@docusaurus/theme-classic" "2.0.0-beta.4" + "@docusaurus/theme-search-algolia" "2.0.0-beta.4" "@docusaurus/react-loadable@5.5.0": version "5.5.0" @@ -1399,110 +1403,112 @@ dependencies: prop-types "^15.6.2" -"@docusaurus/theme-classic@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-2.0.0-beta.0.tgz#0ad74264dc592590bd7d8a6f6327cb83bbabc665" - integrity sha512-cBNtwAyg3be7Gk41FazMtgyibAcfuYaGHhGHIDRsXfc/qp3RhbiGiei7tyh200QT0NgKZxiVQy/r4d0mtjC++Q== - dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/plugin-content-blog" "2.0.0-beta.0" - "@docusaurus/plugin-content-docs" "2.0.0-beta.0" - "@docusaurus/plugin-content-pages" "2.0.0-beta.0" - "@docusaurus/theme-common" "2.0.0-beta.0" - "@docusaurus/types" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - "@docusaurus/utils-validation" "2.0.0-beta.0" +"@docusaurus/theme-classic@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-2.0.0-beta.4.tgz#0f30f8d22770ab8bb2e2cacb006f2a2f675e16ce" + integrity 
sha512-gekEt/YuAEs7CLEJhBC5mE3AqXiDNL6U3WI9emokatpbPY7B12DLJ11QWJZ4mfKYWHuiTwPeybXkOySWMLuaaA== + dependencies: + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/plugin-content-blog" "2.0.0-beta.4" + "@docusaurus/plugin-content-docs" "2.0.0-beta.4" + "@docusaurus/plugin-content-pages" "2.0.0-beta.4" + "@docusaurus/theme-common" "2.0.0-beta.4" + "@docusaurus/types" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + "@docusaurus/utils-common" "2.0.0-beta.4" + "@docusaurus/utils-validation" "2.0.0-beta.4" "@mdx-js/mdx" "^1.6.21" "@mdx-js/react" "^1.6.21" - chalk "^4.1.0" + chalk "^4.1.1" clsx "^1.1.1" - copy-text-to-clipboard "^3.0.0" - fs-extra "^9.1.0" + copy-text-to-clipboard "^3.0.1" + fs-extra "^10.0.0" globby "^11.0.2" - infima "0.2.0-alpha.23" + infima "0.2.0-alpha.29" lodash "^4.17.20" parse-numeric-range "^1.2.0" - postcss "^8.2.10" - prism-react-renderer "^1.1.1" + postcss "^8.2.15" + prism-react-renderer "^1.2.1" prismjs "^1.23.0" prop-types "^15.7.2" react-router-dom "^5.2.0" rtlcss "^3.1.2" -"@docusaurus/theme-common@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-2.0.0-beta.0.tgz#3674ef6482cc39efa034fd8d8b1c831588896329" - integrity sha512-2rcVmQpvbdAgnzTWuM7Bfpu+2TQm928bhlvxn226jQy7IYz8ySRlIode63HhCtpx03hpdMCkrK6HxhfEcvHjQg== +"@docusaurus/theme-common@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-2.0.0-beta.4.tgz#a60527fd436691621b10aeecfac09bf0feece019" + integrity sha512-RJ78rfb/K2dc/u/WDCZB8Q8mj19l7UtDx3F1yFC4WMwAd5tT8V5xlKc5UpHVJrKdc1c3Z4g+ki0wFm+LpCZj0w== dependencies: - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/plugin-content-blog" "2.0.0-beta.0" - "@docusaurus/plugin-content-docs" "2.0.0-beta.0" - "@docusaurus/plugin-content-pages" "2.0.0-beta.0" - "@docusaurus/types" "2.0.0-beta.0" + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/plugin-content-blog" "2.0.0-beta.4" + 
"@docusaurus/plugin-content-docs" "2.0.0-beta.4" + "@docusaurus/plugin-content-pages" "2.0.0-beta.4" + "@docusaurus/types" "2.0.0-beta.4" + clsx "^1.1.1" + fs-extra "^10.0.0" tslib "^2.1.0" -"@docusaurus/theme-search-algolia@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-beta.0.tgz#bfdee3981d8da72377b9045459950686d28a01fd" - integrity sha512-/GhgAm4yuwqTXWTsWnqpFYxpjTv+t45Wk8q/LmTVINa+A7b6jkMkch2lygagIt69/ufDm2Uw6eYhgrmF4DJqfQ== +"@docusaurus/theme-search-algolia@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-beta.4.tgz#0c2051523428c4486ef1dd7cb5271b0d871f5c8e" + integrity sha512-W/DfGhlAe1Vl+IJiL9rCw8yswdUrX0lTyCMNRAFi749YN4vCWo2RoxylbUuWoV6lUKoIYfj3EGyotRT2OLqtZw== dependencies: - "@docsearch/react" "^3.0.0-alpha.33" - "@docusaurus/core" "2.0.0-beta.0" - "@docusaurus/theme-common" "2.0.0-beta.0" - "@docusaurus/utils" "2.0.0-beta.0" - "@docusaurus/utils-validation" "2.0.0-beta.0" + "@docsearch/react" "^3.0.0-alpha.39" + "@docusaurus/core" "2.0.0-beta.4" + "@docusaurus/theme-common" "2.0.0-beta.4" + "@docusaurus/utils" "2.0.0-beta.4" + "@docusaurus/utils-validation" "2.0.0-beta.4" algoliasearch "^4.8.4" algoliasearch-helper "^3.3.4" clsx "^1.1.1" eta "^1.12.1" lodash "^4.17.20" -"@docusaurus/types@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-2.0.0-beta.0.tgz#f473f417bdf690cfd52611ddf6d89ff939d1f2a4" - integrity sha512-z9PI+GbtYwqTXnkX4/a/A6psDX2p8N2uWlN2f4ifrm8WY4WhR9yiTOh0uo0pIqqaUQQvkEq3o5hOXuXLECEs+w== +"@docusaurus/types@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-2.0.0-beta.4.tgz#9eef0a88b008ebd65bb9870b7ff0050de0e620c4" + integrity sha512-2aMCliUCBYhZO8UiiPIKpRu2KECtqt0nRu44EbN6rj1STf695AIOhJC1Zo5TiuW2WbiljSbkJTgG3XdBZ3FUBw== dependencies: 
commander "^5.1.0" joi "^17.4.0" querystring "0.2.0" - webpack "^5.28.0" - webpack-merge "^5.7.3" + webpack "^5.40.0" + webpack-merge "^5.8.0" -"@docusaurus/utils-validation@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.0.0-beta.0.tgz#3e6491c269a397fed29717a1cb69109df9483461" - integrity sha512-ELl/FVJ6xBz35TisZ1NmJhjbiVXDeU++K531PEFPCPmwnQPh7S6hZXdPnR71/Kc3BmuN9X2ZkwGOqNKVfys2Bg== +"@docusaurus/utils-common@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-2.0.0-beta.4.tgz#eb2e5876f5f79d037fa7e1867177658661b9c1c2" + integrity sha512-QaKs96/95ztKgZqHMUS/vNl+GzZ/6vKVEPjBXWt7Fdhg2soT1Iu4cShnibEO5HaVlwSfnJbVmDLVm8phQRdr0A== dependencies: - "@docusaurus/utils" "2.0.0-beta.0" - chalk "^4.1.0" + "@docusaurus/types" "2.0.0-beta.4" + tslib "^2.2.0" + +"@docusaurus/utils-validation@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.0.0-beta.4.tgz#417ff389d61aab4c6544f169e31bb86573b518df" + integrity sha512-t1sxSeyVU02NkcFhPvE7eQFA0CFUst68hTnie6ZS3ToY3nlzdbYRPOAZY5MPr3zRMwum6yFAXgqVA+5fnR0OGg== + dependencies: + "@docusaurus/utils" "2.0.0-beta.4" + chalk "^4.1.1" joi "^17.4.0" tslib "^2.1.0" -"@docusaurus/utils@2.0.0-beta.0": - version "2.0.0-beta.0" - resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.0.0-beta.0.tgz#6f2690fd6fcd942f0d690db1dffb96742762deb3" - integrity sha512-bvrT1EQu0maavr0Hb/lke9jmpzgVL/9tn5VQtbyahf472eJFY0bQDExllDrHK+l784SUvucqX0iaQeg0q6ySUw== +"@docusaurus/utils@2.0.0-beta.4": + version "2.0.0-beta.4" + resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.0.0-beta.4.tgz#6e572371b0a59360b49102d014579f5364f1d8da" + integrity sha512-6nI3ETBp0ZSt5yp5Fc5nthQjR1MmLgl2rXC3hcscrSUZx0QvzJFzTiRgD9EAIJtR/i2JkUK18eaFiBjMBoXEbQ== dependencies: - "@docusaurus/types" "2.0.0-beta.0" + "@docusaurus/types" 
"2.0.0-beta.4" "@types/github-slugger" "^1.3.0" - chalk "^4.1.0" + chalk "^4.1.1" escape-string-regexp "^4.0.0" - fs-extra "^9.1.0" - gray-matter "^4.0.2" + fs-extra "^10.0.0" + globby "^11.0.4" + gray-matter "^4.0.3" lodash "^4.17.20" + micromatch "^4.0.4" resolve-pathname "^3.0.0" - tslib "^2.1.0" - -"@endiliey/static-site-generator-webpack-plugin@^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@endiliey/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.0.tgz#94bfe58fd83aeda355de797fcb5112adaca3a6b1" - integrity sha512-3MBqYCs30qk1OBRC697NqhGouYbs71D1B8hrk/AFJC6GwF2QaJOQZtA1JYAaGSe650sZ8r5ppRTtCRXepDWlng== - dependencies: - bluebird "^3.7.1" - cheerio "^0.22.0" - eval "^0.1.4" - url "^0.11.0" - webpack-sources "^1.4.3" + tslib "^2.2.0" "@hapi/hoek@^9.0.0": version "9.2.0" @@ -1599,6 +1605,17 @@ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== +"@slorber/static-site-generator-webpack-plugin@^4.0.0": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.1.tgz#0c8852146441aaa683693deaa5aee2f991d94841" + integrity sha512-PSv4RIVO1Y3kvHxjvqeVisk3E9XFoO04uwYBDWe217MFqKspplYswTuKLiJu0aLORQWzuQjfVsSlLPojwfYsLw== + dependencies: + bluebird "^3.7.1" + cheerio "^0.22.0" + eval "^0.1.4" + url "^0.11.0" + webpack-sources "^1.4.3" + "@svgr/babel-plugin-add-jsx-attribute@^5.4.0": version "5.4.0" resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" @@ -1735,10 +1752,10 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.48.tgz#18dc8091b285df90db2f25aa7d906cfc394b7f74" integrity 
sha512-LfZwXoGUDo0C3me81HXgkBg5CTQYb6xzEl+fNmbO4JdRiSKQ8A0GD1OBBvKAIsbCUgoyAty7m99GqqMQe784ew== -"@types/estree@^0.0.47": - version "0.0.47" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.47.tgz#d7a51db20f0650efec24cd04994f523d93172ed4" - integrity sha512-c5ciR06jK8u9BstrmJyO97m+klJrrhCf9u3rLu3DEAJBirxRqSCvDQoYKmxuYwQI5SZChAWu+tq9oVlGRuzPAg== +"@types/estree@^0.0.50": + version "0.0.50" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.50.tgz#1e0caa9364d3fccd2931c3ed96fdbeaa5d4cca83" + integrity sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw== "@types/github-slugger@^1.3.0": version "1.3.0" @@ -1770,6 +1787,11 @@ resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad" integrity sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA== +"@types/json-schema@^7.0.8": + version "7.0.8" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.8.tgz#edf1bf1dbf4e04413ca8e5b17b3b7d7d54b59818" + integrity sha512-YSBPTLTVm2e2OoQIDYx8HaeWJ5tTToLH67kXR7zYNGupXMEHa2++G8k+DczX2cFVgalypqtyZIcU19AFcmOpmg== + "@types/mdast@^3.0.0": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.3.tgz#2d7d671b1cd1ea3deb306ea75036c2a0407d2deb" @@ -1787,10 +1809,10 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-15.12.2.tgz#1f2b42c4be7156ff4a6f914b2fb03d05fa84e38d" integrity sha512-zjQ69G564OCIWIOHSXyQEEDpdpGl+G348RAKY0XXy9Z5kU9Vzv1GMNnkar/ZJ8dzXB3COzD9Mo9NtRZ4xfgUww== -"@types/node@^14.14.28": - version "14.17.3" - resolved "https://registry.yarnpkg.com/@types/node/-/node-14.17.3.tgz#6d327abaa4be34a74e421ed6409a0ae2f47f4c3d" - integrity sha512-e6ZowgGJmTuXa3GyaPbTGxX17tnThl2aSSizrFthQ7m9uLGZBXiGhgE55cjRZTF5kjZvYn9EOPOMljdjwbflxw== +"@types/node@^15.0.1": + version "15.14.5" + resolved 
"https://registry.yarnpkg.com/@types/node/-/node-15.14.5.tgz#7b5b3532053fd14c771ad6598a4ee2c7a85aceca" + integrity sha512-6ewfMNmkumZieB/EeJ4cdP1bbJyOlOt5MTwbKMr7WKxyCt2j09H8YWRK6zOd/Jh35Vu/gls39ZUmeu4vHu1WKQ== "@types/parse-json@^4.0.0": version "4.0.0" @@ -1819,125 +1841,125 @@ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== -"@webassemblyjs/ast@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.0.tgz#a5aa679efdc9e51707a4207139da57920555961f" - integrity sha512-kX2W49LWsbthrmIRMbQZuQDhGtjyqXfEmmHyEi4XWnSZtPmxY0+3anPIzsnRb45VH/J55zlOfWvZuY47aJZTJg== +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== dependencies: - "@webassemblyjs/helper-numbers" "1.11.0" - "@webassemblyjs/helper-wasm-bytecode" "1.11.0" + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" -"@webassemblyjs/floating-point-hex-parser@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.0.tgz#34d62052f453cd43101d72eab4966a022587947c" - integrity sha512-Q/aVYs/VnPDVYvsCBL/gSgwmfjeCb4LW8+TMrO3cSzJImgv8lxxEPM2JA5jMrivE7LSz3V+PFqtMbls3m1exDA== +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== -"@webassemblyjs/helper-api-error@1.11.0": - version "1.11.0" - 
resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.0.tgz#aaea8fb3b923f4aaa9b512ff541b013ffb68d2d4" - integrity sha512-baT/va95eXiXb2QflSx95QGT5ClzWpGaa8L7JnJbgzoYeaA27FCvuBXU758l+KXWRndEmUXjP0Q5fibhavIn8w== +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== -"@webassemblyjs/helper-buffer@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.0.tgz#d026c25d175e388a7dbda9694e91e743cbe9b642" - integrity sha512-u9HPBEl4DS+vA8qLQdEQ6N/eJQ7gT7aNvMIo8AAWvAl/xMrcOSiI2M0MAnMCy3jIFke7bEee/JwdX1nUpCtdyA== +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== -"@webassemblyjs/helper-numbers@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.0.tgz#7ab04172d54e312cc6ea4286d7d9fa27c88cd4f9" - integrity sha512-DhRQKelIj01s5IgdsOJMKLppI+4zpmcMQ3XboFPLwCpSNH6Hqo1ritgHgD0nqHeSYqofA6aBN/NmXuGjM1jEfQ== +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.0" - "@webassemblyjs/helper-api-error" "1.11.0" + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" 
"1.11.1" "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.0.tgz#85fdcda4129902fe86f81abf7e7236953ec5a4e1" - integrity sha512-MbmhvxXExm542tWREgSFnOVo07fDpsBJg3sIl6fSp9xuu75eGz5lz31q7wTLffwL3Za7XNRCMZy210+tnsUSEA== +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== -"@webassemblyjs/helper-wasm-section@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.0.tgz#9ce2cc89300262509c801b4af113d1ca25c1a75b" - integrity sha512-3Eb88hcbfY/FCukrg6i3EH8H2UsD7x8Vy47iVJrP967A9JGqgBVL9aH71SETPx1JrGsOUVLo0c7vMCN22ytJew== +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== dependencies: - "@webassemblyjs/ast" "1.11.0" - "@webassemblyjs/helper-buffer" "1.11.0" - "@webassemblyjs/helper-wasm-bytecode" "1.11.0" - "@webassemblyjs/wasm-gen" "1.11.0" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" -"@webassemblyjs/ieee754@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.0.tgz#46975d583f9828f5d094ac210e219441c4e6f5cf" - integrity sha512-KXzOqpcYQwAfeQ6WbF6HXo+0udBNmw0iXDmEK5sFlmQdmND+tr773Ti8/5T/M6Tl/413ArSJErATd8In3B+WBA== +"@webassemblyjs/ieee754@1.11.1": + version 
"1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.0.tgz#f7353de1df38aa201cba9fb88b43f41f75ff403b" - integrity sha512-aqbsHa1mSQAbeeNcl38un6qVY++hh8OpCOzxhixSYgbRfNWcxJNJQwe2rezK9XEcssJbbWIkblaJRwGMS9zp+g== +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.0.tgz#86e48f959cf49e0e5091f069a709b862f5a2cadf" - integrity sha512-A/lclGxH6SpSLSyFowMzO/+aDEPU4hvEiooCMXQPcQFPPJaYcPQNKGOCLUySJsYJ4trbpr+Fs08n4jelkVTGVw== - -"@webassemblyjs/wasm-edit@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.0.tgz#ee4a5c9f677046a210542ae63897094c2027cb78" - integrity sha512-JHQ0damXy0G6J9ucyKVXO2j08JVJ2ntkdJlq1UTiUrIgfGMmA7Ik5VdC/L8hBK46kVJgujkBIoMtT8yVr+yVOQ== - dependencies: - "@webassemblyjs/ast" "1.11.0" - "@webassemblyjs/helper-buffer" "1.11.0" - "@webassemblyjs/helper-wasm-bytecode" "1.11.0" - "@webassemblyjs/helper-wasm-section" "1.11.0" - "@webassemblyjs/wasm-gen" "1.11.0" - "@webassemblyjs/wasm-opt" "1.11.0" - "@webassemblyjs/wasm-parser" "1.11.0" - "@webassemblyjs/wast-printer" "1.11.0" - -"@webassemblyjs/wasm-gen@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.0.tgz#3cdb35e70082d42a35166988dda64f24ceb97abe" - 
integrity sha512-BEUv1aj0WptCZ9kIS30th5ILASUnAPEvE3tVMTrItnZRT9tXCLW2LEXT8ezLw59rqPP9klh9LPmpU+WmRQmCPQ== - dependencies: - "@webassemblyjs/ast" "1.11.0" - "@webassemblyjs/helper-wasm-bytecode" "1.11.0" - "@webassemblyjs/ieee754" "1.11.0" - "@webassemblyjs/leb128" "1.11.0" - "@webassemblyjs/utf8" "1.11.0" - -"@webassemblyjs/wasm-opt@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.0.tgz#1638ae188137f4bb031f568a413cd24d32f92978" - integrity sha512-tHUSP5F4ywyh3hZ0+fDQuWxKx3mJiPeFufg+9gwTpYp324mPCQgnuVKwzLTZVqj0duRDovnPaZqDwoyhIO8kYg== - dependencies: - "@webassemblyjs/ast" "1.11.0" - "@webassemblyjs/helper-buffer" "1.11.0" - "@webassemblyjs/wasm-gen" "1.11.0" - "@webassemblyjs/wasm-parser" "1.11.0" - -"@webassemblyjs/wasm-parser@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.0.tgz#3e680b8830d5b13d1ec86cc42f38f3d4a7700754" - integrity sha512-6L285Sgu9gphrcpDXINvm0M9BskznnzJTE7gYkjDbxET28shDqp27wpruyx3C2S/dvEwiigBwLA1cz7lNUi0kw== - dependencies: - "@webassemblyjs/ast" "1.11.0" - "@webassemblyjs/helper-api-error" "1.11.0" - "@webassemblyjs/helper-wasm-bytecode" "1.11.0" - "@webassemblyjs/ieee754" "1.11.0" - "@webassemblyjs/leb128" "1.11.0" - "@webassemblyjs/utf8" "1.11.0" - -"@webassemblyjs/wast-printer@1.11.0": - version "1.11.0" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.0.tgz#680d1f6a5365d6d401974a8e949e05474e1fab7e" - integrity sha512-Fg5OX46pRdTgB7rKIUojkh9vXaVN6sGYCnEiJN1GYkb0RPwShZXp6KTDqmoMdQPKhcroOXh3fEzmkWmCYaKYhQ== - dependencies: - "@webassemblyjs/ast" "1.11.0" +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version 
"1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + 
"@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": @@ -1963,11 +1985,16 @@ acorn-walk@^8.0.0: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.1.0.tgz#d3c6a9faf00987a5e2b9bdb506c2aa76cd707f83" integrity sha512-mjmzmv12YIG/G8JQdQuz2MUDShEJ6teYpT5bmWA4q7iwoGen8xtt3twF3OvzIUl+Q06aWIjvnwQUKvQ6TtMRjg== -acorn@^8.0.4, acorn@^8.2.1: +acorn@^8.0.4: version "8.4.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.4.0.tgz#af53266e698d7cffa416714b503066a82221be60" integrity sha512-ULr0LDaEqQrMFGyQ3bhJkLsbtrQ8QibAseGZeaSUiT/6zb9IvIkomWHJIvgvwad+hinRAgsI51JcWk2yvwyL+w== +acorn@^8.4.1: + version "8.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.4.1.tgz#56c36251fc7cabc7096adc18f05afe814321a28c" + integrity sha512-asabaBSkEKosYKMITunzX177CXxQ4Q8BSSzMTKD+FefUhipQC70gfW5SiUDhYQ3vk8G+81HqQk7Fv9OXwwn9KA== + address@1.1.2, address@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" @@ -2193,11 +2220,6 @@ async@^2.6.2: dependencies: lodash "^4.17.14" -at-least-node@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" - integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== - atob@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" @@ -2378,7 +2400,7 @@ boolbase@^1.0.0, boolbase@~1.0.0: resolved 
"https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= -boxen@^5.0.0: +boxen@^5.0.0, boxen@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/boxen/-/boxen-5.0.1.tgz#657528bdd3f59a772b8279b831f27ec2c744664b" integrity sha512-49VBlw+PrWEF51aCmy7QIteYPIFZxSpvqBdP/2itCPPlJ49kj9zg/XPRFrdkne2W+CfwXUls8exMvu1RysZpKA== @@ -2565,6 +2587,14 @@ chalk@^4.1.0: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^4.1.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + character-entities-legacy@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz#94bc1845dce70a5bb9d2ecc748725661293d8fc1" @@ -2668,10 +2698,10 @@ clean-css@^4.2.3: dependencies: source-map "~0.6.0" -clean-css@^5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.1.2.tgz#6ea0da7286b4ddc2469a1b776e2461a5007eed54" - integrity sha512-QcaGg9OuMo+0Ds933yLOY+gHPWbxhxqF0HDexmToPf8pczvmvZGYzd+QqWp9/mkucAOKViI+dSFOqoZIvXbeBw== +clean-css@^5.1.2: + version "5.1.4" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.1.4.tgz#d191c98347f9fc36b301f99bb827898151175782" + integrity sha512-e6JAuR0T2ahg7fOSv98Nxqh7mHWOac5TaCSgrr61h/6mkPLwlxX38hzob4h6IKj/UHlrrLXvAEjWqXlvi8r8lQ== dependencies: source-map "~0.6.0" @@ -2902,23 +2932,23 @@ copy-descriptor@^0.1.0: resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= -copy-text-to-clipboard@^3.0.0: +copy-text-to-clipboard@^3.0.1: version "3.0.1" resolved 
"https://registry.yarnpkg.com/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz#8cbf8f90e0a47f12e4a24743736265d157bce69c" integrity sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q== -copy-webpack-plugin@^8.1.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-8.1.1.tgz#3f697e162764925c2f0d235f380676125508fd26" - integrity sha512-rYM2uzRxrLRpcyPqGceRBDpxxUV8vcDqIKxAUKfcnFpcrPxT5+XvhTxv7XLjo5AvEJFPdAE3zCogG2JVahqgSQ== +copy-webpack-plugin@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-9.0.1.tgz#b71d21991599f61a4ee00ba79087b8ba279bbb59" + integrity sha512-14gHKKdYIxF84jCEgPgYXCPpldbwpxxLbCmA7LReY7gvbaT555DgeBWBgBZM116tv/fO6RRJrsivBqRyRlukhw== dependencies: fast-glob "^3.2.5" - glob-parent "^5.1.1" + glob-parent "^6.0.0" globby "^11.0.3" normalize-path "^3.0.0" p-limit "^3.1.0" schema-utils "^3.0.0" - serialize-javascript "^5.0.1" + serialize-javascript "^6.0.0" core-js-compat@^3.14.0: version "3.14.0" @@ -3019,17 +3049,17 @@ css-loader@^5.1.1: schema-utils "^3.0.0" semver "^7.3.5" -css-minimizer-webpack-plugin@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-2.0.0.tgz#3c42f6624ed4cf4780dd963e23ee649e5a25c1a8" - integrity sha512-cG/uc94727tx5pBNtb1Sd7gvUPzwmcQi1lkpfqTpdkuNq75hJCw7bIVsCNijLm4dhDcr1atvuysl2rZqOG8Txw== +css-minimizer-webpack-plugin@^3.0.1: + version "3.0.2" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.0.2.tgz#8fadbdf10128cb40227bff275a4bb47412534245" + integrity sha512-B3I5e17RwvKPJwsxjjWcdgpU/zqylzK1bPVghcmpFHRL48DXiBgrtqz1BJsn68+t/zzaLp9kYAaEDvQ7GyanFQ== dependencies: - cssnano "^5.0.0" - jest-worker "^26.3.0" + cssnano "^5.0.6" + jest-worker "^27.0.2" p-limit "^3.0.2" - postcss "^8.2.9" + postcss "^8.3.5" schema-utils "^3.0.0" - serialize-javascript 
"^5.0.1" + serialize-javascript "^6.0.0" source-map "^0.6.1" css-select-base-adapter@^0.1.1: @@ -3120,7 +3150,7 @@ cssesc@^3.0.0: resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== -cssnano-preset-advanced@^5.0.0: +cssnano-preset-advanced@^5.1.1: version "5.1.3" resolved "https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-5.1.3.tgz#a2c6cf2fe39108b81e88810e3c399d1c0fe030ea" integrity sha512-pS4+Q2Hoo/FevZs2JqA2BG8Vn5o5VeXgj+z6kGndKTq3RFYvlKeJ1ZPnLXo9zyYKwmSqWW0rWqtGxxmigIte0Q== @@ -3172,14 +3202,15 @@ cssnano-utils@^2.0.1: resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-2.0.1.tgz#8660aa2b37ed869d2e2f22918196a9a8b6498ce2" integrity sha512-i8vLRZTnEH9ubIyfdZCAdIdgnHAUeQeByEeQ2I7oTilvP9oHO6RScpeq3GsFUVqeB8uZgOQ9pw8utofNn32hhQ== -cssnano@^5.0.0, cssnano@^5.0.1: - version "5.0.6" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.0.6.tgz#2a91ad34c6521ae31eab3da9c90108ea3093535d" - integrity sha512-NiaLH/7yqGksFGsFNvSRe2IV/qmEBAeDE64dYeD8OBrgp6lE8YoMeQJMtsv5ijo6MPyhuoOvFhI94reahBRDkw== +cssnano@^5.0.4, cssnano@^5.0.6: + version "5.0.7" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.0.7.tgz#e81894bdf31aa01a0ca3d1d0eee47be18f7f3012" + integrity sha512-7C0tbb298hef3rq+TtBbMuezBQ9VrFtrQEsPNuBKNVgWny/67vdRsnq8EoNu7TRjAHURgYvWlRIpCUmcMZkRzw== dependencies: - cosmiconfig "^7.0.0" cssnano-preset-default "^5.1.3" is-resolvable "^1.1.0" + lilconfig "^2.0.3" + yaml "^1.10.2" csso@^4.0.2, csso@^4.2.0: version "4.2.0" @@ -3589,10 +3620,10 @@ es-abstract@^1.17.2, es-abstract@^1.18.0-next.2, es-abstract@^1.18.2: string.prototype.trimstart "^1.0.4" unbox-primitive "^1.0.1" -es-module-lexer@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.4.1.tgz#dda8c6a14d8f340a24e34331e0fab0cb50438e0e" - integrity 
sha512-ooYciCUtfw6/d2w56UVeqHPcoCFAiJdz5XOkYpv/Txl1HMUozpXjz/2RIQgqwKdXNDPSF1W7mJCFse3G+HDyAA== +es-module-lexer@^0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.7.1.tgz#c2c8e0f46f2df06274cdaf0dd3f3b33e0a0b267d" + integrity sha512-MgtWFl5No+4S3TmhDmCz2ObFGm6lEpTnzbQi+Dd+pw4mlTIZTmM2iAs5gRlmx5zS9luzobCSBSI90JM/1/JgOw== es-to-primitive@^1.2.1: version "1.2.1" @@ -4033,12 +4064,11 @@ fresh@0.5.2: resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= -fs-extra@^9.1.0: - version "9.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" - integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== +fs-extra@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.0.0.tgz#9ff61b655dde53fb34a82df84bb214ce802e17c1" + integrity sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ== dependencies: - at-least-node "^1.0.0" graceful-fs "^4.2.0" jsonfile "^6.0.1" universalify "^2.0.0" @@ -4129,13 +4159,20 @@ glob-parent@^3.1.0: is-glob "^3.1.0" path-dirname "^1.0.0" -glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@~5.1.2: +glob-parent@^5.1.0, glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" +glob-parent@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.1.tgz#42054f685eb6a44e7a7d189a96efa40a54971aa7" + integrity sha512-kEVjS71mQazDBHKcsq4E9u/vUzaLcw1A8EtUeydawvIWQCJM0qQ08G1H7/XTjFUulla6XQiDOG6MXSaG0HDKog== + dependencies: + is-glob "^4.0.1" + glob-to-regexp@^0.4.1: 
version "0.4.1" resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" @@ -4193,7 +4230,7 @@ globby@11.0.1: merge2 "^1.3.0" slash "^3.0.0" -globby@^11.0.1, globby@^11.0.2, globby@^11.0.3: +globby@^11.0.1, globby@^11.0.2, globby@^11.0.3, globby@^11.0.4: version "11.0.4" resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg== @@ -4238,7 +4275,7 @@ graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== -gray-matter@^4.0.2: +gray-matter@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/gray-matter/-/gray-matter-4.0.3.tgz#e893c064825de73ea1f5f7d88c7a9f7274288798" integrity sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q== @@ -4497,15 +4534,15 @@ html-void-elements@^1.0.0: resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483" integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== -html-webpack-plugin@^5.2.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.3.1.tgz#8797327548e3de438e3494e0c6d06f181a7f20d1" - integrity sha512-rZsVvPXUYFyME0cuGkyOHfx9hmkFa4pWfxY/mdY38PsBEaVNsRoA+Id+8z6DBDgyv3zaw6XQszdF8HLwfQvcdQ== +html-webpack-plugin@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.3.2.tgz#7b04bf80b1f6fe84a6d3f66c8b79d64739321b08" + integrity 
sha512-HvB33boVNCz2lTyBsSiMffsJ+m0YLIQ+pskblXgN9fnjS1BgEcuAfdInfXfGrkdXV406k9FiDi86eVCDBgJOyQ== dependencies: "@types/html-minifier-terser" "^5.0.0" html-minifier-terser "^5.0.1" - lodash "^4.17.20" - pretty-error "^2.1.1" + lodash "^4.17.21" + pretty-error "^3.0.4" tapable "^2.0.0" htmlparser2@^3.9.1: @@ -4654,10 +4691,10 @@ indent-string@^4.0.0: resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -infima@0.2.0-alpha.23: - version "0.2.0-alpha.23" - resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.23.tgz#2c17b473784ae8244fd985f126f9c27a49b24523" - integrity sha512-V0RTjB1otjpH3E2asbydx3gz7ovdSJsuV7r9JTdBggqRilnelTJUcXxLawBQQKsjQi5qPcRTjxnlaV8xyyKhhw== +infima@0.2.0-alpha.29: + version "0.2.0-alpha.29" + resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.29.tgz#4ccf27c4c696e9a0884b333ad9ced5f65b7ae5f3" + integrity sha512-b6XX4QJekAYBPz2Y0XcXrDRaX/+96V95/WKWedY4zAWZ6xlzdxCrnyUgNaC4575aHcA2bfarLlTsP8FHFhjZFQ== inflight@^1.0.4: version "1.0.6" @@ -5114,15 +5151,6 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= -jest-worker@^26.3.0: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" - integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^7.0.0" - jest-worker@^27.0.2: version "27.0.2" resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.0.2.tgz#4ebeb56cef48b3e7514552f80d0d80c0129f0b05" @@ -5279,6 +5307,11 @@ leven@^3.1.0: resolved 
"https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== +lilconfig@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.3.tgz#68f3005e921dafbd2a2afb48379986aa6d2579fd" + integrity sha512-EHKqr/+ZvdKCifpNrJCKxBTgk5XupZA3y/aCPY9mxfgBzmgh93Mt/WqjjQ38oMxXuvDokaKiM3lAgvSH2sjtHg== + lines-and-columns@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" @@ -5589,7 +5622,7 @@ micromatch@^3.1.10, micromatch@^3.1.4: snapdragon "^0.8.1" to-regex "^3.0.2" -micromatch@^4.0.2: +micromatch@^4.0.2, micromatch@^4.0.4: version "4.0.4" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== @@ -5649,10 +5682,10 @@ mini-create-react-context@^0.4.0: "@babel/runtime" "^7.12.1" tiny-warning "^1.0.3" -mini-css-extract-plugin@^1.4.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.0.tgz#b4db2525af2624899ed64a23b0016e0036411893" - integrity sha512-nPFKI7NSy6uONUo9yn2hIfb9vyYvkFu95qki0e21DQ9uaqNKDP15DGpK0KnV6wDroWxPHtExrdEwx/yDQ8nVRw== +mini-css-extract-plugin@^1.6.0: + version "1.6.2" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.2.tgz#83172b4fd812f8fc4a09d6f6d16f924f53990ca8" + integrity sha512-WhDvO3SjGm40oV5y26GjMJYjd2UMqrLAGKy5YS2/3QKJy2F7jgynuHTir/tgUUOiNQu5saXHdc8reo7YuhhT4Q== dependencies: loader-utils "^2.0.0" schema-utils "^3.0.0" @@ -6322,7 +6355,7 @@ postcss-discard-unused@^5.0.1: dependencies: postcss-selector-parser "^6.0.5" -postcss-loader@^5.2.0: +postcss-loader@^5.3.0: version "5.3.0" resolved 
"https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-5.3.0.tgz#1657f869e48d4fdb018a40771c235e499ee26244" integrity sha512-/+Z1RAmssdiSLgIZwnJHwBMnlABPgF7giYzTN2NOfr9D21IJZ4mQC1R2miwp80zno9M4zMD/umGI8cR+2EL5zw== @@ -6528,7 +6561,7 @@ postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector cssesc "^3.0.0" util-deprecate "^1.0.2" -postcss-sort-media-queries@^3.8.9: +postcss-sort-media-queries@^3.10.11: version "3.11.12" resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-3.11.12.tgz#bfc449fadedfe2765ca4566c30b24694635ad182" integrity sha512-PNhEOWR/btZ0bNNRqqdW4TWxBPQ1mu2I6/Zpco80vBUDSyEjtduUAorY0Vm68rvDlGea3+sgEnQ36iQ1A/gG8Q== @@ -6562,7 +6595,7 @@ postcss-zindex@^5.0.1: resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-5.0.1.tgz#c585724beb69d356af8c7e68847b28d6298ece03" integrity sha512-nwgtJJys+XmmSGoYCcgkf/VczP8Mp/0OfSv3v0+fw0uABY4yxw+eFs0Xp9nAZHIKnS5j+e9ywQ+RD+ONyvl5pA== -postcss@^8.2.10, postcss@^8.2.15, postcss@^8.2.4, postcss@^8.2.9: +postcss@^8.2.15, postcss@^8.2.4: version "8.3.5" resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.3.5.tgz#982216b113412bc20a86289e91eb994952a5b709" integrity sha512-NxTuJocUhYGsMiMFHDUkmjSKT3EdH4/WbGF6GCi1NDGk+vbcUTun4fpbOqaPtD8IIsztA2ilZm2DhYCuyN58gA== @@ -6571,25 +6604,34 @@ postcss@^8.2.10, postcss@^8.2.15, postcss@^8.2.4, postcss@^8.2.9: nanoid "^3.1.23" source-map-js "^0.6.2" +postcss@^8.3.5: + version "8.3.6" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.3.6.tgz#2730dd76a97969f37f53b9a6096197be311cc4ea" + integrity sha512-wG1cc/JhRgdqB6WHEuyLTedf3KIRuD0hG6ldkFEZNCjRxiC+3i6kkWUUbiJQayP28iwG35cEmAbe98585BYV0A== + dependencies: + colorette "^1.2.2" + nanoid "^3.1.23" + source-map-js "^0.6.2" + prepend-http@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= 
-pretty-error@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-2.1.2.tgz#be89f82d81b1c86ec8fdfbc385045882727f93b6" - integrity sha512-EY5oDzmsX5wvuynAByrmY0P0hcp+QpnAKbJng2A2MPjVKXCxrDSUkzghVJ4ZGPIv+JC4gX8fPUWscC0RtjsWGw== +pretty-error@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-3.0.4.tgz#94b1d54f76c1ed95b9c604b9de2194838e5b574e" + integrity sha512-ytLFLfv1So4AO1UkoBF6GXQgJRaKbiSiGFICaOPNwQ3CMvBvXpLRubeQWyPGnsbV/t9ml9qto6IeCsho0aEvwQ== dependencies: lodash "^4.17.20" - renderkid "^2.0.4" + renderkid "^2.0.6" pretty-time@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/pretty-time/-/pretty-time-1.1.0.tgz#ffb7429afabb8535c346a34e41873adf3d74dd0e" integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA== -prism-react-renderer@^1.1.1: +prism-react-renderer@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/prism-react-renderer/-/prism-react-renderer-1.2.1.tgz#392460acf63540960e5e3caa699d851264e99b89" integrity sha512-w23ch4f75V1Tnz8DajsYKvY5lF7H1+WvzvLUcF0paFxkTHSp42RS0H5CttdN2Q8RR3DRGZ9v5xD/h3n8C8kGmg== @@ -6619,7 +6661,7 @@ prompts@2.4.0: kleur "^3.0.3" sisteransi "^1.0.5" -prompts@^2.4.0: +prompts@^2.4.1: version "2.4.1" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.1.tgz#befd3b1195ba052f9fd2fde8a486c4e82ee77f61" integrity sha512-EQyfIuO2hPDsX1L/blblV+H7I0knhgAd82cVneCwcdND9B8AuCDuRcBH6yIcG4dFzlOUqbazQqwGjx5xmsNLuQ== @@ -6827,7 +6869,7 @@ react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1: resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== -react-json-view@^1.21.1: +react-json-view@^1.21.3: version "1.21.3" resolved 
"https://registry.yarnpkg.com/react-json-view/-/react-json-view-1.21.3.tgz#f184209ee8f1bf374fb0c41b0813cff54549c475" integrity sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw== @@ -7134,7 +7176,7 @@ remove-trailing-separator@^1.0.1: resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= -renderkid@^2.0.4: +renderkid@^2.0.6: version "2.0.7" resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.7.tgz#464f276a6bdcee606f4a15993f9b29fc74ca8609" integrity sha512-oCcFyxaMrKsKcTY59qnCAtmDVSLfPbrv6A3tVbPdFMMrv5jaK10V6m40cKsoPNhAqN6rmHW9sswW4o3ruSrwUQ== @@ -7256,10 +7298,10 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" -rtl-detect@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/rtl-detect/-/rtl-detect-1.0.3.tgz#42145b9a4f9cf0b94c4542aba90d57f0d18559bf" - integrity sha512-2sMcZO60tL9YDEFe24gqddg3hJ+xSmJFN8IExcQUxeHxQzydQrN6GHPL+yAWgzItXSI7es53hcZC9pJneuZDKA== +rtl-detect@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/rtl-detect/-/rtl-detect-1.0.4.tgz#40ae0ea7302a150b96bc75af7d749607392ecac6" + integrity sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ== rtlcss@^3.1.2: version "3.2.0" @@ -7348,6 +7390,15 @@ schema-utils@^3.0.0: ajv "^6.12.5" ajv-keywords "^3.5.2" +schema-utils@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + section-matter@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/section-matter/-/section-matter-1.0.0.tgz#e9041953506780ec01d59f292a19c7b850b84167" @@ -7416,10 +7467,10 @@ 
send@0.17.1: range-parser "~1.2.1" statuses "~1.5.0" -serialize-javascript@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-5.0.1.tgz#7886ec848049a462467a97d3d918ebb2aaf934f4" - integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA== +serialize-javascript@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== dependencies: randombytes "^2.1.0" @@ -7554,12 +7605,12 @@ sisteransi@^1.0.5: resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== -sitemap@^6.3.6: - version "6.4.0" - resolved "https://registry.yarnpkg.com/sitemap/-/sitemap-6.4.0.tgz#b4bc4edf36de742405a7572bc3e467ba484b852e" - integrity sha512-DoPKNc2/apQZTUnfiOONWctwq7s6dZVspxAZe2VPMNtoqNq7HgXRvlRnbIpKjf+8+piQdWncwcy+YhhTGY5USQ== +sitemap@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/sitemap/-/sitemap-7.0.0.tgz#022bef4df8cba42e38e1fe77039f234cab0372b6" + integrity sha512-Ud0jrRQO2k7fEtPAM+cQkBKoMvxQyPKNXKDLn8tRVHxRCsdDQ2JZvw+aZ5IRYYQVAV9iGxEar6boTwZzev+x3g== dependencies: - "@types/node" "^14.14.28" + "@types/node" "^15.0.1" "@types/sax" "^1.2.1" arg "^5.0.0" sax "^1.2.4" @@ -7625,7 +7676,7 @@ sort-css-media-queries@1.5.4: resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-1.5.4.tgz#24182b12002a13d01ba943ddf74f5098d7c244ce" integrity sha512-YP5W/h4Sid/YP7Lp87ejJ5jP13/Mtqt2vx33XyhO+IAugKlufRPbOrPlIiEUuxmpNBSBd3EeeQpFhdu3RfI2Ag== -source-list-map@^2.0.0, source-list-map@^2.0.1: +source-list-map@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== @@ -7876,7 +7927,7 @@ supports-color@^6.1.0: dependencies: has-flag "^3.0.0" -supports-color@^7.0.0, supports-color@^7.1.0: +supports-color@^7.1.0: version "7.2.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== @@ -7937,15 +7988,15 @@ tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.0.tgz#5c373d281d9c672848213d0e037d1c4165ab426b" integrity sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw== -terser-webpack-plugin@^5.1.1: - version "5.1.3" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.1.3.tgz#30033e955ca28b55664f1e4b30a1347e61aa23af" - integrity sha512-cxGbMqr6+A2hrIB5ehFIF+F/iST5ZOxvOmy9zih9ySbP1C2oEWQSOUS+2SNBTjzx5xLKO4xnod9eywdfq1Nb9A== +terser-webpack-plugin@^5.1.3: + version "5.1.4" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.1.4.tgz#c369cf8a47aa9922bd0d8a94fe3d3da11a7678a1" + integrity sha512-C2WkFwstHDhVEmsmlCxrXUtVklS+Ir1A7twrYzrDrQQOIMOaVAYykaoo/Aq1K0QRkMoY2hhvDQY1cm4jnIMFwA== dependencies: jest-worker "^27.0.2" p-limit "^3.1.0" schema-utils "^3.0.0" - serialize-javascript "^5.0.1" + serialize-javascript "^6.0.0" source-map "^0.6.1" terser "^5.7.0" @@ -8069,7 +8120,7 @@ tslib@^1.9.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.0.3, tslib@^2.1.0: +tslib@^2.0.3, tslib@^2.1.0, tslib@^2.2.0: version 
"2.3.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e" integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg== @@ -8437,7 +8488,7 @@ vfile@^4.0.0: unist-util-stringify-position "^2.0.0" vfile-message "^2.0.0" -wait-on@^5.2.1: +wait-on@^5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-5.3.0.tgz#584e17d4b3fe7b46ac2b9f8e5e102c005c2776c7" integrity sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg== @@ -8468,7 +8519,7 @@ web-namespaces@^1.0.0, web-namespaces@^1.1.2: resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec" integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== -webpack-bundle-analyzer@^4.4.0: +webpack-bundle-analyzer@^4.4.2: version "4.4.2" resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.4.2.tgz#39898cf6200178240910d629705f0f3493f7d666" integrity sha512-PIagMYhlEzFfhMYOzs5gFT55DkUdkyrJi/SxJp8EF3YMWhS+T9vvs2EoTetpk5qb6VsCq02eXTlRDOydRhDFAQ== @@ -8541,7 +8592,7 @@ webpack-log@^2.0.0: ansi-colors "^3.0.0" uuid "^3.3.2" -webpack-merge@^5.7.3: +webpack-merge@^5.8.0: version "5.8.0" resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== @@ -8557,29 +8608,26 @@ webpack-sources@^1.1.0, webpack-sources@^1.4.3: source-list-map "^2.0.0" source-map "~0.6.1" -webpack-sources@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.0.tgz#9ed2de69b25143a4c18847586ad9eccb19278cfa" - integrity sha512-WyOdtwSvOML1kbgtXbTDnEW0jkJ7hZr/bDByIwszhWd/4XX1A3XMkrbFMsuH4+/MfLlZCUzlAdg4r7jaGKEIgQ== - dependencies: - 
source-list-map "^2.0.1" - source-map "^0.6.1" +webpack-sources@^3.1.1: + version "3.2.0" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.0.tgz#b16973bcf844ebcdb3afde32eda1c04d0b90f89d" + integrity sha512-fahN08Et7P9trej8xz/Z7eRu8ltyiygEo/hnRi9KqBUs80KeDcnf96ZJo++ewWd84fEf3xSX9bp4ZS9hbw0OBw== -webpack@^5.28.0: - version "5.39.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.39.0.tgz#37d6899f1f40c31d5901abc0f39bc8cc7224138c" - integrity sha512-25CHmuDj+oOTyteI13sUqNlCnjCnySuhiKWE/cRYPQYeoQ3ijHgyWX27CiyUKLNGq27v8S0mrksyTreT/xo7pg== +webpack@^5.40.0: + version "5.47.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.47.1.tgz#20fb7d76f68912a2249a6dd7ff16faa178049ad2" + integrity sha512-cW+Mzy9SCDapFV4OrkHuP6EFV2mAsiQd+gOa3PKtHNoKg6qPqQXZzBlHH+CnQG1osplBCqwsJZ8CfGO6XWah0g== dependencies: "@types/eslint-scope" "^3.7.0" - "@types/estree" "^0.0.47" - "@webassemblyjs/ast" "1.11.0" - "@webassemblyjs/wasm-edit" "1.11.0" - "@webassemblyjs/wasm-parser" "1.11.0" - acorn "^8.2.1" + "@types/estree" "^0.0.50" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.4.1" browserslist "^4.14.5" chrome-trace-event "^1.0.2" enhanced-resolve "^5.8.0" - es-module-lexer "^0.4.0" + es-module-lexer "^0.7.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" @@ -8588,11 +8636,11 @@ webpack@^5.28.0: loader-runner "^4.2.0" mime-types "^2.1.27" neo-async "^2.6.2" - schema-utils "^3.0.0" + schema-utils "^3.1.0" tapable "^2.1.1" - terser-webpack-plugin "^5.1.1" + terser-webpack-plugin "^5.1.3" watchpack "^2.2.0" - webpack-sources "^2.3.0" + webpack-sources "^3.1.1" webpackbar@^5.0.0-3: version "5.0.0-3" @@ -8743,7 +8791,7 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== 
-yaml@^1.10.0: +yaml@^1.10.0, yaml@^1.10.2: version "1.10.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== From 15949adbe4972e27bc75fe310e37606214b20da9 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Mon, 2 Aug 2021 18:51:44 +0200 Subject: [PATCH 02/32] Documenting error and secure utils. --- .../es/weso/rdfshape/server/Server.scala | 26 +++++++------- .../server/api/WikibaseSchemaParam.scala | 9 +++-- .../server/utils/error/ExitCodes.scala | 25 ++++++++++++++ .../server/utils/error/SysUtils.scala | 24 ++----------- .../SSLContextCreationException.scala | 15 +++++++- .../server/utils/secure/SSLHelper.scala | 34 +++++++++++++++++-- .../es/weso/rdfshape/cli/CliManager.scala | 26 +++++++------- .../weso/rdfshape/logging/LoggingLevels.scala | 13 +++++++ .../rdfshape/logging/LoggingManager.scala | 19 +++-------- 9 files changed, 124 insertions(+), 67 deletions(-) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/ExitCodes.scala create mode 100644 src/main/scala/es/weso/rdfshape/logging/LoggingLevels.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index 858b6779..fd3d743c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -5,8 +5,8 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.Server._ import es.weso.rdfshape.server.api._ -import es.weso.rdfshape.server.utils.error.SysUtils import es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException +import es.weso.rdfshape.server.utils.error.{ExitCodes, SysUtils} import es.weso.rdfshape.server.utils.secure.SSLHelper import fs2.Stream import 
org.http4s.client.Client @@ -53,9 +53,9 @@ private class Server( } /** Create an instance of a secure SSLContext for the application. - * @return None, if no HTTPS is required; an SSLContext if HTTPS is required and the context could be created + * @return None if no HTTPS is required; an SSLContext if HTTPS is required and the context could be created * @see {@link es.weso.rdfshape.server.utils.secure.SSLHelper} - * @note If an error occurs creating the SSLContext, program termination will be ordered + * @note If an error occurs creating the SSLContext, program termination will occur */ private def getSslContext: Option[SSLContext] = { if(!https) return None @@ -65,7 +65,7 @@ private class Server( case Failure(exception) => val e = SSLContextCreationException(exception.getMessage, exception) SysUtils.fatalError( - SysUtils.sslContextCreationError, + ExitCodes.SSL_CONTEXT_CREATION_ERROR, e.getMessage ) None @@ -88,15 +88,6 @@ private class Server( } yield exitCode }.drain - /** Create an http4s application object - * @param client Http4s' client in charge of the application - * @return Http4s' application with the given client and a request-logging middleware - */ - private def createApp(client: Client[IO]): HttpApp[IO] = { - val app = routesService(client).orNotFound - Logger.httpApp(logHeaders = true, logBody = false)(app) - } - /** Create the final http4s server * @param client Http4s' client in charge of the application * @param sslContext SSLContext used by the application @@ -118,6 +109,15 @@ private class Server( case Some(context) => baseServer.withSslContext(context) } } + + /** Create an http4s application object + * @param client Http4s' client in charge of the application + * @return Http4s' application with the given client and a request-logging middleware + */ + private def createApp(client: Client[IO]): HttpApp[IO] = { + val app = routesService(client).orNotFound + Logger.httpApp(logHeaders = true, logBody = false)(app) + } } /** Static utilities to 
aid when creating the Server, as when as for managing Server creation diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikibaseSchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikibaseSchemaParam.scala index 4ef335ea..19f06fe8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikibaseSchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikibaseSchemaParam.scala @@ -1,17 +1,18 @@ package es.weso.rdfshape.server.api + import cats.effect._ import es.weso.rdf.RDFReasoner import es.weso.rdfshape.server.wikibase._ import es.weso.schema.{Schema, Schemas} import org.http4s.client._ import org.http4s.dsl.io._ -import org.http4s.{Uri, _} +import org.http4s._ case class WikibaseSchemaParam( maybeSchemaParam: Option[SchemaParam], maybeEntitySchema: Option[String], schemaStr: Option[String], - wikibase: Wikibase = Wikidata + wikidata: Wikibase = Wikidata ) { def getSchema( @@ -34,7 +35,7 @@ case class WikibaseSchemaParam( es: String, client: Client[IO] ): IO[(Option[String], Either[String, Schema])] = { - val uriSchema = wikibase.schemaEntityUri(es) + val uriSchema = wikidata.schemaEntityUri(es) val r: IO[(Schema, String)] = for { strSchema <- deref(uriSchema, client) schema <- Schemas.fromString(strSchema, "ShEXC", "ShEx") @@ -104,8 +105,10 @@ object WikibaseSchemaParam { // TODO: Move this code to es.weso.utils.IOUtils private def ok_f[A](v: A): IO[A] = IO.pure(v) + private def err_f[A](err: String): IO[A] = IO.raiseError[A](new RuntimeException(err)) + private def either2f[A](e: Either[String, A]): IO[A] = e.fold(s => IO.raiseError(new RuntimeException(s)), IO.pure) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/ExitCodes.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/ExitCodes.scala new file mode 100644 index 00000000..bcaa054d --- /dev/null +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/ExitCodes.scala @@ -0,0 +1,25 @@ +package es.weso.rdfshape.server.utils.error + +/** Enum classifying the accepted exit codes by their Int code. + */ +case object ExitCodes { + type ExitCodes = Int + + /** Exit code on successful program execution */ + val SUCCESS = 0 + // CLI ERRORS + /** Exit code on CLI argument parsing error + */ + val ARGUMENTS_PARSE_ERROR = 101 + + /** Exit code on invalid CLI arguments + */ + val ARGUMENTS_INVALID_ERROR = 102 + + // Server startup errors + /** Exit code on runtime error when trying to build and SSL Context for the app + * + * @see {@link es.weso.rdfshape.server.utils.secure.SSLHelper} + */ + val SSL_CONTEXT_CREATION_ERROR = 201 +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/SysUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/SysUtils.scala index b0d9253c..c0f764cb 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/SysUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/SysUtils.scala @@ -7,27 +7,9 @@ import com.typesafe.scalalogging.LazyLogging */ object SysUtils extends LazyLogging { - /** Exit code on successful program execution - */ - val successCode = 0 - - // CLI errors - /** Exit code on CLI argument parsing error - */ - val parseArgumentsError = 101 - - /** Exit code on invalid CLI arguments - */ - val invalidArgumentsError = 102 - - // Server startup errors - /** Exit code on runtime error when trying to build and SSL Context for the app - * @see {@link es.weso.rdfshape.server.utils.secure.SSLHelper} - */ - val sslContextCreationError = 201 - - /** Terminates the program with a given error code after printing a given message in standard error - * @param code Exit code of the program + /** Terminates the program with a given error code after logging a given error message + * + * @param code Exit code of the program * @param 
message Message to be printed before exiting */ def fatalError(code: Int, message: String): Unit = { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala index 279a229f..18198b77 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala @@ -1,14 +1,28 @@ package es.weso.rdfshape.server.utils.error.exceptions +/** Custom exception thrown when a failure occurs when trying to create an SSL Context from user's environment data + * + * @param message Reason/explanation of why the exception occurred + * @param cause Nested exception that caused the SSL Context creation to fail + */ final case class SSLContextCreationException( private val message: String, private val cause: Throwable ) extends Exception(message, cause) {} object SSLContextCreationException { + + /** Fixed message preceding the exception message + */ private val prefix = "Could not create an SSL context with the specified configuration: " + /** Factory method used for instantiating {@linkplain es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException} + * + * @param message Message of the new exception + * @param cause Cause of the new exception + * @return A new {@linkplain es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException SSLContextCreationException} with the given data. 
+ */ def apply( message: String, cause: Throwable = None.orNull @@ -17,6 +31,5 @@ object SSLContextCreationException { case cause: SSLContextCreationException => cause case _ => new SSLContextCreationException(s"$prefix$message", cause) } -// new SSLContextCreationException(s"$prefix$message", cause) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/secure/SSLHelper.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/secure/SSLHelper.scala index dead1678..a7933efc 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/secure/SSLHelper.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/secure/SSLHelper.scala @@ -2,17 +2,45 @@ package es.weso.rdfshape.server.utils.secure import es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException -import java.io.FileInputStream +import java.io.{FileInputStream, IOException} import java.nio.file.Paths import java.security.{KeyStore, SecureRandom} import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory} +/** Static utilities for creating SSL Contexts to serve the API via HTTPS. + * Pre-requisites: + * - A valid certificate is expected to be found in a keystore. + * - Some environment variables need to be set beforehand: + * - KEYSTORE_PATH: location of the keystore storing the certificate. + * - KEYSTORE_PASSWORD: password protecting the keystore (leave empty if there is none). + * - KEYMANAGER_PASSWORD: password protecting the certificate (leave empty is there is none). + * + * @note The inner functionality needs to be able read the host's environment and filesystem + * @note Further docs, see https://github.com/weso/rdfshape-api/wiki/Deploying-RDFShape-API-(SBT)#serving-with-https + * @see {@link es.weso.rdfshape.server.Server} + */ object SSLHelper { + + /** Password protecting the keystore, extracted from the host's environment. 
+ */ lazy val keyStorePassword: Option[String] = sys.env.get("KEYSTORE_PASSWORD") + + /** Password protecting the certificate, extracted from the host's environment. + */ lazy val keyManagerPassword: Option[String] = sys.env.get("KEYMANAGER_PASSWORD") + + /** Location of the keystore storing the certificate, extracted from the host's environment. + */ lazy val keyStorePath: Option[String] = sys.env.get("KEYSTORE_PATH") + /** Try to build an SSL Context given that the certificate's location and credentials are in the PATH. + * @throws es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException On errors getting the certificate information + * @throws java.io.IOException On errors accessing the filesystem + * @return An SSLContext created from the user's certificate + */ + @throws(classOf[SSLContextCreationException]) + @throws(classOf[IOException]) def getContext: SSLContext = { if( @@ -20,7 +48,9 @@ object SSLHelper { keyManagerPassword.isEmpty || keyStorePath.isEmpty ) { - throw SSLContextCreationException("Some environment variables are missing.") + throw SSLContextCreationException( + "Some environment variables are missing." + ) } val keyStore = loadKeystore(keyStorePassword.get) diff --git a/src/main/scala/es/weso/rdfshape/cli/CliManager.scala b/src/main/scala/es/weso/rdfshape/cli/CliManager.scala index f9e56075..4badccea 100644 --- a/src/main/scala/es/weso/rdfshape/cli/CliManager.scala +++ b/src/main/scala/es/weso/rdfshape/cli/CliManager.scala @@ -6,12 +6,13 @@ import es.weso.rdfshape.cli.CliManager.{ versionText } import es.weso.rdfshape.server.Server -import es.weso.rdfshape.server.utils.error.SysUtils +import es.weso.rdfshape.server.utils.error.{ExitCodes, SysUtils} import org.rogach.scallop._ import org.rogach.scallop.exceptions.{Help, ValidationFailure, Version} /** Class in charge of parsing the arguments provided via command-line when executing RDFShape. 
* Parsed data is later used to instantiate the API server according to the user's needs. + * * @param arguments Array of arguments passed to the executable * @see es.weso.rdfshape.Main */ @@ -91,17 +92,17 @@ class CliManager(arguments: Array[String]) extends ScallopConf(arguments) { // On "help", show help menu and exit case Help("") => printHelp() - sys.exit(SysUtils.successCode) + sys.exit(ExitCodes.SUCCESS) // On "version", show version and exit case Version => println(versionText) - sys.exit(SysUtils.successCode) + sys.exit(ExitCodes.SUCCESS) // On args validation failure: exit, printing the specific error message case _: ValidationFailure => SysUtils.fatalError( - SysUtils.invalidArgumentsError, + ExitCodes.ARGUMENTS_INVALID_ERROR, s""" |Invalid argument provided: ${e.getMessage} |$useHelpText @@ -110,11 +111,11 @@ class CliManager(arguments: Array[String]) extends ScallopConf(arguments) { // On other CLI failure: exit, printing the specific error message case _ => SysUtils.fatalError( - SysUtils.parseArgumentsError, + ExitCodes.ARGUMENTS_PARSE_ERROR, s""" - |Could not parse arguments: ${e.getMessage} - |$useHelpText - |""".stripMargin + |Could not parse arguments: ${e.getMessage} + |$useHelpText + |""".stripMargin ) } @@ -124,19 +125,20 @@ class CliManager(arguments: Array[String]) extends ScallopConf(arguments) { } /** Provide static members used by the CLI interface. 
+ * * @see {@link es.weso.rdfshape.cli.CliManager} */ object CliManager { - /** Text message shown when suggesting the user to check the help menu - */ - private val useHelpText = s"""Use "--help" for usage information""" - /** Text message shown when showing the user the program version */ private lazy val formattedVersion: String = s"${buildinfo.BuildInfo.name} ${buildinfo.BuildInfo.version} by WESO Research Group (https://www.weso.es/)" + /** Text message shown when suggesting the user to check the help menu + */ + private val useHelpText = s"""Use "--help" for usage information""" + /** Simplified version text */ private val versionText = s"Version: $formattedVersion" diff --git a/src/main/scala/es/weso/rdfshape/logging/LoggingLevels.scala b/src/main/scala/es/weso/rdfshape/logging/LoggingLevels.scala new file mode 100644 index 00000000..02732b63 --- /dev/null +++ b/src/main/scala/es/weso/rdfshape/logging/LoggingLevels.scala @@ -0,0 +1,13 @@ +package es.weso.rdfshape.logging + +/** Enumeration classifying the accepted logging levels by their String representation. + */ +case object LoggingLevels { + type LoggingLevels = String + + val ERROR = "ERROR" + val WARN = "WARN" + val INFO = "INFO" + val DEBUG = "DEBUG" + val TRACE = "TRACE" +} diff --git a/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala b/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala index d5eae5a0..26951e7b 100644 --- a/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala +++ b/src/main/scala/es/weso/rdfshape/logging/LoggingManager.scala @@ -57,21 +57,10 @@ object LoggingManager { */ def mapVerbosityValueToLogLevel(verbosity: Int): String = { verbosity match { - case 0 => LoggingLevel.ERROR // No verbose argument. Show errors. - case 1 => LoggingLevel.WARN // -v. Show warnings. - case 2 => LoggingLevel.INFO // -vv. Show info. - case _ => LoggingLevel.DEBUG // -vvv and forth. Show debug information. + case 0 => LoggingLevels.ERROR // No verbose argument. Show errors. 
+ case 1 => LoggingLevels.WARN // -v. Show warnings. + case 2 => LoggingLevels.INFO // -vv. Show info. + case _ => LoggingLevels.DEBUG // -vvv and forth. Show debug information. } } } - -/** Enum classifying the accepted logging levels by their String representation. - */ -object LoggingLevel extends Enumeration { - type LoggingLevel = String - val ERROR = "ERROR" - val WARN = "WARN" - val INFO = "INFO" - val DEBUG = "DEBUG" - val TRACE = "TRACE" -} From b34aa26b08f9d23d4d4e230e605d20ad07c2d1e4 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Tue, 3 Aug 2021 13:23:43 +0200 Subject: [PATCH 03/32] Documented JSON utils. --- .../server/utils/json/JsonUtilsServer.scala | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala index 1f2a3c0d..a3e5cb62 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala @@ -2,7 +2,19 @@ package es.weso.rdfshape.server.utils.json import io.circe.Json +/** Helper utilities to extract JSON from the complex data managed by the API. + */ object JsonUtilsServer { + + /** Converts some object to JSON, given a converter function. + * + * @param data Data to be converted to JSON + * @param name Name given to the data + * @param cnv Converter function from A to Json + * @tparam A Type of the data to be converted to JSON + * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. + * The list will be empty if no data is provided for conversion. + */ def maybeField[A]( data: Option[A], name: String, @@ -13,6 +25,15 @@ object JsonUtilsServer { case Some(v) => List((name, cnv(v))) } + /** Converts some object to JSON, given a converter function. 
+ * + * @param data Data to be converted to JSON + * @param name Name given to the data + * @param cnv Converter function from A to Json + * @tparam A Type of the data to be converted to JSON + * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. + * In case no data is provided, the list will contain: the name given to the data and the message given instead of the data. + */ def eitherField[A]( data: Either[String, A], name: String, From 04fdc19cee315360194bc0f1591dcf25e3d84618 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Thu, 12 Aug 2021 17:00:22 +0200 Subject: [PATCH 04/32] Refactored HTML2RDF module. --- .../weso/rdfshape/server/api/DataParam.scala | 193 ++++++++++-------- .../rdfshape/server/html2rdf/HTML2RDF.scala | 145 ++++++++----- .../server/html2rdf/RdfSourceTypes.scala | 8 + .../server/html2rdf/HTML2RDFTest.scala | 90 ++++---- 4 files changed, 247 insertions(+), 189 deletions(-) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/RdfSourceTypes.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala index 99717523..41d5a0d9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala @@ -29,38 +29,6 @@ case class DataParam( compoundData: Option[String] ) extends LazyLogging { - sealed abstract class DataInputType { - val id: String - } - case object dataUrlType extends DataInputType { - override val id = "#dataUrl" - } - case object dataFileType extends DataInputType { - override val id = "#dataFile" - } - case object dataEndpointType extends DataInputType { - override val id = "#dataEndpoint" - } - case object dataTextAreaType extends DataInputType { - override val id = "#dataTextArea" - } - case object compoundDataType extends DataInputType { 
- override val id = "#compoundData" - } - - def parseDataTab(tab: String): Either[String, DataInputType] = { - logger.debug(s"parseDataTab: tab = $tab") - val inputTypes = - List(dataUrlType, dataFileType, dataEndpointType, dataTextAreaType) - inputTypes.find(_.id == tab) match { - case Some(x) => Right(x) - case None => - Left( - s"Wrong value of tab: $tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" - ) - } - } - val dataFormat: Option[DataFormat] = { val dataTab = parseDataTab(activeDataTab.getOrElse(defaultActiveDataTab)) logger.debug(s"Data tab received: $dataTab") @@ -73,35 +41,8 @@ case class DataParam( } } - private def applyInference( - rdf: Resource[IO, RDFReasoner], - inference: Option[String], - dataFormat: Format - ): Resource[IO, RDFReasoner] = - extendWithInference(rdf, inference) - - private def extendWithInference( - resourceRdf: Resource[IO, RDFReasoner], - optInference: Option[String] - ): Resource[IO, RDFReasoner] = { - logger.debug(s"Applying inference $optInference") - optInference match { - case None => resourceRdf - case Some(str) => - InferenceEngine.fromString(str) match { - case Right(engine) => - resourceRdf.evalMap(rdf => rdf.applyInference(engine)) - case Left(err) => - // TODO: Check how to invoke using Resource.raiseError... 
- throw new RuntimeException( - s"Error parsing inference engine param ($str): $err" - ) - } - - } - } - /** get RDF data from data parameters + * * @return a pair where the first value can be Some(string) * if it has string representation and the second parameter * is the resource with the RDF data @@ -209,6 +150,19 @@ case class DataParam( x } + def parseDataTab(tab: String): Either[String, DataInputType] = { + logger.debug(s"parseDataTab: tab = $tab") + val inputTypes = + List(dataUrlType, dataFileType, dataEndpointType, dataTextAreaType) + inputTypes.find(_.id == tab) match { + case Some(x) => Right(x) + case None => + Left( + s"Wrong value of tab: $tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" + ) + } + } + private def showFinalize: IO[Unit] = IO { logger.debug("Closing RDF data") } @@ -220,8 +174,10 @@ case class DataParam( ): IO[Resource[IO, RDFReasoner]] = { logger.debug(s"RDF from string with format: $format") format.name match { - case f if HTML2RDF.availableExtractorNames contains f => - IO(HTML2RDF.extractFromString(str, f)) /*for { + case formatName if HTML2RDF.availableExtractorNames contains formatName => + IO( + HTML2RDF.extractFromString(str, formatName) + ) /*for { eitherRdf <- } yield eitherRdf */ case _ => @@ -238,8 +194,13 @@ case class DataParam( base: Option[String] ): IO[Resource[IO, RDFReasoner]] = { format.name.toLowerCase match { - case f if HTML2RDF.availableExtractorNames contains f => - IO(HTML2RDF.extractFromUrl(uri.toString, f)) + case formatName if HTML2RDF.availableExtractorNames contains formatName => + IO( + HTML2RDF.extractFromUrl( + uri.toString, + formatName + ) + ) case _ => for { baseIri <- mkBase(base) @@ -248,13 +209,6 @@ case class DataParam( } } - private def mkBaseIri( - maybeBase: Option[String] - ): Either[String, Option[IRI]] = maybeBase match { - case None => Right(None) - case Some(str) => IRI.fromString(str).map(Some(_)) - } - private def mkBase(base: Option[String]): IO[Option[IRI]] = base match { 
case None => IO(None) case Some(str) => @@ -266,6 +220,65 @@ case class DataParam( ) } + private def applyInference( + rdf: Resource[IO, RDFReasoner], + inference: Option[String], + dataFormat: Format + ): Resource[IO, RDFReasoner] = + extendWithInference(rdf, inference) + + private def extendWithInference( + resourceRdf: Resource[IO, RDFReasoner], + optInference: Option[String] + ): Resource[IO, RDFReasoner] = { + logger.debug(s"Applying inference $optInference") + optInference match { + case None => resourceRdf + case Some(str) => + InferenceEngine.fromString(str) match { + case Right(engine) => + resourceRdf.evalMap(rdf => rdf.applyInference(engine)) + case Left(err) => + // TODO: Check how to invoke using Resource.raiseError... + throw new RuntimeException( + s"Error parsing inference engine param ($str): $err" + ) + } + + } + } + + private def mkBaseIri( + maybeBase: Option[String] + ): Either[String, Option[IRI]] = maybeBase match { + case None => Right(None) + case Some(str) => IRI.fromString(str).map(Some(_)) + } + + sealed abstract class DataInputType { + val id: String + } + + case object dataUrlType extends DataInputType { + override val id = "#dataUrl" + } + + case object dataFileType extends DataInputType { + override val id = "#dataFile" + } + + case object dataEndpointType extends DataInputType { + override val id = "#dataEndpoint" + } + + case object dataTextAreaType extends DataInputType { + override val id = "#dataTextArea" + } + + case object compoundDataType extends DataInputType { + override val id = "#compoundData" + } + } object DataParam extends LazyLogging { @@ -285,25 +298,6 @@ object DataParam extends LazyLogging { r } - private def getDataFormat( - name: String, - partsMap: PartsMap - ): IO[Option[DataFormat]] = for { - maybeStr <- partsMap.optPartValue(name) - } yield maybeStr match { - case None => None - case Some(str) => - DataFormat - .fromString(str) - .fold( - err => { - logger.warn(s"Unsupported dataFormat for $name: $str") - 
None - }, - df => Some(df) - ) - } - private[api] def mkDataParam(partsMap: PartsMap): IO[DataParam] = for { data <- partsMap.optPartValue("data") compoundData <- partsMap.optPartValue("compoundData") @@ -360,6 +354,25 @@ object DataParam extends LazyLogging { dp } + private def getDataFormat( + name: String, + partsMap: PartsMap + ): IO[Option[DataFormat]] = for { + maybeStr <- partsMap.optPartValue(name) + } yield maybeStr match { + case None => None + case Some(str) => + DataFormat + .fromString(str) + .fold( + err => { + logger.warn(s"Unsupported dataFormat for $name: $str") + None + }, + df => Some(df) + ) + } + private[api] def empty: DataParam = DataParam( None, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala index 6d695010..e9992c72 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala @@ -1,9 +1,9 @@ package es.weso.rdfshape.server.html2rdf + import cats.effect.{Resource => CatsResource, _} import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.nodes.IRI import es.weso.utils.IOUtils._ import org.apache.any23.Any23 import org.apache.any23.extractor._ @@ -28,51 +28,106 @@ import org.eclipse.rdf4j.model.{ import scala.util.Try +/** Utilities for extracting RDF models from different sources + */ object HTML2RDF extends LazyLogging { + /** List of all available RDF data extractors + */ val availableExtractors = List(RDFA11, Microdata) + /** List of the names of all available RDF data extractors + */ val availableExtractorNames: List[String] = availableExtractors.map(_.name) + /** For a given HTML string, extract the inner RDF data model + * + * @param htmlStr HTML string + * @param extractorName Name of the extractor to be used + * @return RDF 
Reasoner allowing operations on the extracted RDF data + */ def extractFromString( htmlStr: String, extractorName: String + ): CatsResource[IO, RDFReasoner] = { + extractFromSource(RdfSourceTypes.STRING, htmlStr, extractorName) + } + + /** For a given URI, extract its content's inner RDF data model + * + * @param uri URI containing the RDF data + * @param extractorName Name of the extractor to be used + * @return RDF Reasoner allowing operations on the extracted RDF data + */ + def extractFromUrl( + uri: String, + extractorName: String + ): CatsResource[IO, RDFReasoner] = { + extractFromSource(RdfSourceTypes.URI, uri, extractorName) + } + + /** @param sourceType Origin of the RDF data, used to perform different extraction operations + * @param rdfData String with the RDF data or the location of it + * @param extractorName Name of the extractor to be used + * @return RDF Reasoner allowing operations on the extracted RDF data + */ + private def extractFromSource( + sourceType: RdfSourceTypes.Value, + rdfData: String, + extractorName: String ): CatsResource[IO, RDFReasoner] = { Try { + logger.debug( + s"Extracting RDF from ${sourceType.toString} with extractor $extractorName" + ) + + // Common code to all RDF extractions val model = ModelFactory.createDefaultModel() val any23 = new Any23(extractorName) any23.setHTTPUserAgent("test-user-agent") - val httpClient = any23.getHTTPClient - val source = new StringDocumentSource(htmlStr, "http://example.org/") - val handler = JenaTripleHandler(model) + val handler = JenaTripleHandler(model) - logger.debug("Initialization ready for extractor...") + // Check the RDF source and get the data accordingly + val source = sourceType match { + case RdfSourceTypes.STRING => + new StringDocumentSource(rdfData, "http://example.org/") + case RdfSourceTypes.URI => + val httpClient = any23.getHTTPClient + new HTTPDocumentSource(httpClient, rdfData) + } + // Extract RDF from data try { any23.extract(source, handler) } finally { 
handler.close() } + // Return RDF model model }.fold( - e => + // Error handling + e => { + val errorMsg = + s"Error obtaining RDF from HTML string: ${e.getMessage}\nHTML String: $rdfData\nExtractor name: $extractorName" + logger.error(errorMsg) CatsResource.eval( - err( - s"Error obtaining RDF from HTML string: ${e.getMessage}\nHTML String: $htmlStr\nExtractor name: $extractorName" - ) - ), + err(errorMsg) + ) + }, model => { - fromModel(model, None) - } // Ref.of[IO,Model](model).flatMap(ref => ) + fromModel(model) + } ) } - private def fromModel( - model: Model, - uri: Option[IRI] - ): CatsResource[IO, RDFAsJenaModel] = { + /** Get an RDF model object from a general Jena model + * + * @param model Input RDF model + * @return RDF model + */ + private def fromModel(model: Model): CatsResource[IO, RDFAsJenaModel] = { CatsResource.make( Ref .of[IO, Model](model) @@ -80,39 +135,17 @@ object HTML2RDF extends LazyLogging { )(m => m.getModel.flatMap(m => IO(m.close()))) } - def extractFromUrl( - uri: String, - extractorName: String - ): CatsResource[IO, RDFReasoner] = { - Try { - val model = ModelFactory.createDefaultModel() - val any23 = new Any23(extractorName) - any23.setHTTPUserAgent("test-user-agent") - val httpClient = any23.getHTTPClient - val source = new HTTPDocumentSource(httpClient, uri) - // val out = new ByteArrayOutputStream() - val handler = JenaTripleHandler(model) - try { - any23.extract(source, handler) - } finally { - handler.close() - } - // val n3: String = out.toString("UTF-8") - model - }.fold( - e => - CatsResource.eval( - err(s"Exception obtaining RDF from URI: ${e.getMessage}\nURI:\n$uri") - ), - model => fromModel(model, Some(IRI(uri))) - ) - } - + /** Interface comprising all RDF data extractors + */ sealed trait Extractor { val name: String } - case class JenaTripleHandler(m: Model) extends TripleHandler { + /** RDF triple handler based on Apache Jena + * + * @param model Base model + */ + case class JenaTripleHandler(model: Model) extends 
TripleHandler { override def receiveTriple( s: Resource, @@ -121,7 +154,7 @@ object HTML2RDF extends LazyLogging { g: RDF4jIRI, context: ExtractionContext ): Unit = { - m.add(cnvSubj(s), cnvIRI(p), cnvObj(o)) + model.add(cnvSubj(s), cnvIRI(p), cnvObj(o)) logger.debug(s"Triple: <$s,$p,$o>") } @@ -131,22 +164,22 @@ object HTML2RDF extends LazyLogging { case b: BNode => cnvBNode(b) } - def cnvIRI(p: RDF4jIRI): JenaProperty = - m.createProperty(p.toString) - - def cnvBNode(b: BNode): JenaResource = - m.createResource(AnonId.create(b.getID)) - def cnvObj(o: Value): JenaRDFNode = o match { case i: RDF4jIRI => cnvIRI(i) case b: BNode => cnvBNode(b) case l: Literal => if(l.getLanguage.isPresent) { - m.createLiteral(l.getLabel, l.getLanguage.get) + model.createLiteral(l.getLabel, l.getLanguage.get) } else - m.createTypedLiteral(l.getLabel, l.getDatatype.toString) + model.createTypedLiteral(l.getLabel, l.getDatatype.toString) } + def cnvBNode(b: BNode): JenaResource = + model.createResource(AnonId.create(b.getID)) + + def cnvIRI(p: RDF4jIRI): JenaProperty = + model.createProperty(p.toString) + override def startDocument( documentIRI: RDF4jIRI ): Unit = {} @@ -171,11 +204,15 @@ object HTML2RDF extends LazyLogging { } + /** RDFA11 extractor + */ case object RDFA11 extends Extractor { val extractor = new RDFa11Extractor() val name: String = extractor.getDescription.getExtractorName } + /** Microdata extractor + */ case object Microdata extends Extractor { val extractor = new MicrodataExtractor val name: String = extractor.getDescription.getExtractorName diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/RdfSourceTypes.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/RdfSourceTypes.scala new file mode 100644 index 00000000..544da75e --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/RdfSourceTypes.scala @@ -0,0 +1,8 @@ +package es.weso.rdfshape.server.html2rdf + +/** Enum listing the accepted sources 
from which RDF may be extracted. + */ +case object RdfSourceTypes extends Enumeration { + val STRING: RdfSourceTypes.Value = Value("String") + val URI: RdfSourceTypes.Value = Value("URI") +} diff --git a/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala b/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala index 8c446e6a..e66dc968 100644 --- a/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala +++ b/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala @@ -1,34 +1,32 @@ package es.weso.rdfshape.server.html2rdf -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.utils.IOUtils._ -import es.weso.rdf.RDFReader -import cats.implicits._ import cats.effect.IO +import cats.implicits._ +import es.weso.rdf.jena.RDFAsJenaModel import munit._ class HTML2RDFTest extends CatsEffectSuite { shouldExtract( """| - |
- |

Post created - | last saturday. - |

- |
- | + | vocab = "http://schema.org/" > + |
+ |

Post created + | last saturday. + |

+ |
+ | """.stripMargin, """|prefix rdfa: - |prefix schema: - |prefix xsd: - | - | rdfa:usesVocabulary schema: . - | - | a schema:Blog ; - | schema:created "2018-10-30"^^xsd:date . + |prefix schema: + |prefix xsd: + | + | rdfa:usesVocabulary schema: . + | + | a schema:Blog ; + | schema:created "2018-10-30"^^xsd:date . """.stripMargin, "html-rdfa11" ) @@ -50,32 +48,32 @@ class HTML2RDFTest extends CatsEffectSuite { shouldExtract( """|
- |
- |""".stripMargin, + | itemtype="https://vocab.example.net/book"> + | + |""".stripMargin, """|prefix md: - | - | md:item [ - | a - |] . + | + | md:item [ + | a + |] . """.stripMargin, "html-microdata" ) shouldExtract( """|
- | My name is Alice. - |
- | + | itemtype="http://schema.org/Person" + | itemid="http://person.info/alice" style="font-size:25pt;"> + | My name is Alice. + | + | """.stripMargin, """|prefix md: - |prefix person: - | md:item person:alice . - |person:alice a ; - | "Alice" . - |""".stripMargin, + |prefix person: + | md:item person:alice . + |person:alice a ; + | "Alice" . + |""".stripMargin, "html-microdata" ) @@ -94,12 +92,14 @@ class HTML2RDFTest extends CatsEffectSuite { s"Should extract from \n$html\n and obtain\n$expected\nExtractor $extractorName" ) { val r: IO[(Boolean, String, String)] = for { - res1 <- IO(HTML2RDF.extractFromString(html, extractorName)) + res1 <- IO( + HTML2RDF.extractFromString(html, extractorName) + ) res2 <- RDFAsJenaModel.fromChars(expected, "TURTLE") vv <- (res1, res2).tupled.use { case (rdf, expected) => for { - expectedStr <- expected.serialize("TURTLE") - rdfObtained <- rdf.serialize("TURTLE") + expectedStr <- expected.serialize("TURTLE") + rdfObtained <- rdf.serialize("TURTLE") isIsomorphic <- rdf.isIsomorphicWith(expected) } yield (isIsomorphic, rdfObtained, expectedStr) } @@ -111,11 +111,11 @@ class HTML2RDFTest extends CatsEffectSuite { assertEquals(ok, true) } else { fail(s"""Model extracted is not isomorphic with expected one: - |Model extracted - |${expected} - |Model expected - |${rdf} - |""".stripMargin) + |Model extracted + |${expected} + |Model expected + |${rdf} + |""".stripMargin) } }) } From 3ecfb56fe5259f9e3050a8fecaf40f200fc6ee76 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Thu, 12 Aug 2021 19:26:36 +0200 Subject: [PATCH 05/32] Refactored Streams module. 
--- .../rdfshape/server/streams/Streams.scala | 203 ++++++++++++++---- 1 file changed, 164 insertions(+), 39 deletions(-) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala index 59ed9206..f6396174 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala @@ -1,4 +1,5 @@ package es.weso.rdfshape.server.streams + import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.SPARQLQueries.queryTriplesWithSubject @@ -8,55 +9,179 @@ import org.apache.jena.query.QueryExecutionFactory import org.apache.jena.rdf.model.ModelFactory import org.apache.jena.riot.system._ import org.apache.jena.riot.{Lang, RDFDataMgr} +import org.http4s.Uri import java.io.{OutputStream, StringWriter} +import java.nio.charset.Charset import java.nio.charset.StandardCharsets.UTF_8 -/* import org.apache.jena.riot.system.{StreamOps, StreamRDF, StreamRDFLib, - * StreamRDFWriter, StreamRDFWriterFactory} */ -import org.http4s.Uri +/** Utilities for working with RDF data and its extraction from remote source + */ object Streams extends LazyLogging { - def getRaw(uri: Uri): IO[String] = { - val stringWriter = new StringWriter - val os: OutputStream = new WriterOutputStream(stringWriter, UTF_8) - val destination: StreamRDF = - StreamRDFWriter.getWriterStream(os, Lang.TURTLE) - IO { - RDFDataMgr.parse(destination, uri.renderString) - stringWriter.toString - } + /** @param uri URI to read from + * @param lang Output RDF syntax (turtle, n-triples...) 
+ * @return Raw RDF data from a remote URI in plain text using the specified syntax + */ + def getRdfRaw( + uri: Uri, + lang: Lang = Lang.TURTLE + ): IO[String] = { + + getRdf( + uri, + lang, + (stringWriter, _, rdfStream) => { + IO { + RDFDataMgr.parse(rdfStream, uri.renderString) + val raw = stringWriter.toString + logger.debug(s"Raw graph: $raw") + raw + } + } + ) + } - def getRawWithModel(uri: Uri): IO[String] = { - val stringWriter = new StringWriter - val os: OutputStream = new WriterOutputStream(stringWriter, UTF_8) - val destination: StreamRDF = - StreamRDFWriter.getWriterStream(os, Lang.NTRIPLES) - IO { - val model = ModelFactory.createDefaultModel - val modelGraph = model.getGraph - val streamGraph = StreamRDFLib.graph(modelGraph) - RDFDataMgr.parse(streamGraph, uri.renderString) - - logger.debug(s"Model graph: $model") - - StreamRDFOps.sendGraphToStream(modelGraph, destination) - stringWriter.toString + /** Generic function for private use. Given an RDF-extracting function, executes it while checking for errors and closing all resources used in the process. + * + * @param uri URI to read from + * @param lang Output RDF syntax (turtle, n-triples...) + * @param getRdfLogic Logic in charge of extracting RDF from sources + * @param encoding Encoding with which the data extracted is stored + * @return String representation of the RDF data extracted (in the specified language and encoding) + */ + private def getRdf( + uri: Uri, + lang: Lang, + getRdfLogic: (StringWriter, OutputStream, StreamRDF) => IO[String], + encoding: Charset = UTF_8 + ): IO[String] = { + + /* Get the necessary elements (writer, streams, etc.) to read the RDF data + * and store it in plain text if needed. */ + val streamsIOElements = StreamsIOElements(lang, encoding) + val (stringWriter, outputStream, rdfStream) = + StreamsIOElements.unapply(streamsIOElements) + + /* Extract the String representation of the URI and pick up the data from + * the initial StringWriter. 
+ * DATA => StreamRDF => OutputStream => StringWriter */ + try { + getRdfLogic(stringWriter, outputStream, rdfStream) + } catch { + // Log errors before throwing + case e: Throwable => + logger.error(s"Error parsing RDF data from $uri: ${e.getMessage}") + throw e + } finally { + // Always close the output stream + outputStream.close() } + + } + + /** @param uri URI to read from + * @param lang Output RDF syntax (turtle, n-triples...) + * @return Graphed RDF data from a remote URI in plain text using the specified syntax + */ + def getRdfRawWithModel(uri: Uri, lang: Lang = Lang.NTRIPLES): IO[String] = { + getRdf( + uri, + lang, + (stringWriter, _, rdfStream) => { + IO { + val model = ModelFactory.createDefaultModel + val modelGraph = model.getGraph + val streamGraph = StreamRDFLib.graph(modelGraph) + RDFDataMgr.parse(streamGraph, uri.renderString) + + logger.debug(s"Model graph: $model") + + StreamRDFOps.sendGraphToStream(modelGraph, rdfStream) + stringWriter.toString + } + } + ) + + } + + /** @param endpoint URI to read from + * @param node Node to query the endpoint about + * @param lang Output RDF syntax (turtle, n-triples...) 
+ * @return Outgoing node information in RDF from a remote endpoint in plain text with the specified syntax + */ + def getOutgoing( + endpoint: Uri, + node: String, + lang: Lang = Lang.TURTLE + ): IO[String] = + getRdf( + endpoint, + lang, + (stringWriter, _, rdfStream) => { + IO { + val query = QueryExecutionFactory + .sparqlService( + endpoint.renderString, + queryTriplesWithSubject(IRI(node)) + ) + .execConstruct() + + val graph = query.getGraph + logger.debug(s"Query graph: $graph") + + StreamRDFOps.sendGraphToStream(graph, rdfStream) + stringWriter.toString + + } + } + ) + +} + +/** Data class used as a factory for the repetitive task of instantiating + * the IO tools (StringWriters, OutputStreams, RDFStreams) used for RDF reading, parsing and storing + * + * @param stringWriter String buffer used to store RDF data in plain text + * @param outputStream OutputStream receiving RDF data and sending it to the writer + * @param streamRDF RDFStream used for reading RDF data and sending it to the OutputStream once formatted + */ +sealed case class StreamsIOElements( + stringWriter: StringWriter, + outputStream: OutputStream, + streamRDF: StreamRDF +) + +object StreamsIOElements { + + /** Factory method + * + * @param lang Syntax that the RDFStream with use to output RDF data + * @param encoding Encoding that the OutputStream will use to output data + * @return A new data object with IO utils + */ + def apply( + lang: Lang = Lang.TURTLE, + encoding: Charset = UTF_8 + ): StreamsIOElements = { + // Create basic StringWriter and attach it to an OutputStream + val stringWriter = new StringWriter + val outputStream: OutputStream = + new WriterOutputStream(stringWriter, encoding) + // Create an RDF StreamWriter outputting to the previous OutputStream + val rdfStream: StreamRDF = + StreamRDFWriter.getWriterStream(outputStream, lang) + + new StreamsIOElements(stringWriter, outputStream, rdfStream) } - def getOutgoing(endpoint: String, node: String): IO[String] = IO { - 
logger.debug(s"Outgoing: $node at $endpoint") - - val c = QueryExecutionFactory - .sparqlService(endpoint, queryTriplesWithSubject(IRI(node))) - .execConstruct() - val stringWriter = new StringWriter - val os: OutputStream = new WriterOutputStream(stringWriter, UTF_8) - val destination: StreamRDF = - StreamRDFWriter.getWriterStream(os, Lang.TURTLE) - StreamRDFOps.sendGraphToStream(c.getGraph, destination) - stringWriter.toString + /** @param it StreamsIOElements containing the IO utils to parse RDF + * @return A tuple will the IO Utils ready to be destructured in other parts of the code + */ + def unapply( + it: StreamsIOElements + ): (StringWriter, OutputStream, StreamRDF) = { + (it.stringWriter, it.outputStream, it.streamRDF) } } From 31c26497bd63609af2225062aec53ec5b99b24b6 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 13 Aug 2021 17:19:03 +0200 Subject: [PATCH 06/32] Refactored API formats. --- .../weso/rdfshape/server/api/APIService.scala | 4 +- ...Definitions.scala => ApiDefinitions.scala} | 2 +- .../weso/rdfshape/server/api/DataParam.scala | 7 +- .../rdfshape/server/api/DataService.scala | 4 +- .../weso/rdfshape/server/api/Defaults.scala | 6 +- .../rdfshape/server/api/EndpointService.scala | 2 +- .../rdfshape/server/api/FetchService.scala | 2 +- .../server/api/PermalinkService.scala | 2 +- .../rdfshape/server/api/SchemaParam.scala | 8 +- .../rdfshape/server/api/SchemaService.scala | 6 +- .../rdfshape/server/api/ShExService.scala | 2 +- .../rdfshape/server/api/ShapeMapService.scala | 2 +- .../rdfshape/server/api/WikidataService.scala | 22 ++- .../server/api/format/DataFormat.scala | 157 ++++++------------ .../rdfshape/server/api/format/Format.scala | 59 ++++++- .../server/api/format/HtmlFormat.scala | 22 +++ .../server/api/format/RdfFormat.scala | 50 ++++++ .../server/api/format/SchemaFormat.scala | 73 ++++---- .../server/api/schema/SchemaFormat.scala | 108 ------------ 19 files changed, 254 insertions(+), 284 deletions(-) rename 
modules/server/src/main/scala/es/weso/rdfshape/server/api/{APIDefinitions.scala => ApiDefinitions.scala} (70%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala index 5767138b..bfcdb72f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala @@ -5,7 +5,7 @@ import cats.effect._ import cats.implicits._ import es.weso.rdf.jena.Endpoint import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.QueryParams._ import es.weso.utils.IOUtils._ import io.circe._ @@ -24,7 +24,7 @@ class APIService(client: Client[IO]) extends Http4sDsl[IO] { case req @ GET -> Root / `api` / "health" => Ok("OK") - + case req @ GET -> Root / `api` / "endpoint" / "outgoing" :? 
OptEndpointParam(optEndpoint) +& OptNodeParam(optNode) +& diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala similarity index 70% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/APIDefinitions.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala index 0a713b6c..7e9b3122 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIDefinitions.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala @@ -1,5 +1,5 @@ package es.weso.rdfshape.server.api -object APIDefinitions { +object ApiDefinitions { val api = "api" } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala index 41d5a0d9..1e47f4b3 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala @@ -80,10 +80,11 @@ case class DataParam( dataURL match { case None => err(s"Non value for dataURL") case Some(dataUrl) => - val dataFormat = dataFormatUrl.getOrElse(DataFormat.default) + val dataFormat = dataFormatUrl.getOrElse(DataFormat.defaultFormat) for { rdf <- rdfFromUri(new URI(dataUrl), dataFormat, base) } yield (None, rdf) + RDFFormat } case Right(`dataFileType`) => logger.debug(s"Input - dataFileType: $data") @@ -91,7 +92,7 @@ case class DataParam( case None => err(s"No value for dataFile") case Some(dataStr) => val dataFormat: Format = - dataFormatFile.getOrElse(DataFormat.default) + dataFormatFile.getOrElse(DataFormat.defaultFormat) /* io2es(RDFAsJenaModel.fromString(dataStr, dataFormat.name, * iriBase).use(rdf => for { iriBase <- mkBase(base) newRdf <- * extendWithInference(rdf, inference) eitherStr <- @@ -124,7 +125,7 @@ case class DataParam( case None => 
RDFAsJenaModel.empty.flatMap(e => IO((None, e))) case d @ Some(data) => val dataFormat = dataFormatTextarea.getOrElse( - dataFormatValue.getOrElse(DataFormat.default) + dataFormatValue.getOrElse(DataFormat.defaultFormat) ) val x: IO[(Option[String], Resource[IO, RDFReasoner])] = for { res <- rdfFromString(data, dataFormat, base) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala index 1886292c..7e3f0ff9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala @@ -3,7 +3,7 @@ package es.weso.rdfshape.server.api import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.ApiHelper._ import es.weso.rdfshape.server.api.Defaults.defaultDataFormat import es.weso.rdfshape.server.api.QueryParams._ @@ -39,7 +39,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { Ok(json) case GET -> Root / `api` / "data" / "formats" / "default" => - val dataFormat = DataFormat.default.name + val dataFormat = DataFormat.defaultFormat.name Ok(Json.fromString(dataFormat)) case GET -> Root / `api` / "data" / "inferenceEngines" => diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala index c1be1b8f..7fc9180b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala @@ -2,15 +2,15 @@ package es.weso.rdfshape.server.api import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.format._ -import es.weso.schema.{Schemas, _} +import es.weso.schema._ import 
es.weso.shapemaps.ShapeMap object Defaults { val availableDataFormats: List[DataFormat] = DataFormat.availableFormats - val defaultDataFormat: DataFormat = DataFormat.default + val defaultDataFormat: DataFormat = DataFormat.defaultFormat val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats - val defaultSchemaFormat: SchemaFormat = SchemaFormat.default + val defaultSchemaFormat: SchemaFormat = SchemaFormat.defaultFormat val availableSchemaEngines: List[String] = Schemas.availableSchemaNames val defaultSchemaEngine: String = Schemas.defaultSchemaName val availableTriggerModes: List[String] = Schemas.availableTriggerModes diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala index cb88112c..a0fcf255 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api import cats.data.EitherT import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.{Query => ServerQuery} import es.weso.utils.IOUtils._ import io.circe.Json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala index ea4d2c1c..c1fbc3a6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala @@ -1,7 +1,7 @@ package es.weso.rdfshape.server.api import cats.effect._ -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.QueryParams.UrlParam import 
org.http4s._ import org.http4s.client.Client diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala index 06a3112c..9511dcad 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.QueryParams.{UrlCodeParam, UrlParam} import org.http4s._ import org.http4s.client.Client diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala index d1dde786..a4d5945a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala @@ -61,7 +61,7 @@ case class SchemaParam( case Right(schema) => for { str <- schema.serialize( - schemaFormat.getOrElse(SchemaFormat.default).name + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name ) } yield (Some(str), Right(schema)) } @@ -90,7 +90,7 @@ case class SchemaParam( ) schema <- Schemas.fromString( str, - schemaFormat.getOrElse(SchemaFormat.default).name, + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, schemaEngine.getOrElse(defaultSchemaEngine), ApiHelper.getBase ) // .leftMap(s => s"Error parsing contents of $schemaUrl: $s\nContents:\n$str") @@ -112,7 +112,7 @@ case class SchemaParam( case None => IO((None, Left(s"No value for schemaFile"))) case Some(schemaStr) => val schemaFormatStr = - schemaFormat.getOrElse(SchemaFormat.default).name + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name val schemaEngineStr = 
schemaEngine.getOrElse(defaultSchemaEngine) Schemas @@ -137,7 +137,7 @@ case class SchemaParam( pair <- Schemas .fromString( schemaStr, - schemaFormat.getOrElse(SchemaFormat.default).name, + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, schemaEngine.getOrElse(defaultSchemaEngine), ApiHelper.getBase ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala index 27c03027..a43a82f3 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala @@ -6,8 +6,8 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.{InferenceEngine, RDFReasoner} -import es.weso.rdfshape.server.api.APIDefinitions._ -import es.weso.rdfshape.server.api.ApiHelper.{SchemaInfoReply, _} +import es.weso.rdfshape.server.api.ApiDefinitions._ +import es.weso.rdfshape.server.api.ApiHelper._ import es.weso.rdfshape.server.api.Defaults._ import es.weso.rdfshape.server.api.QueryParams._ import es.weso.rdfshape.server.api.format._ @@ -196,7 +196,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { converted <- convertSchema( schema, sp.schema, - sp.schemaFormat.getOrElse(SchemaFormat.default), + sp.schemaFormat.getOrElse(SchemaFormat.defaultFormat), sp.schemaEngine.getOrElse(defaultSchemaEngine), targetSchemaFormat, sp.targetSchemaEngine diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala index 46708f64..1174bb7a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala @@ -1,6 +1,6 @@ package es.weso.rdfshape.server.api import cats.effect._ 
-import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.schema._ import io.circe._ import org.http4s._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala index fe2f156d..1230eaa1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.ApiHelper._ import es.weso.rdfshape.server.api.results.ShapeMapInfoResult import es.weso.shapemaps.ShapeMap diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala index 2d53f17a..25bbfecb 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala @@ -24,7 +24,7 @@ import org.http4s.headers._ import org.http4s.multipart._ import org.http4s.implicits._ import es.weso.rdf.sgraph._ -import APIDefinitions._ +import ApiDefinitions._ import es.weso.utils.IOUtils._ import org.http4s.client.middleware.FollowRedirect import es.weso.shapemaps.{Status => _, _} @@ -43,7 +43,9 @@ import es.weso.wikibaserdf._ import ApiHelper._ import com.typesafe.scalalogging.LazyLogging -class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { +class WikidataService(client: Client[IO]) + extends Http4sDsl[IO] + with LazyLogging { val wikidataEntityUrl = uri"http://www.wikidata.org/entity" val apiUri = uri"/api/wikidata/entity" @@ -116,7 +118,7 
@@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging val continue: String = maybeContinue.getOrElse(defaultContinue.toString) val requestUrl = s"""${endpoint.getOrElse("https://www.wikidata.org")}""" - + val uri = Uri .fromString(requestUrl) .valueOr(throw _) @@ -199,7 +201,7 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging ContinueParam(maybeContinue) => { val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - + val uri = uri"https://www.wikidata.org" .withPath(Uri.Path.unsafeFromString("/w/api.php")) .withQueryParam("action", "wbsearchentities") @@ -209,7 +211,7 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging .withQueryParam("continue", continue) .withQueryParam("type", "lexeme") .withQueryParam("format", "json") - + val req: Request[IO] = Request(method = GET, uri = uri) for { eitherValues <- client.run(req).use { @@ -303,9 +305,11 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) val r: EitherT[IO, String, Response[IO]] = for { - label <- EitherT(partsMap.eitherPartValue("entity")) - info <- either2es[InfoEntity](cnvEntity(label)) - _ <- { logger.debug(s"Extraction URI: ${info.uri}"); ok_esf[Unit, IO](()) } + label <- EitherT(partsMap.eitherPartValue("entity")) + info <- either2es[InfoEntity](cnvEntity(label)) + _ <- { + logger.debug(s"Extraction URI: ${info.uri}"); ok_esf[Unit, IO](()) + } strRdf <- io2es(redirectClient.expect[String](info.uri)) eitherInferred <- io2es( RDFAsJenaModel @@ -528,7 +532,7 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging // data <- resolve(uri) // rdf <- getRDF(data) // maybeDot <- generateDot(rdf, withDot)/* if (generateDot) -/* EitherT.fromEither[F](RDF2Dot.rdf2dot(rdf).bimap(e => s"Error + /* 
EitherT.fromEither[F](RDF2Dot.rdf2dot(rdf).bimap(e => s"Error * converting to Dot: $e", s => Some(s.toString))) */ // else EitherT.pure(none) */ json <- prepareJson( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala index 7cc68f1f..8e3bf0e1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala @@ -2,109 +2,62 @@ package es.weso.rdfshape.server.api.format import org.http4s.MediaType -trait DataFormat extends Format { - val default: DataFormat = Turtle -} - -object DataFormat { - - val default: DataFormat = Turtle - - def fromString(name: String): Either[String, DataFormat] = - if(name == "") Right(default) - else { - formatsMap.get(name.toLowerCase) match { - case None => - Left( - s"Not found format: $name. Available formats: ${availableFormats.mkString(",")}" - ) - case Some(df) => Right(df) - } - } - - private def formatsMap: Map[String, DataFormat] = { - def toPair(f: DataFormat): (String, DataFormat) = (f.name.toLowerCase(), f) - availableFormats.map(toPair).toMap - } - - lazy val availableFormats: List[DataFormat] = - List( - Turtle, - JsonLd, - NTriples, - RdfXml, - RdfJson, - Trig, - HtmlMicrodata, - HtmlRdfa11, - Dot, - Svg, - Png, - JsonDataFormat +/** Extension of the Format interface to represent RDF data formats + */ +class DataFormat(formatName: String, formatMimeType: MediaType) extends Format { + override val name: String = formatName + override val mimeType: MediaType = formatMimeType +} + +/** Companion object with all DataFormat static utilities + */ +object DataFormat extends FormatCompanion[DataFormat] { + + override lazy val availableFormats: List[DataFormat] = List( + Turtle, + JsonLd, + NTriples, + RdfXml, + RdfJson, + Trig, + HtmlMicrodata, + HtmlRdfa11, + Dot, + Svg, + Png, + 
JsonDataFormat + ) + override val defaultFormat: DataFormat = Turtle +} + +/** Represents the mime-type "application/json" + */ +case object JsonDataFormat + extends DataFormat( + formatName = "json", + formatMimeType = new MediaType("application", "json") ) -} - -case object JsonDataFormat extends DataFormat { - override val name = "json" - override val mimeType = new MediaType("application", "json") -} - -case object Dot extends DataFormat { - override val name = "dot" - override val mimeType = new MediaType("text", "vnd.graphviz") -} - -case object Svg extends DataFormat { - override val name = "svg" - override val mimeType = MediaType.image.`svg+xml` -} - -case object Png extends DataFormat { - override val name = "png" - override val mimeType = MediaType.image.png -} - -sealed trait RDFFormat extends DataFormat - -case object Turtle extends RDFFormat { - override val name = "turtle" - override val mimeType = new MediaType("text", "turtle") -} - -case object NTriples extends RDFFormat { - override val name = "n-triples" - override val mimeType = new MediaType("application", "n-triples") -} - -case object Trig extends RDFFormat { - override val name = "trig" - override val mimeType = new MediaType("application", "trig") -} - -case object JsonLd extends RDFFormat { - override val name = "json-ld" - override val mimeType = new MediaType("application", "ld+json") -} - -case object RdfXml extends RDFFormat { - override val name = "rdf/xml" - override val mimeType = new MediaType("application", "rdf+xml") -} - -case object RdfJson extends RDFFormat { - override val name = "rdf/json" - override val mimeType = MediaType.application.json -} - -sealed trait HtmlFormat extends DataFormat +/** Represents the mime-type "text/vnd.graphviz" + */ +case object Dot + extends DataFormat( + formatName = "dot", + formatMimeType = new MediaType("text", "vnd.graphviz") + ) -case object HtmlRdfa11 extends HtmlFormat { - override val name = "html-rdfa11" - override val mimeType = 
MediaType.text.html -} +/** Represents the mime-type "image/svg+xml" + */ +case object Svg + extends DataFormat( + formatName = "svg", + formatMimeType = MediaType.image.`svg+xml` + ) -case object HtmlMicrodata extends HtmlFormat { - override val name = "html-microdata" - override val mimeType = MediaType.text.html -} +/** Represents the mime-type "image/png" + */ +case object Png + extends DataFormat( + formatName = "png", + formatMimeType = MediaType.image.png + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index 905e2310..c5970dde 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -1,14 +1,67 @@ package es.weso.rdfshape.server.api.format +import com.typesafe.scalalogging.LazyLogging import org.http4s.MediaType +/** Generic interface for any format any data transmitted to/from the API may have + */ trait Format { + + /** Format friendly name + */ val name: String - val mimeType: MediaType -// def availableFormats[F <: Format]: List[F] -// def default[F<:Format]: F + /** Format mime type (e.g., application/json, image/png, etc.) 
+ */ + val mimeType: MediaType override def toString: String = s"Format $name" } + +trait FormatCompanion[F <: Format] extends LazyLogging { + + /** Default format to be used when none specified + */ + val defaultFormat: F + + /** List of all formats available for the current type of entity + */ + val availableFormats: List[F] + + /** Given a format name, get its corresponding DataFormat object + * DataFormat + * + * @param name String name of the format we require + * @return the DataFormat object with the format data (an error String if it does not exist) + */ + def fromString(name: String): Either[String, F] = { + if(name.isBlank) Right(defaultFormat) + else { + formatsMap.get(name.toLowerCase) match { + case None => + val errorMsg = mkErrorMessage(name) + logger.error(errorMsg) + Left(errorMsg) + case Some(format) => Right(format) + } + } + } + + /** Make a generic error message string to be used elsewhere + * + * @param name Name of the problematic format + * @return Error message with format alternatives + */ + def mkErrorMessage(name: String): String = + s"Not found format: $name. 
Available formats: ${availableFormats.mkString(",")}" + + /** @return Map [String, Format] containing all formats while using their names as keys + */ + def formatsMap: Map[String, F] = { + def getFormatPairs(format: F): (String, F) = + (format.name.toLowerCase(), format) + + availableFormats.map(getFormatPairs).toMap + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala new file mode 100644 index 00000000..62090d58 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala @@ -0,0 +1,22 @@ +package es.weso.rdfshape.server.api.format + +import org.http4s.MediaType + +/** Dummy trait to differentiate HTML-based formats from the more generic DataFormat + * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + */ +sealed trait HtmlFormat extends DataFormat + +/** Represents the mime-type "text/html" when used along rdfa11 + */ +case object HtmlRdfa11 extends HtmlFormat { + override val name = "html-rdfa11" + override val mimeType: MediaType = MediaType.text.html +} + +/** Represents the mime-type "text/html" when used along microdata + */ +case object HtmlMicrodata extends HtmlFormat { + override val name = "html-microdata" + override val mimeType: MediaType = MediaType.text.html +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala new file mode 100644 index 00000000..6a98a151 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala @@ -0,0 +1,50 @@ +package es.weso.rdfshape.server.api.format + +import org.http4s.MediaType + +/** Dummy trait to differentiate RDF formats from the more generic DataFormat + * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + */ +sealed trait RDFFormat extends DataFormat + +/** 
Represents the mime-type "text/turtle" + */ +case object Turtle extends RDFFormat { + override val name = "turtle" + override val mimeType = new MediaType("text", "turtle") +} + +/** Represents the mime-type "application/n-triples" + */ +case object NTriples extends RDFFormat { + override val name = "n-triples" + override val mimeType = new MediaType("application", "n-triples") +} + +/** Represents the mime-type "application/trig" + */ +case object Trig extends RDFFormat { + override val name = "trig" + override val mimeType = new MediaType("application", "trig") +} + +/** Represents the mime-type "application/ld+json" + */ +case object JsonLd extends RDFFormat { + override val name = "json-ld" + override val mimeType = new MediaType("application", "ld+json") +} + +/** Represents the mime-type "application/rdf+xml" + */ +case object RdfXml extends RDFFormat { + override val name = "rdf/xml" + override val mimeType = new MediaType("application", "rdf+xml") +} + +/** Represents the mime-type "application/json" + */ +case object RdfJson extends RDFFormat { + override val name = "rdf/json" + override val mimeType: MediaType = MediaType.application.json +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala index 1cf19b43..d9ec983a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala @@ -2,50 +2,45 @@ package es.weso.rdfshape.server.api.format import org.http4s.MediaType -trait SchemaFormat extends DataFormat { - override val default: SchemaFormat = ShExC +/** Dummy trait to differentiate schema formats from the more generic DataFormat + * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + */ +class SchemaFormat(formatName: String, formatMimeType: MediaType) + extends DataFormat(formatName, 
formatMimeType) { + def this(format: Format) { + this(format.name, format.mimeType) + } } -object SchemaFormat { - - val default: SchemaFormat = ShExC - - def fromString(name: String): Either[String, SchemaFormat] = - if(name == "") Right(default) - else { - formatsMap.get(name.toLowerCase) match { - case None => - Left( - s"Not found format: $name. Available formats: ${availableFormats.mkString(",")}" - ) - case Some(df) => Right(df) - } - } +/** Companion object with all SchemaFormat static utilities + */ +object SchemaFormat extends FormatCompanion[SchemaFormat] { - private def formatsMap: Map[String, SchemaFormat] = { - def toPair(f: SchemaFormat): (String, SchemaFormat) = - (f.name.toLowerCase(), f) - availableFormats.map(toPair).toMap - } - - val availableFormats: List[SchemaFormat] = + override lazy val availableFormats: List[SchemaFormat] = List( - FromDataFormat(Turtle), - FromDataFormat(JsonLd), - FromDataFormat(NTriples), - FromDataFormat(RdfXml), - FromDataFormat(RdfJson), - FromDataFormat(Trig), + new SchemaFormat(Turtle), + new SchemaFormat(JsonLd), + new SchemaFormat(NTriples), + new SchemaFormat(RdfXml), + new SchemaFormat(RdfJson), + new SchemaFormat(Trig), ShExC ) - -} -case object ShExC extends SchemaFormat { - override val name = "shexc" - override val mimeType = new MediaType("text", "shex") + override val defaultFormat: SchemaFormat = ShExC } -case class FromDataFormat(dataFormat: DataFormat) extends SchemaFormat { - override val name = dataFormat.name - override val mimeType = dataFormat.mimeType -} +/** Represents the mime-type "text/shex" + */ +case object ShExC + extends SchemaFormat( + formatName = "shexc", + formatMimeType = new MediaType("text", "shex") + ) + +/** Represents the mime-type "image/png" + */ +case class FromDataFormat(dataFormat: DataFormat) + extends SchemaFormat( + formatName = dataFormat.name, + formatMimeType = dataFormat.mimeType + ) diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala deleted file mode 100644 index d47c8ece..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala +++ /dev/null @@ -1,108 +0,0 @@ -package es.weso.rdfshape.server.api.schema - -import org.http4s.MediaType - -sealed trait DataFormat { - val name: String - val mimeType: MediaType - - override def toString: String = s"$name" -} - -object DataFormat { - lazy val availableDataFormats: List[DataFormat] = - List( - Turtle, - JsonLd, - NTriples, - RdfXml, - RdfJson, - Trig, - HtmlMicrodata, - HtmlRdfa11, - Dot, - Svg, - Png, - JsonDataFormat - ) - lazy val dataFormatsMap: Map[String, DataFormat] = - availableDataFormats.map(df => (df.name.toLowerCase, df)).toMap - lazy val default: DataFormat = Turtle - - def fromString(name: String): Either[String, DataFormat] = - if(name == "") Right(default) - else { - dataFormatsMap.get(name.toLowerCase) match { - case None => - Left( - s"Not found data format: $name. 
Available formats: ${availableDataFormats.mkString(",")}" - ) - case Some(df) => Right(df) - } - } - -} - -case object JsonDataFormat extends DataFormat { - override val name = "json" - override val mimeType = new MediaType("application", "json") -} - -case object Dot extends DataFormat { - override val name = "dot" - override val mimeType = new MediaType("text", "vnd.graphviz") -} - -case object Svg extends DataFormat { - override val name = "svg" - override val mimeType: MediaType = MediaType.image.`svg+xml` -} - -case object Png extends DataFormat { - override val name = "png" - override val mimeType: MediaType = MediaType.image.png -} - -sealed trait RDFFormat extends DataFormat - -case object Turtle extends RDFFormat { - override val name = "turtle" - override val mimeType = new MediaType("text", "turtle") -} - -case object NTriples extends RDFFormat { - override val name = "n-triples" - override val mimeType = new MediaType("application", "n-triples") -} - -case object Trig extends RDFFormat { - override val name = "trig" - override val mimeType = new MediaType("application", "trig") -} - -case object JsonLd extends RDFFormat { - override val name = "json-ld" - override val mimeType = new MediaType("application", "ld+json") -} - -case object RdfXml extends RDFFormat { - override val name = "rdf/xml" - override val mimeType = new MediaType("application", "rdf+xml") -} - -case object RdfJson extends RDFFormat { - override val name = "rdf/json" - override val mimeType: MediaType = MediaType.application.json -} - -sealed trait HtmlFormat extends DataFormat - -case object HtmlRdfa11 extends HtmlFormat { - override val name = "html-rdfa11" - override val mimeType: MediaType = MediaType.text.html -} - -case object HtmlMicrodata extends HtmlFormat { - override val name = "html-microdata" - override val mimeType: MediaType = MediaType.text.html -} From 712ac05638976f4d13254a5a3541f9246c3c47fd Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 13 Aug 2021 17:19:03 
+0200 Subject: [PATCH 07/32] Refactored API formats. --- .../weso/rdfshape/server/api/APIService.scala | 4 +- ...Definitions.scala => ApiDefinitions.scala} | 2 +- .../weso/rdfshape/server/api/DataParam.scala | 7 +- .../rdfshape/server/api/DataService.scala | 4 +- .../weso/rdfshape/server/api/Defaults.scala | 6 +- .../rdfshape/server/api/EndpointService.scala | 2 +- .../rdfshape/server/api/FetchService.scala | 2 +- .../server/api/PermalinkService.scala | 2 +- .../rdfshape/server/api/SchemaParam.scala | 8 +- .../rdfshape/server/api/SchemaService.scala | 6 +- .../rdfshape/server/api/ShExService.scala | 2 +- .../rdfshape/server/api/ShapeMapService.scala | 2 +- .../rdfshape/server/api/WikidataService.scala | 22 ++- .../server/api/format/DataFormat.scala | 157 ++++++------------ .../rdfshape/server/api/format/Format.scala | 59 ++++++- .../server/api/format/HtmlFormat.scala | 22 +++ .../server/api/format/RdfFormat.scala | 50 ++++++ .../server/api/format/SchemaFormat.scala | 65 +++----- .../server/api/schema/SchemaFormat.scala | 108 ------------ 19 files changed, 246 insertions(+), 284 deletions(-) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{APIDefinitions.scala => ApiDefinitions.scala} (70%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala index 5767138b..bfcdb72f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala @@ -5,7 +5,7 @@ import cats.effect._ import cats.implicits._ import es.weso.rdf.jena.Endpoint import 
es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.QueryParams._ import es.weso.utils.IOUtils._ import io.circe._ @@ -24,7 +24,7 @@ class APIService(client: Client[IO]) extends Http4sDsl[IO] { case req @ GET -> Root / `api` / "health" => Ok("OK") - + case req @ GET -> Root / `api` / "endpoint" / "outgoing" :? OptEndpointParam(optEndpoint) +& OptNodeParam(optNode) +& diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala similarity index 70% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/APIDefinitions.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala index 0a713b6c..7e9b3122 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIDefinitions.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala @@ -1,5 +1,5 @@ package es.weso.rdfshape.server.api -object APIDefinitions { +object ApiDefinitions { val api = "api" } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala index 41d5a0d9..1e47f4b3 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala @@ -80,10 +80,11 @@ case class DataParam( dataURL match { case None => err(s"Non value for dataURL") case Some(dataUrl) => - val dataFormat = dataFormatUrl.getOrElse(DataFormat.default) + val dataFormat = dataFormatUrl.getOrElse(DataFormat.defaultFormat) for { rdf <- rdfFromUri(new URI(dataUrl), dataFormat, base) } yield (None, rdf) + RDFFormat } case Right(`dataFileType`) => logger.debug(s"Input - dataFileType: $data") @@ -91,7 +92,7 @@ case class DataParam( 
case None => err(s"No value for dataFile") case Some(dataStr) => val dataFormat: Format = - dataFormatFile.getOrElse(DataFormat.default) + dataFormatFile.getOrElse(DataFormat.defaultFormat) /* io2es(RDFAsJenaModel.fromString(dataStr, dataFormat.name, * iriBase).use(rdf => for { iriBase <- mkBase(base) newRdf <- * extendWithInference(rdf, inference) eitherStr <- @@ -124,7 +125,7 @@ case class DataParam( case None => RDFAsJenaModel.empty.flatMap(e => IO((None, e))) case d @ Some(data) => val dataFormat = dataFormatTextarea.getOrElse( - dataFormatValue.getOrElse(DataFormat.default) + dataFormatValue.getOrElse(DataFormat.defaultFormat) ) val x: IO[(Option[String], Resource[IO, RDFReasoner])] = for { res <- rdfFromString(data, dataFormat, base) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala index 1886292c..7e3f0ff9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala @@ -3,7 +3,7 @@ package es.weso.rdfshape.server.api import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.ApiHelper._ import es.weso.rdfshape.server.api.Defaults.defaultDataFormat import es.weso.rdfshape.server.api.QueryParams._ @@ -39,7 +39,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { Ok(json) case GET -> Root / `api` / "data" / "formats" / "default" => - val dataFormat = DataFormat.default.name + val dataFormat = DataFormat.defaultFormat.name Ok(Json.fromString(dataFormat)) case GET -> Root / `api` / "data" / "inferenceEngines" => diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala index c1be1b8f..7fc9180b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala @@ -2,15 +2,15 @@ package es.weso.rdfshape.server.api import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.format._ -import es.weso.schema.{Schemas, _} +import es.weso.schema._ import es.weso.shapemaps.ShapeMap object Defaults { val availableDataFormats: List[DataFormat] = DataFormat.availableFormats - val defaultDataFormat: DataFormat = DataFormat.default + val defaultDataFormat: DataFormat = DataFormat.defaultFormat val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats - val defaultSchemaFormat: SchemaFormat = SchemaFormat.default + val defaultSchemaFormat: SchemaFormat = SchemaFormat.defaultFormat val availableSchemaEngines: List[String] = Schemas.availableSchemaNames val defaultSchemaEngine: String = Schemas.defaultSchemaName val availableTriggerModes: List[String] = Schemas.availableTriggerModes diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala index cb88112c..a0fcf255 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api import cats.data.EitherT import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.{Query => ServerQuery} import es.weso.utils.IOUtils._ import io.circe.Json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala index ea4d2c1c..c1fbc3a6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala @@ -1,7 +1,7 @@ package es.weso.rdfshape.server.api import cats.effect._ -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.QueryParams.UrlParam import org.http4s._ import org.http4s.client.Client diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala index 06a3112c..9511dcad 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.QueryParams.{UrlCodeParam, UrlParam} import org.http4s._ import org.http4s.client.Client diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala index d1dde786..a4d5945a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala @@ -61,7 +61,7 @@ case class SchemaParam( case Right(schema) => for { str <- schema.serialize( - schemaFormat.getOrElse(SchemaFormat.default).name + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name ) } yield (Some(str), Right(schema)) } @@ -90,7 +90,7 @@ case class SchemaParam( ) schema <- Schemas.fromString( str, - 
schemaFormat.getOrElse(SchemaFormat.default).name, + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, schemaEngine.getOrElse(defaultSchemaEngine), ApiHelper.getBase ) // .leftMap(s => s"Error parsing contents of $schemaUrl: $s\nContents:\n$str") @@ -112,7 +112,7 @@ case class SchemaParam( case None => IO((None, Left(s"No value for schemaFile"))) case Some(schemaStr) => val schemaFormatStr = - schemaFormat.getOrElse(SchemaFormat.default).name + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name val schemaEngineStr = schemaEngine.getOrElse(defaultSchemaEngine) Schemas @@ -137,7 +137,7 @@ case class SchemaParam( pair <- Schemas .fromString( schemaStr, - schemaFormat.getOrElse(SchemaFormat.default).name, + schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, schemaEngine.getOrElse(defaultSchemaEngine), ApiHelper.getBase ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala index 27c03027..a43a82f3 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala @@ -6,8 +6,8 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.{InferenceEngine, RDFReasoner} -import es.weso.rdfshape.server.api.APIDefinitions._ -import es.weso.rdfshape.server.api.ApiHelper.{SchemaInfoReply, _} +import es.weso.rdfshape.server.api.ApiDefinitions._ +import es.weso.rdfshape.server.api.ApiHelper._ import es.weso.rdfshape.server.api.Defaults._ import es.weso.rdfshape.server.api.QueryParams._ import es.weso.rdfshape.server.api.format._ @@ -196,7 +196,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { converted <- convertSchema( schema, sp.schema, - sp.schemaFormat.getOrElse(SchemaFormat.default), + 
sp.schemaFormat.getOrElse(SchemaFormat.defaultFormat), sp.schemaEngine.getOrElse(defaultSchemaEngine), targetSchemaFormat, sp.targetSchemaEngine diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala index 46708f64..1174bb7a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala @@ -1,6 +1,6 @@ package es.weso.rdfshape.server.api import cats.effect._ -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.schema._ import io.circe._ import org.http4s._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala index fe2f156d..1230eaa1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.APIDefinitions._ +import es.weso.rdfshape.server.api.ApiDefinitions._ import es.weso.rdfshape.server.api.ApiHelper._ import es.weso.rdfshape.server.api.results.ShapeMapInfoResult import es.weso.shapemaps.ShapeMap diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala index 2d53f17a..25bbfecb 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala @@ -24,7 +24,7 @@ import org.http4s.headers._ import org.http4s.multipart._ import org.http4s.implicits._ import 
es.weso.rdf.sgraph._ -import APIDefinitions._ +import ApiDefinitions._ import es.weso.utils.IOUtils._ import org.http4s.client.middleware.FollowRedirect import es.weso.shapemaps.{Status => _, _} @@ -43,7 +43,9 @@ import es.weso.wikibaserdf._ import ApiHelper._ import com.typesafe.scalalogging.LazyLogging -class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { +class WikidataService(client: Client[IO]) + extends Http4sDsl[IO] + with LazyLogging { val wikidataEntityUrl = uri"http://www.wikidata.org/entity" val apiUri = uri"/api/wikidata/entity" @@ -116,7 +118,7 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging val continue: String = maybeContinue.getOrElse(defaultContinue.toString) val requestUrl = s"""${endpoint.getOrElse("https://www.wikidata.org")}""" - + val uri = Uri .fromString(requestUrl) .valueOr(throw _) @@ -199,7 +201,7 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging ContinueParam(maybeContinue) => { val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - + val uri = uri"https://www.wikidata.org" .withPath(Uri.Path.unsafeFromString("/w/api.php")) .withQueryParam("action", "wbsearchentities") @@ -209,7 +211,7 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging .withQueryParam("continue", continue) .withQueryParam("type", "lexeme") .withQueryParam("format", "json") - + val req: Request[IO] = Request(method = GET, uri = uri) for { eitherValues <- client.run(req).use { @@ -303,9 +305,11 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) val r: EitherT[IO, String, Response[IO]] = for { - label <- EitherT(partsMap.eitherPartValue("entity")) - info <- either2es[InfoEntity](cnvEntity(label)) - _ <- { logger.debug(s"Extraction URI: ${info.uri}"); ok_esf[Unit, 
IO](()) } + label <- EitherT(partsMap.eitherPartValue("entity")) + info <- either2es[InfoEntity](cnvEntity(label)) + _ <- { + logger.debug(s"Extraction URI: ${info.uri}"); ok_esf[Unit, IO](()) + } strRdf <- io2es(redirectClient.expect[String](info.uri)) eitherInferred <- io2es( RDFAsJenaModel @@ -528,7 +532,7 @@ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging // data <- resolve(uri) // rdf <- getRDF(data) // maybeDot <- generateDot(rdf, withDot)/* if (generateDot) -/* EitherT.fromEither[F](RDF2Dot.rdf2dot(rdf).bimap(e => s"Error + /* EitherT.fromEither[F](RDF2Dot.rdf2dot(rdf).bimap(e => s"Error * converting to Dot: $e", s => Some(s.toString))) */ // else EitherT.pure(none) */ json <- prepareJson( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala index 7cc68f1f..8e3bf0e1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala @@ -2,109 +2,62 @@ package es.weso.rdfshape.server.api.format import org.http4s.MediaType -trait DataFormat extends Format { - val default: DataFormat = Turtle -} - -object DataFormat { - - val default: DataFormat = Turtle - - def fromString(name: String): Either[String, DataFormat] = - if(name == "") Right(default) - else { - formatsMap.get(name.toLowerCase) match { - case None => - Left( - s"Not found format: $name. 
Available formats: ${availableFormats.mkString(",")}" - ) - case Some(df) => Right(df) - } - } - - private def formatsMap: Map[String, DataFormat] = { - def toPair(f: DataFormat): (String, DataFormat) = (f.name.toLowerCase(), f) - availableFormats.map(toPair).toMap - } - - lazy val availableFormats: List[DataFormat] = - List( - Turtle, - JsonLd, - NTriples, - RdfXml, - RdfJson, - Trig, - HtmlMicrodata, - HtmlRdfa11, - Dot, - Svg, - Png, - JsonDataFormat +/** Extension of the Format interface to represent RDF data formats + */ +class DataFormat(formatName: String, formatMimeType: MediaType) extends Format { + override val name: String = formatName + override val mimeType: MediaType = formatMimeType +} + +/** Companion object with all DataFormat static utilities + */ +object DataFormat extends FormatCompanion[DataFormat] { + + override lazy val availableFormats: List[DataFormat] = List( + Turtle, + JsonLd, + NTriples, + RdfXml, + RdfJson, + Trig, + HtmlMicrodata, + HtmlRdfa11, + Dot, + Svg, + Png, + JsonDataFormat + ) + override val defaultFormat: DataFormat = Turtle +} + +/** Represents the mime-type "application/json" + */ +case object JsonDataFormat + extends DataFormat( + formatName = "json", + formatMimeType = new MediaType("application", "json") ) -} - -case object JsonDataFormat extends DataFormat { - override val name = "json" - override val mimeType = new MediaType("application", "json") -} - -case object Dot extends DataFormat { - override val name = "dot" - override val mimeType = new MediaType("text", "vnd.graphviz") -} - -case object Svg extends DataFormat { - override val name = "svg" - override val mimeType = MediaType.image.`svg+xml` -} - -case object Png extends DataFormat { - override val name = "png" - override val mimeType = MediaType.image.png -} - -sealed trait RDFFormat extends DataFormat - -case object Turtle extends RDFFormat { - override val name = "turtle" - override val mimeType = new MediaType("text", "turtle") -} - -case object NTriples 
extends RDFFormat { - override val name = "n-triples" - override val mimeType = new MediaType("application", "n-triples") -} - -case object Trig extends RDFFormat { - override val name = "trig" - override val mimeType = new MediaType("application", "trig") -} - -case object JsonLd extends RDFFormat { - override val name = "json-ld" - override val mimeType = new MediaType("application", "ld+json") -} - -case object RdfXml extends RDFFormat { - override val name = "rdf/xml" - override val mimeType = new MediaType("application", "rdf+xml") -} - -case object RdfJson extends RDFFormat { - override val name = "rdf/json" - override val mimeType = MediaType.application.json -} - -sealed trait HtmlFormat extends DataFormat +/** Represents the mime-type "text/vnd.graphviz" + */ +case object Dot + extends DataFormat( + formatName = "dot", + formatMimeType = new MediaType("text", "vnd.graphviz") + ) -case object HtmlRdfa11 extends HtmlFormat { - override val name = "html-rdfa11" - override val mimeType = MediaType.text.html -} +/** Represents the mime-type "image/svg+xml" + */ +case object Svg + extends DataFormat( + formatName = "svg", + formatMimeType = MediaType.image.`svg+xml` + ) -case object HtmlMicrodata extends HtmlFormat { - override val name = "html-microdata" - override val mimeType = MediaType.text.html -} +/** Represents the mime-type "image/png" + */ +case object Png + extends DataFormat( + formatName = "png", + formatMimeType = MediaType.image.png + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index 905e2310..c5970dde 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -1,14 +1,67 @@ package es.weso.rdfshape.server.api.format +import com.typesafe.scalalogging.LazyLogging import org.http4s.MediaType +/** Generic 
interface for any format any data transmitted to/from the API may have + */ trait Format { + + /** Format friendly name + */ val name: String - val mimeType: MediaType -// def availableFormats[F <: Format]: List[F] -// def default[F<:Format]: F + /** Format mime type (e.g., application/json, image/png, etc.) + */ + val mimeType: MediaType override def toString: String = s"Format $name" } + +trait FormatCompanion[F <: Format] extends LazyLogging { + + /** Default format to be used when none specified + */ + val defaultFormat: F + + /** List of all formats available for the current type of entity + */ + val availableFormats: List[F] + + /** Given a format name, get its corresponding DataFormat object + * DataFormat + * + * @param name String name of the format we require + * @return the DataFormat object with the format data (an error String if it does not exist) + */ + def fromString(name: String): Either[String, F] = { + if(name.isBlank) Right(defaultFormat) + else { + formatsMap.get(name.toLowerCase) match { + case None => + val errorMsg = mkErrorMessage(name) + logger.error(errorMsg) + Left(errorMsg) + case Some(format) => Right(format) + } + } + } + + /** Make a generic error message string to be used elsewhere + * + * @param name Name of the problematic format + * @return Error message with format alternatives + */ + def mkErrorMessage(name: String): String = + s"Not found format: $name. 
Available formats: ${availableFormats.mkString(",")}" + + /** @return Map [String, Format] containing all formats while using their names as keys + */ + def formatsMap: Map[String, F] = { + def getFormatPairs(format: F): (String, F) = + (format.name.toLowerCase(), format) + + availableFormats.map(getFormatPairs).toMap + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala new file mode 100644 index 00000000..62090d58 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala @@ -0,0 +1,22 @@ +package es.weso.rdfshape.server.api.format + +import org.http4s.MediaType + +/** Dummy trait to differentiate HTML-based formats from the more generic DataFormat + * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + */ +sealed trait HtmlFormat extends DataFormat + +/** Represents the mime-type "text/html" when used along rdfa11 + */ +case object HtmlRdfa11 extends HtmlFormat { + override val name = "html-rdfa11" + override val mimeType: MediaType = MediaType.text.html +} + +/** Represents the mime-type "text/html" when used along microdata + */ +case object HtmlMicrodata extends HtmlFormat { + override val name = "html-microdata" + override val mimeType: MediaType = MediaType.text.html +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala new file mode 100644 index 00000000..6a98a151 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala @@ -0,0 +1,50 @@ +package es.weso.rdfshape.server.api.format + +import org.http4s.MediaType + +/** Dummy trait to differentiate RDF formats from the more generic DataFormat + * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + */ +sealed trait RDFFormat extends DataFormat + +/** 
Represents the mime-type "text/turtle" + */ +case object Turtle extends RDFFormat { + override val name = "turtle" + override val mimeType = new MediaType("text", "turtle") +} + +/** Represents the mime-type "application/n-triples" + */ +case object NTriples extends RDFFormat { + override val name = "n-triples" + override val mimeType = new MediaType("application", "n-triples") +} + +/** Represents the mime-type "application/trig" + */ +case object Trig extends RDFFormat { + override val name = "trig" + override val mimeType = new MediaType("application", "trig") +} + +/** Represents the mime-type "application/ld+json" + */ +case object JsonLd extends RDFFormat { + override val name = "json-ld" + override val mimeType = new MediaType("application", "ld+json") +} + +/** Represents the mime-type "application/rdf+xml" + */ +case object RdfXml extends RDFFormat { + override val name = "rdf/xml" + override val mimeType = new MediaType("application", "rdf+xml") +} + +/** Represents the mime-type "application/json" + */ +case object RdfJson extends RDFFormat { + override val name = "rdf/json" + override val mimeType: MediaType = MediaType.application.json +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala index 1cf19b43..d92b2226 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala @@ -2,50 +2,37 @@ package es.weso.rdfshape.server.api.format import org.http4s.MediaType -trait SchemaFormat extends DataFormat { - override val default: SchemaFormat = ShExC +/** Dummy trait to differentiate schema formats from the more generic DataFormat + * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + */ +class SchemaFormat(formatName: String, formatMimeType: MediaType) + extends DataFormat(formatName, 
formatMimeType) { + def this(format: Format) { + this(format.name, format.mimeType) + } } -object SchemaFormat { - - val default: SchemaFormat = ShExC - - def fromString(name: String): Either[String, SchemaFormat] = - if(name == "") Right(default) - else { - formatsMap.get(name.toLowerCase) match { - case None => - Left( - s"Not found format: $name. Available formats: ${availableFormats.mkString(",")}" - ) - case Some(df) => Right(df) - } - } - - private def formatsMap: Map[String, SchemaFormat] = { - def toPair(f: SchemaFormat): (String, SchemaFormat) = - (f.name.toLowerCase(), f) - availableFormats.map(toPair).toMap - } +/** Companion object with all SchemaFormat static utilities + */ +object SchemaFormat extends FormatCompanion[SchemaFormat] { - val availableFormats: List[SchemaFormat] = + override lazy val availableFormats: List[SchemaFormat] = List( - FromDataFormat(Turtle), - FromDataFormat(JsonLd), - FromDataFormat(NTriples), - FromDataFormat(RdfXml), - FromDataFormat(RdfJson), - FromDataFormat(Trig), + new SchemaFormat(Turtle), + new SchemaFormat(JsonLd), + new SchemaFormat(NTriples), + new SchemaFormat(RdfXml), + new SchemaFormat(RdfJson), + new SchemaFormat(Trig), ShExC ) - -} -case object ShExC extends SchemaFormat { - override val name = "shexc" - override val mimeType = new MediaType("text", "shex") + override val defaultFormat: SchemaFormat = ShExC } -case class FromDataFormat(dataFormat: DataFormat) extends SchemaFormat { - override val name = dataFormat.name - override val mimeType = dataFormat.mimeType -} +/** Represents the mime-type "text/shex" + */ +case object ShExC + extends SchemaFormat( + formatName = "shexc", + formatMimeType = new MediaType("text", "shex") + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala deleted file mode 100644 index d47c8ece..00000000 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/schema/SchemaFormat.scala +++ /dev/null @@ -1,108 +0,0 @@ -package es.weso.rdfshape.server.api.schema - -import org.http4s.MediaType - -sealed trait DataFormat { - val name: String - val mimeType: MediaType - - override def toString: String = s"$name" -} - -object DataFormat { - lazy val availableDataFormats: List[DataFormat] = - List( - Turtle, - JsonLd, - NTriples, - RdfXml, - RdfJson, - Trig, - HtmlMicrodata, - HtmlRdfa11, - Dot, - Svg, - Png, - JsonDataFormat - ) - lazy val dataFormatsMap: Map[String, DataFormat] = - availableDataFormats.map(df => (df.name.toLowerCase, df)).toMap - lazy val default: DataFormat = Turtle - - def fromString(name: String): Either[String, DataFormat] = - if(name == "") Right(default) - else { - dataFormatsMap.get(name.toLowerCase) match { - case None => - Left( - s"Not found data format: $name. Available formats: ${availableDataFormats.mkString(",")}" - ) - case Some(df) => Right(df) - } - } - -} - -case object JsonDataFormat extends DataFormat { - override val name = "json" - override val mimeType = new MediaType("application", "json") -} - -case object Dot extends DataFormat { - override val name = "dot" - override val mimeType = new MediaType("text", "vnd.graphviz") -} - -case object Svg extends DataFormat { - override val name = "svg" - override val mimeType: MediaType = MediaType.image.`svg+xml` -} - -case object Png extends DataFormat { - override val name = "png" - override val mimeType: MediaType = MediaType.image.png -} - -sealed trait RDFFormat extends DataFormat - -case object Turtle extends RDFFormat { - override val name = "turtle" - override val mimeType = new MediaType("text", "turtle") -} - -case object NTriples extends RDFFormat { - override val name = "n-triples" - override val mimeType = new MediaType("application", "n-triples") -} - -case object Trig extends RDFFormat { - override val name = "trig" - override val mimeType = new MediaType("application", 
"trig") -} - -case object JsonLd extends RDFFormat { - override val name = "json-ld" - override val mimeType = new MediaType("application", "ld+json") -} - -case object RdfXml extends RDFFormat { - override val name = "rdf/xml" - override val mimeType = new MediaType("application", "rdf+xml") -} - -case object RdfJson extends RDFFormat { - override val name = "rdf/json" - override val mimeType: MediaType = MediaType.application.json -} - -sealed trait HtmlFormat extends DataFormat - -case object HtmlRdfa11 extends HtmlFormat { - override val name = "html-rdfa11" - override val mimeType: MediaType = MediaType.text.html -} - -case object HtmlMicrodata extends HtmlFormat { - override val name = "html-microdata" - override val mimeType: MediaType = MediaType.text.html -} From 4ab2fa98688588946fe9dec5f02b1000e7a55fa5 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 13 Aug 2021 17:30:37 +0200 Subject: [PATCH 08/32] Refactored API formats (bis). --- .../weso/rdfshape/server/api/DataParam.scala | 43 +++++++------ .../server/api/format/HtmlFormat.scala | 16 ++--- .../server/api/format/RdfFormat.scala | 60 +++++++++++-------- .../server/api/format/SchemaFormat.scala | 2 +- 4 files changed, 61 insertions(+), 60 deletions(-) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala index 1e47f4b3..79fb74aa 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala @@ -84,7 +84,6 @@ case class DataParam( for { rdf <- rdfFromUri(new URI(dataUrl), dataFormat, base) } yield (None, rdf) - RDFFormat } case Right(`dataFileType`) => logger.debug(s"Input - dataFileType: $data") @@ -137,16 +136,14 @@ case class DataParam( x } - case Right(other) => { + case Right(other) => val msg = s"Unknown value for activeDataTab: $other" logger.error(msg) err(msg) - } - case Left(msg) => 
{ + case Left(msg) => logger.error(msg) err(msg) - } } x } @@ -189,6 +186,17 @@ case class DataParam( } } + private def mkBase(base: Option[String]): IO[Option[IRI]] = base match { + case None => IO(None) + case Some(str) => + IRI + .fromString(str) + .fold( + e => IO.raiseError(new RuntimeException(s"Cannot get IRI from $str")), + (iri: IRI) => IO(Some(iri)) + ) + } + private def rdfFromUri( uri: URI, format: Format, @@ -210,24 +218,6 @@ case class DataParam( } } - private def mkBase(base: Option[String]): IO[Option[IRI]] = base match { - case None => IO(None) - case Some(str) => - IRI - .fromString(str) - .fold( - e => IO.raiseError(new RuntimeException(s"Cannot get IRI from $str")), - (iri: IRI) => IO(Some(iri)) - ) - } - - private def applyInference( - rdf: Resource[IO, RDFReasoner], - inference: Option[String], - dataFormat: Format - ): Resource[IO, RDFReasoner] = - extendWithInference(rdf, inference) - private def extendWithInference( resourceRdf: Resource[IO, RDFReasoner], optInference: Option[String] @@ -249,6 +239,13 @@ case class DataParam( } } + private def applyInference( + rdf: Resource[IO, RDFReasoner], + inference: Option[String], + dataFormat: Format + ): Resource[IO, RDFReasoner] = + extendWithInference(rdf, inference) + private def mkBaseIri( maybeBase: Option[String] ): Either[String, Option[IRI]] = maybeBase match { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala index 62090d58..cacbe222 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala @@ -2,21 +2,17 @@ package es.weso.rdfshape.server.api.format import org.http4s.MediaType -/** Dummy trait to differentiate HTML-based formats from the more generic DataFormat +/** Dummy class to differentiate HTML-based formats from the more generic 
DataFormat + * * @see {@link es.weso.rdfshape.server.api.format.DataFormat} */ -sealed trait HtmlFormat extends DataFormat +class HtmlFormat(formatName: String) + extends DataFormat(formatName, MediaType.text.html) /** Represents the mime-type "text/html" when used along rdfa11 */ -case object HtmlRdfa11 extends HtmlFormat { - override val name = "html-rdfa11" - override val mimeType: MediaType = MediaType.text.html -} +case object HtmlRdfa11 extends HtmlFormat(formatName = "html-rdfa11") /** Represents the mime-type "text/html" when used along microdata */ -case object HtmlMicrodata extends HtmlFormat { - override val name = "html-microdata" - override val mimeType: MediaType = MediaType.text.html -} +case object HtmlMicrodata extends HtmlFormat(formatName = "html-microdata") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala index 6a98a151..89ffdb9c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala @@ -2,49 +2,57 @@ package es.weso.rdfshape.server.api.format import org.http4s.MediaType -/** Dummy trait to differentiate RDF formats from the more generic DataFormat +/** Dummy class to differentiate RDF formats from the more generic DataFormat + * * @see {@link es.weso.rdfshape.server.api.format.DataFormat} */ -sealed trait RDFFormat extends DataFormat +sealed class RDFFormat(formatName: String, formatMimeType: MediaType) + extends DataFormat(formatName, formatMimeType) /** Represents the mime-type "text/turtle" */ -case object Turtle extends RDFFormat { - override val name = "turtle" - override val mimeType = new MediaType("text", "turtle") -} +case object Turtle + extends RDFFormat( + formatName = "turtle", + formatMimeType = new MediaType("text", "turtle") + ) /** Represents the mime-type 
"application/n-triples" */ -case object NTriples extends RDFFormat { - override val name = "n-triples" - override val mimeType = new MediaType("application", "n-triples") -} +case object NTriples + extends RDFFormat( + formatName = "n-triples", + formatMimeType = new MediaType("application", "n-triples") + ) /** Represents the mime-type "application/trig" */ -case object Trig extends RDFFormat { - override val name = "trig" - override val mimeType = new MediaType("application", "trig") -} +case object Trig + extends RDFFormat( + formatName = "trig", + formatMimeType = new MediaType("application", "trig") + ) /** Represents the mime-type "application/ld+json" */ -case object JsonLd extends RDFFormat { - override val name = "json-ld" - override val mimeType = new MediaType("application", "ld+json") -} +case object JsonLd + extends RDFFormat( + formatName = "json-ld", + formatMimeType = new MediaType("application", "ld+json") + ) /** Represents the mime-type "application/rdf+xml" */ -case object RdfXml extends RDFFormat { - override val name = "rdf/xml" - override val mimeType = new MediaType("application", "rdf+xml") -} +case object RdfXml + extends RDFFormat( + formatName = "rdf/xml", + formatMimeType = new MediaType("application", "rdf+xml") + ) /** Represents the mime-type "application/json" */ -case object RdfJson extends RDFFormat { - override val name = "rdf/json" - override val mimeType: MediaType = MediaType.application.json -} +case object RdfJson + extends RDFFormat( + formatName = "rdf/json", + formatMimeType = MediaType.application.json + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala index d92b2226..5a598516 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala @@ -7,7 +7,7 @@ import 
org.http4s.MediaType */ class SchemaFormat(formatName: String, formatMimeType: MediaType) extends DataFormat(formatName, formatMimeType) { - def this(format: Format) { + def this(format: Format) = { this(format.name, format.mimeType) } } From 570b80c769b77bbcef67baec395b1c201281c2f0 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 13 Aug 2021 17:40:50 +0200 Subject: [PATCH 09/32] Refactored values. --- .../es/weso/rdfshape/server/api/values/EndpointValue.scala | 5 +++++ .../rdfshape/server/api/values/WikidataEntityValue.scala | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala index 91c0c4a3..fd206a16 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala @@ -1,3 +1,8 @@ package es.weso.rdfshape.server.api.values +/** Data class representing any endpoint from where information is fetched or that identifies RDF data + * @param endpoint Base endpoint + * @param node Specific information node + * TODO + */ case class EndpointValue(endpoint: Option[String], node: Option[String]) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala index 62b56c7c..d25fa2ad 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala @@ -1,3 +1,7 @@ package es.weso.rdfshape.server.api.values +/** Data class representing a Wikidata entity + * + * @param entity Entity of which the data is contained + */ case class WikidataEntityValue(entity: Option[String]) From 5a2799350394aa4d6adf4d7f9c9170dfb532dcf1 Mon 
Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 13 Aug 2021 20:17:08 +0200 Subject: [PATCH 10/32] Documented merged and results. --- .../rdfshape/server/api/ApiDefinitions.scala | 40 +++++++++ .../weso/rdfshape/server/api/Defaults.scala | 34 -------- .../server/api/merged/ActiveDataTab.scala | 53 +++++++++--- .../server/api/merged/CompoundData.scala | 53 ++++++------ .../server/api/merged/DataElement.scala | 81 +++++++++++++------ .../server/api/merged/MergedModels.scala | 79 +++++++++++------- .../api/results/DataConversionResult.scala | 11 +++ .../api/results/DataExtractResult.scala | 31 ++++++- .../server/api/results/DataInfoResult.scala | 35 +++++++- .../api/results/SchemaConversionResult.scala | 22 +++++ .../api/results/ShapeMapInfoResult.scala | 25 +++++- 11 files changed, 332 insertions(+), 132 deletions(-) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala index 7e9b3122..535b50f9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala @@ -1,5 +1,45 @@ package es.weso.rdfshape.server.api +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} +import es.weso.schema.{Schemas, ShapeMapTrigger} +import es.weso.shapemaps.ShapeMap + +/** Global definitions used in the API + */ object ApiDefinitions { + + /** API route inside the web server + */ val api = "api" } + +/** Application-wide defaults + */ +object Defaults { + + val availableDataFormats: List[DataFormat] = DataFormat.availableFormats + val defaultDataFormat: DataFormat = DataFormat.defaultFormat + val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats + val defaultSchemaFormat: SchemaFormat = 
SchemaFormat.defaultFormat + val availableSchemaEngines: List[String] = Schemas.availableSchemaNames + val defaultSchemaEngine: String = Schemas.defaultSchemaName + val availableTriggerModes: List[String] = Schemas.availableTriggerModes + val defaultTriggerMode: String = ShapeMapTrigger(ShapeMap.empty).name + val availableInferenceEngines = List( + "NONE", + "RDFS", + "OWL" + ) // TODO: Obtain from RDFAsJenaModel.empty.map(_.availableInferenceEngines).unsafeRunSync + val defaultSchemaEmbedded = false + val defaultInference: String = availableInferenceEngines.head + val defaultActiveDataTab = "#dataTextArea" + val defaultActiveSchemaTab = "#schemaTextArea" + val defaultActiveQueryTab = "#queryTextArea" + val defaultShapeMapFormat: String = ShapeMap.defaultFormat + val availableShapeMapFormats: List[String] = ShapeMap.formats + val defaultActiveShapeMapTab = "#shapeMapTextArea" + val defaultShapeLabel: IRI = IRI("Shape") + val relativeBase: Some[IRI] = Some(IRI("internal://base/")) + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala deleted file mode 100644 index 7fc9180b..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Defaults.scala +++ /dev/null @@ -1,34 +0,0 @@ -package es.weso.rdfshape.server.api - -import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.format._ -import es.weso.schema._ -import es.weso.shapemaps.ShapeMap - -object Defaults { - - val availableDataFormats: List[DataFormat] = DataFormat.availableFormats - val defaultDataFormat: DataFormat = DataFormat.defaultFormat - val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats - val defaultSchemaFormat: SchemaFormat = SchemaFormat.defaultFormat - val availableSchemaEngines: List[String] = Schemas.availableSchemaNames - val defaultSchemaEngine: String = Schemas.defaultSchemaName - val availableTriggerModes: List[String] = 
Schemas.availableTriggerModes - val defaultTriggerMode: String = ShapeMapTrigger(ShapeMap.empty).name - val availableInferenceEngines = List( - "NONE", - "RDFS", - "OWL" - ) // TODO: Obtain from RDFAsJenaModel.empty.map(_.availableInferenceEngines).unsafeRunSync - val defaultSchemaEmbedded = false - val defaultInference: String = availableInferenceEngines.head - val defaultActiveDataTab = "#dataTextArea" - val defaultActiveSchemaTab = "#schemaTextArea" - val defaultActiveQueryTab = "#queryTextArea" - val defaultShapeMapFormat: String = ShapeMap.defaultFormat - val availableShapeMapFormats: List[String] = ShapeMap.formats - val defaultActiveShapeMapTab = "#shapeMapTextArea" - val defaultShapeLabel: IRI = IRI("Shape") - val relativeBase: Some[IRI] = Some(IRI("internal://base/")) - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala index ea5d837a..623b754b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala @@ -1,11 +1,25 @@ package es.weso.rdfshape.server.api.merged +import com.typesafe.scalalogging.LazyLogging + +/** Abstract representation of the active tab in the client that sent the request. + * Used to distinguish whether the submitted text, URL or file should be prioritized in case several are present. 
+ */ sealed abstract class ActiveDataTab { + + /** Unique identifier of the active tab + */ val id: String } + +/** Indicates the client uploaded RDF data as raw text + */ case object DataTextArea extends ActiveDataTab { override val id = "#dataTextArea" } + +/** Indicates the client uploaded RDF data by indicating the URL where it lives + */ case object DataUrl extends ActiveDataTab { override val id = "#dataUrl" } @@ -13,24 +27,43 @@ case object DataUrl extends ActiveDataTab { case object DataEndpoint extends ActiveDataTab { override val id = "#dataEndpoint" } + +/** Indicates the client uploaded RDF data by uploading a file containing it + */ case object DataFile extends ActiveDataTab { override val id = "#dataFile" } -object ActiveDataTab { - val values = List(DataTextArea, DataUrl, DataFile, DataEndpoint) - val default: ActiveDataTab = values.head +object ActiveDataTab extends LazyLogging { + + /** Default value to use if none is present + */ + lazy val default: ActiveDataTab = dataTabValues.head - def fromString(str: String): Either[String, ActiveDataTab] = { + /** All possible values the DataTab may acquire + */ + private val dataTabValues = + List(DataTextArea, DataUrl, DataFile, DataEndpoint) - values.collectFirst { - case v if v.id == str => v + /** Given a tab identifier (name), find and returning the corresponding data tab with that id + * + * @param tabId Id of the Tab to be returned + * @return The corresponding ActiveDataTab if the tabName exists, otherwise an error message + */ + def fromString(tabId: String): Either[String, ActiveDataTab] = { + + dataTabValues.collectFirst { + case value if value.id == tabId => value } match { - case None => - Left( - s"Unknown value for activeDataTab: $str. 
Available values: ${values.map(_.id).mkString(",")}" - ) case Some(v) => Right(v) + case None => + val errorMsg = mkErrorMessage(tabId) + logger.error(errorMsg) + Left(errorMsg) } } + + private def mkErrorMessage(id: String): String = { + s"Unknown value for activeDataTab: $id. Available values: ${dataTabValues.map(_.id).mkString(",")}" + } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala index 296e0639..6ef60b3d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala @@ -1,24 +1,32 @@ package es.weso.rdfshape.server.api.merged -import es.weso.rdf.RDFReasoner -// import cats._ + import cats.effect._ import cats.implicits._ +import es.weso.rdf.RDFReasoner import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdfshape.server.api.merged.DataElement._ import io.circe._ import io.circe.parser._ import io.circe.syntax._ -case class CompoundData(elems: List[DataElement]) { +/** Data class representing the merge of several RDF data into a single compound + * + * @param elements List of the individual DataElements conforming a CompoundData instance + */ +case class CompoundData(elements: List[DataElement]) { def toRDF: IO[Resource[IO, RDFReasoner]] = { - val rs = elems.map(_.toRDF).sequence + val rs = elements.map(_.toRDF).sequence + def combine( ls: List[Resource[IO, RDFAsJenaModel]] ): Resource[IO, List[RDFAsJenaModel]] = ls.sequence - val v = rs.flatMap(lsRs => + + /** Whole compound value resulting from merging the individual elements + */ + val value = rs.flatMap(lsRs => IO(combine(lsRs).evalMap(ls => MergedModels.fromList(ls))) ) - v + value } } @@ -26,33 +34,32 @@ object CompoundData { def fromString(str: String): Either[String, CompoundData] = for { json <- parse(str).leftMap(pe => - 
s"CompoundData.fromString: error parsing $str as JSON: ${pe}" + s"CompoundData.fromString: error parsing $str as JSON: $pe" ) cd <- json .as[CompoundData] .leftMap(de => - s"CompoundData.fromString: error decoding json to compoundData: ${de}\nJSON obtained: \n${json.spaces2}" + s"CompoundData.fromString: error decoding json to compoundData: $de\nJSON obtained: \n${json.spaces2}" ) } yield cd + /** Encoder used to transform CompoundData instances to JSON values + */ implicit val encodeCompoundData: Encoder[CompoundData] = - new Encoder[CompoundData] { - final def apply(a: CompoundData): Json = - Json.fromValues(a.elems.map(_.asJson)) - } + (a: CompoundData) => Json.fromValues(a.elements.map(_.asJson)) + /** Decoder used to extract CompoundData instances from JSON values + */ implicit val decodeCompoundData: Decoder[CompoundData] = - new Decoder[CompoundData] { - final def apply(c: HCursor): Decoder.Result[CompoundData] = { - c.values match { - case None => - DecodingFailure("Empty list for compound data", List()) - .asLeft[CompoundData] - case Some(vs) => - val xs: Decoder.Result[List[DataElement]] = - vs.toList.map(_.as[DataElement]).sequence - xs.map(CompoundData(_)) - } + (cursor: HCursor) => { + cursor.values match { + case None => + DecodingFailure("Empty list for compound data", List()) + .asLeft[CompoundData] + case Some(vs) => + val xs: Decoder.Result[List[DataElement]] = + vs.toList.map(_.as[DataElement]).sequence + xs.map(CompoundData(_)) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala index e99cd90a..dd436c4c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala @@ -1,4 +1,5 @@ package es.weso.rdfshape.server.api.merged + import cats.effect._ import cats.implicits._ import 
com.typesafe.scalalogging.LazyLogging @@ -7,6 +8,15 @@ import es.weso.rdfshape.server.api.Defaults import es.weso.rdfshape.server.api.format.DataFormat import io.circe._ +/** Represent each chunk of RDF data submitted (mainly on RDF-merging operations) + * + * @param data Raw RDF data (plain text) + * @param dataUrl URL containing the RDF data + * @param endpoint RDF data endpoint to use + * @param dataFile File containing the RDF data + * @param dataFormat Format of the RDF data + * @param activeDataTab Active tab in the client's view, used to choose which RDF source should be read + */ case class DataElement( data: Option[String], dataUrl: Option[String], @@ -15,6 +25,12 @@ case class DataElement( dataFormat: DataFormat, activeDataTab: ActiveDataTab ) extends LazyLogging { + + /** Given an RDF source of data sent by a client, try to parse it and get the RDF model representation + * + * @return RDF (Jena) model of RDF data received from a client + * @note Iteratively compares the different possible values of activeDataTab against the one the client attached to decide an extracting strategy + */ def toRDF: IO[Resource[IO, RDFAsJenaModel]] = activeDataTab match { case DataTextArea => @@ -40,7 +56,7 @@ case class DataElement( logger.error(s"Data element error") IO.raiseError( new RuntimeException( - s"Not implemented yet compound with activeTab: ${activeDataTab}" + s"Not implemented yet compound with activeTab: $activeDataTab" ) ) } @@ -48,18 +64,22 @@ case class DataElement( object DataElement extends LazyLogging { + /** Empty and most basic data element + */ val empty: DataElement = DataElement( - None, - None, - None, - None, + data = None, + dataUrl = None, + endpoint = None, + dataFile = None, Defaults.defaultDataFormat, ActiveDataTab.default ) + /** Encoder used to transform DataElement instances to JSON values + */ implicit val encodeDataElement: Encoder[DataElement] = - new Encoder[DataElement] { - final def apply(a: DataElement): Json = a.activeDataTab match 
{ + (a: DataElement) => + a.activeDataTab match { case DataTextArea => Json.obj( ("data", Json.fromString(a.data.getOrElse(""))), @@ -85,14 +105,15 @@ object DataElement extends LazyLogging { ("dataFormat", Json.fromString(a.dataFormat.name)) ) } - } + /** Decoder used to extract DataElement instances from JSON values + */ implicit val decodeDataElement: Decoder[DataElement] = new Decoder[DataElement] { - final def apply(c: HCursor): Decoder.Result[DataElement] = { + final def apply(cursor: HCursor): Decoder.Result[DataElement] = { for { - dataActiveTab <- parseActiveTab(c) - dataFormat <- parseDataFormat(c) + dataActiveTab <- parseActiveTab(cursor) + dataFormat <- parseDataFormat(cursor) base = DataElement.empty.copy( dataFormat = dataFormat, activeDataTab = dataActiveTab @@ -101,52 +122,60 @@ object DataElement extends LazyLogging { case DataTextArea => logger.debug("Data element decoder - DataTextArea") for { - data <- c.downField("data").as[String] + data <- cursor.downField("data").as[String] } yield base.copy(data = Some(data)) case DataFile => logger.debug("Data element decoder - DataFile") /* TODO: either send the file text through the request (bad idea) * or decode the file appropriately */ - logger.debug(c.downField("dataFile").toString) + logger.debug(cursor.downField("dataFile").toString) for { - dataFile <- c.downField("dataFile").as[String] + dataFile <- cursor.downField("dataFile").as[String] } yield base.copy(dataFile = Some(dataFile)) case DataUrl => logger.debug("Data element decoder - DaraUrl") for { - dataUrl <- c.downField("dataURL").as[String] + dataUrl <- cursor.downField("dataURL").as[String] } yield base.copy(dataUrl = Some(dataUrl)) case DataEndpoint => logger.debug("Data element decoder - DataEndpoint") for { - endpoint <- c.downField("endpoint").as[String] + endpoint <- cursor.downField("endpoint").as[String] } yield base.copy(endpoint = Some(endpoint)) } } yield rest } - private def parseActiveTab(c: HCursor): 
Decoder.Result[ActiveDataTab] = { + /** @param cursor Cursor to operate JSON abstractions + * @return The ActiveDataTab specified in a JSON encoded DataElement + */ + private def parseActiveTab( + cursor: HCursor + ): Decoder.Result[ActiveDataTab] = { for { - str <- c.downField("activeTab").as[String] orElse Right( + activeTabId <- cursor.downField("activeTab").as[String] orElse Right( ActiveDataTab.default.id ) - a <- ActiveDataTab.fromString(str).leftMap(DecodingFailure(_, List())) + a <- ActiveDataTab + .fromString(activeTabId) + .leftMap(DecodingFailure(_, List())) } yield a } - private def parseDataFormat(c: HCursor): Decoder.Result[DataFormat] = + /** @param cursor Cursor to operate JSON abstractions + * @return The DataFormat specified in a JSON encoded DataElement + */ + private def parseDataFormat(cursor: HCursor): Decoder.Result[DataFormat] = for { - str <- c + dataFormatStr <- cursor .downField("dataFormat") .as[String] .orElse(Right(Defaults.defaultDataFormat.name)) - df <- DataFormat - .fromString(str) + dataFormat <- DataFormat + .fromString(dataFormatStr) .leftMap(s => DecodingFailure(s"Non supported dataFormat: $s", List()) ) - } yield df - + } yield dataFormat } - } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/MergedModels.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/MergedModels.scala index 396e859e..f1466e1a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/MergedModels.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/MergedModels.scala @@ -1,21 +1,23 @@ package es.weso.rdfshape.server.api.merged -import cats.effect._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf._ -import io.circe.Json -import fs2._ -import org.slf4j._ -// import fs2.Stream -import _root_.es.weso.rdf.{PrefixMap, RDFBuilder, RDFReader, RDFReasoner} +import _root_.es.weso.rdf._ import 
_root_.es.weso.rdf.jena.RDFAsJenaModel import _root_.es.weso.rdf.nodes.{IRI, RDFNode} import _root_.es.weso.rdf.path.SHACLPath import _root_.es.weso.rdf.triples.RDFTriple import cats.data.NonEmptyList +import cats.effect._ +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import io.circe.Json +import fs2._ import org.apache.jena.rdf.model.{Model, ModelFactory} +/** Data class representing an RDF model compound of several smaller RDF models + * + * @param members RDF models conforming the merged model + * @param mergedModel Unified RDF model containing the rest + */ case class MergedModels( members: NonEmptyList[RDFAsJenaModel], mergedModel: Ref[IO, RDFAsJenaModel] @@ -24,16 +26,11 @@ case class MergedModels( with LazyLogging { type Rdf = MergedModels - - def getModel: IO[RDFAsJenaModel] = mergedModel.get - - /* def mergeModels: IO[RDFAsJenaModel] = { val zero: Model = - * RDFAsJenaModel(ModelFactory.createDefaultModel()) def cmb(v: Model, r: - * Model): Model = RDFAsJenaModel(model = r.model.add(v.model)) - * members.map(_.getModel) foldRight(zero)(cmb) } */ + val id = s"MergedModels" override def getPrefixMap: IO[PrefixMap] = { val zero: PrefixMap = PrefixMap.empty + def cmb(pm: PrefixMap, v: RDFAsJenaModel): IO[PrefixMap] = for { newPm <- v.getPrefixMap } yield pm.addPrefixMap(newPm) @@ -41,24 +38,27 @@ case class MergedModels( members.foldM(zero)(cmb) } - val id = s"MergedModels" + /** Available parse formats, similar to those in Jena models + */ + def availableParseFormats: List[String] = RDFAsJenaModel.availableFormats - def availableParseFormats: List[String] = RDFAsJenaModel.availableFormats + /** Available serialize formats, similar to those in Jena models + */ def availableSerializeFormats: List[String] = RDFAsJenaModel.availableFormats - val log = LoggerFactory.getLogger("MergedModels") - - /* override def fromString(cs: CharSequence, format: String, base: - * Option[IRI]): RDFRead[MergedModels] = for { rdf <- - * 
RDFAsJenaModel.fromString(cs.toString, format, base) } yield - * MergedModels(List(rdf)) */ - + /* Override and replicate the functionalities inherited from RDF + * Reader/Reasoner */ override def serialize(format: String, base: Option[IRI]): RDFRead[String] = for { mergedRdf <- getModel str <- mergedRdf.serialize(format, base) } yield str + /* override def fromString(cs: CharSequence, format: String, base: + * Option[IRI]): RDFRead[MergedModels] = for { rdf <- + * RDFAsJenaModel.fromString(cs.toString, format, base) } yield + * MergedModels(List(rdf)) */ + override def iris(): RDFStream[IRI] = { Stream.eval(getModel).flatMap(_.iris()) } @@ -148,14 +148,24 @@ case class MergedModels( override def hasPredicateWithSubject(n: RDFNode, p: IRI): RDFRead[Boolean] = getModel.flatMap(_.hasPredicateWithSubject(n, p)) + def getModel: IO[RDFAsJenaModel] = mergedModel.get + } +/** Static utilities to work with several RDF models + */ object MergedModels { - def fromList(ls: List[RDFAsJenaModel]): IO[RDFReasoner] = - NonEmptyList.fromList(ls) match { + + /** Merge multiple RDF sources, entered as a list, into one + * + * @param models List containing the RDF models to be merged + * @return A unified RDF model containing all the models in the list + */ + def fromList(models: List[RDFAsJenaModel]): IO[RDFReasoner] = + NonEmptyList.fromList(models) match { case Some(nel) => for { - rdfModel <- mergeModels(ls) + rdfModel <- mergeModels(models) refRdfModel <- Ref.of[IO, RDFAsJenaModel](rdfModel) } yield MergedModels(nel, refRdfModel) case None => @@ -164,11 +174,18 @@ object MergedModels { } yield RDFAsJenaModel(ref, None, None, Map(), Map()) } - private def mergeModels(ls: List[RDFAsJenaModel]): IO[RDFAsJenaModel] = { - val zero: Model = ModelFactory.createDefaultModel() + /** Merge multiple RDF sources into one + * + * @param models List of RDF models to be merged + * @return A unified RDF model containing all the models passed as arguments + */ + private def mergeModels(models: 
List[RDFAsJenaModel]): IO[RDFAsJenaModel] = { + val zero: Model = ModelFactory.createDefaultModel() + def cmb(v: Model, r: Model): Model = r.add(v) + for { - model <- ls.map(_.getModel).sequence.map(_.foldLeft(zero)(cmb)) + model <- models.map(_.getModel).sequence.map(_.foldLeft(zero)(cmb)) r <- Ref.of[IO, Model](model) } yield RDFAsJenaModel(r, None, None, Map(), Map()) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala index 2a4804f1..9d4483d8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala @@ -4,6 +4,14 @@ import es.weso.rdfshape.server.api.format.DataFormat import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ import io.circe.Json +/** Data class representing the output of a conversion operation + * + * @param msg Output informational message after conversion + * @param data Data to be converted + * @param dataFormat Initial data format + * @param targetFormat Target data format + * @param result Data after conversion + */ case class DataConversionResult( msg: String, data: Option[String], @@ -12,6 +20,9 @@ case class DataConversionResult( result: String ) { + /** Convert a conversion result to its JSON representation + * @return JSON representation of the conversion result + */ def toJson: Json = Json.fromFields( List( ("msg", Json.fromString(msg)), diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala index ca65e193..9d605cce 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala @@ -1,13 +1,26 @@ package es.weso.rdfshape.server.api.results import cats.effect.IO -import es.weso.rdfshape.server.api.Defaults.{defaultSchemaEngine, defaultSchemaFormat} +import es.weso.rdfshape.server.api.Defaults.{ + defaultSchemaEngine, + defaultSchemaFormat +} import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ import es.weso.schema.Schema import es.weso.shapemaps.ResultShapeMap import io.circe.Json +/** Data class representing the output of an extraction operation (input RDF data => output schema) + * + * @param msg Output informational message after conversion. Used in case of error. + * @param optData RDF input data from which ShEx may be extracted + * @param optDataFormat RDF input data format + * @param optSchemaFormat Target schema format + * @param optSchemaEngine Target schema engine + * @param optSchema Resulting schema + * @param optResultShapeMap Resulting shapemap + */ case class DataExtractResult private ( msg: String, optData: Option[String], @@ -17,9 +30,13 @@ case class DataExtractResult private ( optSchema: Option[Schema], optResultShapeMap: Option[ResultShapeMap] ) { + + /** Convert an extraction result to its JSON representation + * @return JSON representation of the extraction result + */ def toJson: IO[Json] = optSchema match { case None => IO(Json.fromFields(List(("msg", Json.fromString(msg))))) - case Some(schema) => { + case Some(schema) => val engine = optSchemaEngine.getOrElse(defaultSchemaEngine) val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat.name) for { @@ -27,7 +44,7 @@ case class DataExtractResult private ( } yield Json.fromFields( List( ("msg", Json.fromString(msg)), - ("inferedShape", Json.fromString(schemaStr)), + ("inferredShape", Json.fromString(schemaStr)), ("schemaFormat", Json.fromString(schemaFormat)), ("schemaEngine", Json.fromString(engine)) ) ++ @@ -43,13 
+60,19 @@ case class DataExtractResult private ( (r: ResultShapeMap) => Json.fromString(r.toString) ) ) - } } } object DataExtractResult { + + /** @param msg Error message contained in the result + * @return A DataExtractResult consisting of a single error message and no data + */ def fromMsg(msg: String): DataExtractResult = DataExtractResult(msg, None, None, None, None, None, None) + + /** @return A DataExtractResult, given all the parameters needed to build it (input, formats and results) + */ def fromExtraction( optData: Option[String], optDataFormat: Option[DataFormat], diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala index d3d4718a..c3deb0ea 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala @@ -7,6 +7,15 @@ import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ import io.circe.Json +/** Data class representing the output of an "information" operation + * + * @param msg Output informational message after processing. Used in case of error. + * @param data RDF input data + * @param dataFormat RDF input data format + * @param predicates List of predicates of the RDF input + * @param numberStatements Number of statements in the RDF input + * @param prefixMap Prefix map of the RDF input + */ case class DataInfoResult private ( msg: String, data: Option[String], @@ -15,6 +24,14 @@ case class DataInfoResult private ( numberStatements: Option[Int], prefixMap: Option[PrefixMap] ) { + + /** Prefix map: defaults to empty. 
+ */ + lazy val pm: PrefixMap = prefixMap.getOrElse(PrefixMap.empty) + + /** Convert an information result to its JSON representation + * @return JSON information of the extraction result + */ def toJson: Json = { Json.fromFields( List(("msg", Json.fromString(msg))) ++ @@ -34,8 +51,9 @@ case class DataInfoResult private ( ) } - lazy val pm = prefixMap.getOrElse(PrefixMap.empty) - + /** @param iri IRI to be converted + * @return JSON representation of the IRI + */ private def iri2Json(iri: IRI): Json = { Json.fromString(pm.qualifyIRI(iri)) } @@ -43,8 +61,19 @@ case class DataInfoResult private ( } object DataInfoResult { + + /** Message attached to the result when created successfully + */ + val successMessage = "Well formed RDF" + + /** @param msg Error message contained in the result + * @return A DataInfoResult consisting of a single error message and no data + */ def fromMsg(msg: String): DataInfoResult = DataInfoResult(msg, None, None, None, None, None) + + /** @return A DataInfoResult, given all the parameters needed to build it (input, predicates, etc.) 
+ */ def fromData( data: Option[String], dataFormat: Option[DataFormat], @@ -53,7 +82,7 @@ object DataInfoResult { prefixMap: PrefixMap ): DataInfoResult = DataInfoResult( - "Well formed RDF", + successMessage, data, dataFormat, Some(predicates), diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala index df2d929c..dcc9c63f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala @@ -5,6 +5,17 @@ import es.weso.shapemaps.ShapeMap import io.circe.Json import io.circe.syntax._ +/** Data class representing the output of an conversion operation (input schema -> output schema) + * + * @param msg Output informational message after processing. Used in case of error. + * @param schema Input schema + * @param schemaFormat Input schema format + * @param schemaEngine Input schema engine + * @param targetSchemaFormat Target schema format + * @param targetSchemaEngine Target schema engine + * @param result Output schema + * @param resultShapeMap Output shapemap + */ case class SchemaConversionResult( msg: String, schema: Option[String], @@ -16,6 +27,10 @@ case class SchemaConversionResult( resultShapeMap: Option[ShapeMap] ) { + /** Convert a conversion result to its JSON representation + * + * @return JSON information of the conversion result + */ def toJson: Json = Json.fromFields( List( ("msg", Json.fromString(msg)) @@ -35,8 +50,15 @@ case class SchemaConversionResult( } object SchemaConversionResult { + + /** @param msg Error message contained in the result + * @return A SchemaConversionResult consisting of a single error message and no data + */ def fromMsg(msg: String): SchemaConversionResult = SchemaConversionResult(msg, None, None, None, None, None, None, None) + + /** 
@return A SchemaConversionResult, given all the parameters needed to build it (schemas, formats, results, etc.) + */ def fromConversion( source: String, schemaFormat: String, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala index 08d7f6c9..142fc4ee 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala @@ -4,12 +4,24 @@ import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ import es.weso.shapemaps._ import io.circe.Json +/** Data class representing the output of a ShapeMapInfo operation + * + * @param msg Output informational message after processing. Used in case of error. + * @param shapeMap Input shapemap + * @param shapeMapFormat Input shapemap format + * @param shapeMapJson Output shapemap (JSON representation) + */ case class ShapeMapInfoResult private ( msg: String, shapeMap: Option[String], shapeMapFormat: Option[ShapeMapFormat], shapeMapJson: Option[Json] ) { + + /** Convert a result to its JSON representation + * + * @return JSON information of the shapemap result + */ def toJson: Json = { Json.fromFields( List(("msg", Json.fromString(msg))) ++ @@ -26,15 +38,26 @@ case class ShapeMapInfoResult private ( } object ShapeMapInfoResult { + + /** Message attached to the result when created successfully + */ + val successMessage = "Well formed ShapeMap" + + /** @param msg Error message contained in the result + * @return A ShapeMapInfoResult consisting of a single error message and no data + */ def fromMsg(msg: String): ShapeMapInfoResult = ShapeMapInfoResult(msg, None, None, None) + + /** @return A ShapeMapInfoResult, given all the parameters needed to build it (shapemap, formats, etc.) 
+ */ def fromShapeMap( shapeMapStr: Option[String], shapeMapFormat: Option[ShapeMapFormat], shapeMap: ShapeMap ): ShapeMapInfoResult = ShapeMapInfoResult( - "Well formed Shape Map", + successMessage, shapeMapStr, shapeMapFormat, Some(shapeMap.toJson) From 8c28343f12351ca27b39dcde0a69a8560707f125 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 13 Aug 2021 22:50:12 +0200 Subject: [PATCH 11/32] Re-structured the code handling the API endpoints. --- .../es/weso/rdfshape/server/Server.scala | 10 +- .../weso/rdfshape/server/api/APIService.scala | 90 ----- .../rdfshape/server/api/EndpointService.scala | 83 ----- .../es/weso/rdfshape/server/api/Query.scala | 3 - .../server/api/merged/DataElement.scala | 2 +- .../api/results/DataExtractResult.scala | 17 +- .../server/api/results/DataInfoResult.scala | 2 +- .../api/{ => routes}/ApiDefinitions.scala | 2 +- .../server/api/{ => routes}/ApiHelper.scala | 45 ++- .../server/api/routes/ApiService.scala | 10 + .../IncomingRequestParameters.scala} | 72 +++- .../server/api/{ => routes}/PartsMap.scala | 3 +- .../server/api/routes/api/APIService.scala | 38 ++ .../api/{ => routes/data}/DataConverter.scala | 4 +- .../api/{ => routes/data}/DataParam.scala | 7 +- .../api/{ => routes/data}/DataService.scala | 15 +- .../api/{ => routes/data}/DataValue.scala | 3 +- .../{ => routes/endpoint}/EndpointParam.scala | 3 +- .../api/routes/endpoint/EndpointService.scala | 141 ++++++++ .../api/{ => routes/endpoint}/Outgoing.scala | 2 +- .../server/api/routes/endpoint/Query.scala | 6 + .../endpoint}/SparqlQueryParam.scala | 5 +- .../api/{ => routes/fetch}/FetchService.scala | 6 +- .../permalink}/PermalinkService.scala | 26 +- .../schema}/SchemaInfoResult.scala | 2 +- .../api/{ => routes/schema}/SchemaParam.scala | 5 +- .../{ => routes/schema}/SchemaService.scala | 19 +- .../api/{ => routes/schema}/SchemaValue.scala | 6 +- .../schema}/TriggerModeParam.scala | 7 +- .../{ => routes/shapemap}/ShapeMapParam.scala | 5 +- .../shapemap}/ShapeMapService.scala 
| 11 +- .../{ => routes/shapemap}/ShapeMapValue.scala | 2 +- .../api/{ => routes/shex}/ShExService.scala | 5 +- .../wikibase}/WikibaseSchemaParam.scala | 6 +- .../wikibase}/WikidataService.scala | 332 +++++++++--------- .../server/utils/numeric/NumericUtils.scala | 17 + .../rdfshape/server/api/TestHttp4sTest.scala | 5 +- 37 files changed, 595 insertions(+), 422 deletions(-) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/Query.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes}/ApiDefinitions.scala (97%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes}/ApiHelper.scala (94%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiService.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{QueryParams.scala => routes/IncomingRequestParameters.scala} (82%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes}/PartsMap.scala (96%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/data}/DataConverter.scala (98%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/data}/DataParam.scala (98%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/data}/DataService.scala (94%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/data}/DataValue.scala (87%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/endpoint}/EndpointParam.scala (95%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala rename 
modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/endpoint}/Outgoing.scala (97%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/endpoint}/SparqlQueryParam.scala (93%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/fetch}/FetchService.scala (80%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/permalink}/PermalinkService.scala (94%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/schema}/SchemaInfoResult.scala (76%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/schema}/SchemaParam.scala (98%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/schema}/SchemaService.scala (96%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/schema}/SchemaValue.scala (71%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/schema}/TriggerModeParam.scala (97%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/shapemap}/ShapeMapParam.scala (96%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/shapemap}/ShapeMapService.scala (81%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/shapemap}/ShapeMapValue.scala (79%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/shex}/ShExService.scala (84%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/wikibase}/WikibaseSchemaParam.scala (95%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/wikibase}/WikidataService.scala (93%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/numeric/NumericUtils.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index fd3d743c..5d7dc789 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -4,7 +4,15 @@ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.Server._ -import es.weso.rdfshape.server.api._ +import es.weso.rdfshape.server.api.routes.api.APIService +import es.weso.rdfshape.server.api.routes.data.DataService +import es.weso.rdfshape.server.api.routes.endpoint.EndpointService +import es.weso.rdfshape.server.api.routes.fetch.FetchService +import es.weso.rdfshape.server.api.routes.permalink.PermalinkService +import es.weso.rdfshape.server.api.routes.schema.SchemaService +import es.weso.rdfshape.server.api.routes.shapemap.ShapeMapService +import es.weso.rdfshape.server.api.routes.shex.ShExService +import es.weso.rdfshape.server.api.routes.wikibase.WikidataService import es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException import es.weso.rdfshape.server.utils.error.{ExitCodes, SysUtils} import es.weso.rdfshape.server.utils.secure.SSLHelper diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala deleted file mode 100644 index bfcdb72f..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/APIService.scala +++ /dev/null @@ -1,90 +0,0 @@ -package es.weso.rdfshape.server.api - -import cats.data.EitherT -import cats.effect._ -import cats.implicits._ -import es.weso.rdf.jena.Endpoint -import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.ApiDefinitions._ -import es.weso.rdfshape.server.api.QueryParams._ -import es.weso.utils.IOUtils._ -import io.circe._ -import org.http4s._ -import org.http4s.circe._ -import org.http4s.client.Client -import org.http4s.dsl.Http4sDsl -import 
org.http4s.server.staticcontent.resourceServiceBuilder -import org.log4s.getLogger - -import scala.util.Try - -class APIService(client: Client[IO]) extends Http4sDsl[IO] { - - val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - - case req @ GET -> Root / `api` / "health" => - Ok("OK") - - case req @ GET -> Root / `api` / "endpoint" / "outgoing" :? - OptEndpointParam(optEndpoint) +& - OptNodeParam(optNode) +& - LimitParam(optLimit) => - for { - eitherOutgoing <- getOutgoing(optEndpoint, optNode, optLimit).value - resp <- eitherOutgoing.fold( - (s: String) => errJson(s"Error: $s"), - (outgoing: Outgoing) => Ok(outgoing.toJson) - ) - } yield resp - - // Contents on /swagger are directly mapped to /swagger - /* case r @ GET -> _ if - * r.pathInfo.startsWith(UriPath.fromString("/swagger/")) => - * swagger.toRoutes. // getOrElseF(NotFound()) */ - - } - private val relativeBase = Defaults.relativeBase - private val logger = getLogger - private val swagger = - resourceServiceBuilder[IO]("/swagger") // ResourceService.Config()) - - private def errJson(msg: String): IO[Response[IO]] = - Ok(Json.fromFields(List(("error", Json.fromString(msg))))) - - private def getOutgoing( - optEndpoint: Option[String], - optNode: Option[String], - optLimit: Option[String] - ): EitherT[IO, String, Outgoing] = { - for { - endpointIRI <- EitherT.fromEither[IO]( - Either - .fromOption(optEndpoint, "No endpoint provided") - .flatMap(IRI.fromString(_)) - ) - node <- EitherT.fromEither[IO]( - Either - .fromOption(optNode, "No node provided") - .flatMap(IRI.fromString(_)) - ) - limit <- EitherT.fromEither[IO](parseInt(optLimit.getOrElse("1"))) - o <- outgoing(endpointIRI, node, limit) - } yield o - } - - private def parseInt(s: String): Either[String, Int] = - Try(s.toInt).map(Right(_)).getOrElse(Left(s"$s is not a number")) - - private def outgoing(endpoint: IRI, node: IRI, limit: Int): ESIO[Outgoing] = - for { - triples <- stream2es(Endpoint(endpoint).triplesWithSubject(node)) - } yield 
Outgoing.fromTriples(node, endpoint, triples.toSet) - - // Monad[F].pure(Left(s"Not implemented")) - -} - -object APIService { - def apply(client: Client[IO]): APIService = - new APIService(client) -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala deleted file mode 100644 index a0fcf255..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointService.scala +++ /dev/null @@ -1,83 +0,0 @@ -package es.weso.rdfshape.server.api -import cats.data.EitherT -import cats.effect._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.ApiDefinitions._ -import es.weso.rdfshape.server.api.{Query => ServerQuery} -import es.weso.utils.IOUtils._ -import io.circe.Json -import org.http4s._ -import org.http4s.circe._ -import org.http4s.client.Client -import org.http4s.dsl._ -import org.http4s.multipart._ - -class EndpointService(client: Client[IO]) - extends Http4sDsl[IO] - with LazyLogging { - - private val relativeBase = Defaults.relativeBase - - def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - - case req @ POST -> Root / `api` / "endpoint" / "query" => - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - - val r: EitherT[IO, String, Json] = for { - ep <- EndpointParam.mkEndpoint(partsMap) -// json = Json.Null - endpoint <- ep.getEndpointAsRDFReader - either <- EitherT - .liftF[IO, String, Either[String, (ServerQuery, SparqlQueryParam)]]( - SparqlQueryParam.mkQuery(partsMap) - ) - pair <- EitherT.fromEither[IO](either) - (_, qp) = pair - optQueryStr = qp.query.map(_.str) - json <- { - logger.debug( - s"Query to endpoint $endpoint: ${optQueryStr.getOrElse("")}" - ) - io2es(endpoint.queryAsJson(optQueryStr.getOrElse(""))) - } - } yield json - - for { - either <- r.value - resp <- either.fold( - e => errJson(s"Error querying endpoint: ${e}"), - json => Ok(json) - ) - } yield resp - } - - 
case req @ POST -> Root / `api` / "endpoint" / "info" => - req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - val r: EitherT[IO, String, Json] = for { - ep <- EndpointParam.mkEndpoint(partsMap) - ei <- EitherT.liftF[IO, String, EndpointInfo](ep.getInfo(client)) - } yield ei.asJson - for { - either <- r.value - resp <- either.fold( - e => errJson(s"Error obtaining info on Endpoint ${e}"), - json => Ok(json) - ) - } yield resp - } - } - - } - - private def errJson(msg: String): IO[Response[IO]] = - Ok(Json.fromFields(List(("error", Json.fromString(msg))))) - -} - -object EndpointService { - def apply(client: Client[IO]): EndpointService = - new EndpointService(client) -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Query.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/Query.scala deleted file mode 100644 index d0c65f23..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Query.scala +++ /dev/null @@ -1,3 +0,0 @@ -package es.weso.rdfshape.server.api - -case class Query(str: String) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala index dd436c4c..d5baa067 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala @@ -4,8 +4,8 @@ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdfshape.server.api.Defaults import es.weso.rdfshape.server.api.format.DataFormat +import es.weso.rdfshape.server.api.routes.Defaults import io.circe._ /** Represent each chunk of RDF data submitted (mainly on RDF-merging operations) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala index 9d605cce..b22caeda 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala @@ -1,11 +1,11 @@ package es.weso.rdfshape.server.api.results import cats.effect.IO -import es.weso.rdfshape.server.api.Defaults.{ +import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.routes.Defaults.{ defaultSchemaEngine, defaultSchemaFormat } -import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ import es.weso.schema.Schema import es.weso.shapemaps.ResultShapeMap @@ -13,12 +13,12 @@ import io.circe.Json /** Data class representing the output of an extraction operation (input RDF data => output schema) * - * @param msg Output informational message after conversion. Used in case of error. - * @param optData RDF input data from which ShEx may be extracted - * @param optDataFormat RDF input data format - * @param optSchemaFormat Target schema format - * @param optSchemaEngine Target schema engine - * @param optSchema Resulting schema + * @param msg Output informational message after conversion. Used in case of error. 
+ * @param optData RDF input data from which ShEx may be extracted + * @param optDataFormat RDF input data format + * @param optSchemaFormat Target schema format + * @param optSchemaEngine Target schema engine + * @param optSchema Resulting schema * @param optResultShapeMap Resulting shapemap */ case class DataExtractResult private ( @@ -32,6 +32,7 @@ case class DataExtractResult private ( ) { /** Convert an extraction result to its JSON representation + * * @return JSON representation of the extraction result */ def toJson: IO[Json] = optSchema match { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala index c3deb0ea..b6357136 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala @@ -2,8 +2,8 @@ package es.weso.rdfshape.server.api.results import es.weso.rdf.PrefixMap import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.ApiHelper import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.routes.ApiHelper import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ import io.circe.Json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiDefinitions.scala similarity index 97% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiDefinitions.scala index 535b50f9..ee42a6cd 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiDefinitions.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiDefinitions.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api +package 
es.weso.rdfshape.server.api.routes import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiHelper.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala similarity index 94% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiHelper.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala index 5567e3ef..b8a04abf 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ApiHelper.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala @@ -1,25 +1,37 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes -import cats.effect._ +import cats.effect.IO import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.nodes._ +import es.weso.rdf.nodes.{IRI, Lang} import es.weso.rdf.{PrefixMap, RDFBuilder, RDFReasoner} -import es.weso.rdfshape.server.api.Defaults._ -import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.results._ -import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ +import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} +import es.weso.rdfshape.server.api.results.{ + DataExtractResult, + DataInfoResult, + SchemaConversionResult +} +import es.weso.rdfshape.server.api.routes.Defaults.{ + defaultSchemaEngine, + defaultSchemaFormat, + defaultShapeLabel +} +import es.weso.rdfshape.server.api.routes.data.DataParam +import es.weso.rdfshape.server.api.routes.schema.{SchemaParam, TriggerModeParam} +import es.weso.rdfshape.server.utils.json.JsonUtilsServer.maybeField import es.weso.schema._ -import es.weso.schemaInfer._ +import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} import es.weso.shacl.converter.Shacl2ShEx import es.weso.shapemaps.{NodeSelector, 
ResultShapeMap, ShapeMap} -import es.weso.uml._ +import es.weso.uml.{PlantUMLOptions, Schema2UML, UML} import es.weso.utils.IOUtils._ -import io.circe._ -import org.http4s._ +import io.circe.Json +import org.http4s.Uri import org.http4s.client.{Client, JavaNetClientBuilder} +/** Static utils used by several API components + */ object ApiHelper extends LazyLogging { private val NoTime = 0L @@ -34,6 +46,7 @@ object ApiHelper extends LazyLogging { } yield json /** Get base URI + * * @return default URI obtained from current folder */ private[api] def getBase: Option[String] = Defaults.relativeBase.map(_.str) @@ -178,9 +191,13 @@ object ApiHelper extends LazyLogging { ) case Right(trigger) => val run = for { - startTime <- IO { System.nanoTime() } - result <- schema.validate(rdf, trigger, builder) - endTime <- IO { System.nanoTime() } + startTime <- IO { + System.nanoTime() + } + result <- schema.validate(rdf, trigger, builder) + endTime <- IO { + System.nanoTime() + } time: Long = endTime - startTime } yield (result, Some(trigger), time) run.handleErrorWith(e => { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiService.scala new file mode 100644 index 00000000..b33de24e --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiService.scala @@ -0,0 +1,10 @@ +package es.weso.rdfshape.server.api.routes + +/** Simple interface all API services should comply with + */ +trait ApiService { + + /** The service's characteristic verb, e.g.: "permalink", "data", "wikidata"... 
+ */ + val verb: String = "api" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/QueryParams.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/IncomingRequestParameters.scala similarity index 82% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/QueryParams.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/IncomingRequestParameters.scala index f9cb0015..bfc99e89 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/QueryParams.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/IncomingRequestParameters.scala @@ -1,8 +1,13 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes -import org.http4s.dsl.io.{OptionalQueryParamDecoderMatcher, QueryParamDecoderMatcher} +import org.http4s.dsl.io.{ + OptionalQueryParamDecoderMatcher, + QueryParamDecoderMatcher +} -object QueryParams { +/** Definitions for all the possible parameters that may come from client requests + */ +object IncomingRequestParameters { lazy val data = "data" lazy val compoundData = "compoundData" lazy val dataURL = "dataURL" @@ -20,78 +25,123 @@ object QueryParams { lazy val url = "url" lazy val urlCode = "urlCode" lazy val hostname = "hostname" + object DataParameter extends OptionalQueryParamDecoderMatcher[String](data) - object OptDataParam extends OptionalQueryParamDecoderMatcher[String](data) + + object OptDataParam extends OptionalQueryParamDecoderMatcher[String](data) + object OptEndpointParam extends OptionalQueryParamDecoderMatcher[String](endpoint) + object OptDataURLParam extends OptionalQueryParamDecoderMatcher[String](dataURL) + object DataFormatParam extends OptionalQueryParamDecoderMatcher[String](dataFormat) + object CompoundDataParam extends OptionalQueryParamDecoderMatcher[String](compoundData) + object TargetDataFormatParam extends OptionalQueryParamDecoderMatcher[String](targetDataFormat) + object OptSchemaParam 
extends OptionalQueryParamDecoderMatcher[String](schema) + object OptEntityParam extends OptionalQueryParamDecoderMatcher[String](entity) - object OptNodeParam extends OptionalQueryParamDecoderMatcher[String](node) + + object OptNodeParam extends OptionalQueryParamDecoderMatcher[String](node) + object OptWithDotParam extends OptionalQueryParamDecoderMatcher[Boolean](withDot) + object SchemaURLParam extends OptionalQueryParamDecoderMatcher[String](schemaURL) + object SchemaFormatParam extends OptionalQueryParamDecoderMatcher[String](schemaFormat) + object OptNodeSelectorParam extends OptionalQueryParamDecoderMatcher[String]("nodeSelector") + object SchemaEngineParam extends OptionalQueryParamDecoderMatcher[String]("schemaEngine") + object OptView extends OptionalQueryParamDecoderMatcher[String]("view") + object TargetSchemaFormatParam extends OptionalQueryParamDecoderMatcher[String]("targetSchemaFormat") + object TargetSchemaEngineParam extends OptionalQueryParamDecoderMatcher[String]("targetSchemaEngine") + object OptTriggerModeParam extends OptionalQueryParamDecoderMatcher[String]("triggerMode") - object NodeParam extends OptionalQueryParamDecoderMatcher[String](node) + + object NodeParam extends OptionalQueryParamDecoderMatcher[String](node) + object ShapeParam extends OptionalQueryParamDecoderMatcher[String](shape) -// object NameParam extends OptionalQueryParamDecoderMatcher[String]("name") + + // object NameParam extends OptionalQueryParamDecoderMatcher[String]("name") object ShapeMapParameter extends OptionalQueryParamDecoderMatcher[String]("shapeMap") + object ShapeMapParameterAlt extends OptionalQueryParamDecoderMatcher[String]("shape-map") + object ShapeMapURLParameter extends OptionalQueryParamDecoderMatcher[String]("shapeMapURL") + object ShapeMapFileParameter extends OptionalQueryParamDecoderMatcher[String]("shapeMapFile") + object ShapeMapFormatParam extends OptionalQueryParamDecoderMatcher[String]("shapeMapFormat") + object SchemaEmbedded extends 
OptionalQueryParamDecoderMatcher[Boolean]("schemaEmbedded") + object InferenceParam extends OptionalQueryParamDecoderMatcher[String]("inference") + object ExamplesParam extends OptionalQueryParamDecoderMatcher[String]("examples") + object ManifestURLParam extends OptionalQueryParamDecoderMatcher[String]("manifestURL") + object OptExamplesParam extends OptionalQueryParamDecoderMatcher[String]("examples") + object OptQueryParam extends OptionalQueryParamDecoderMatcher[String]("query") + object OptActiveDataTabParam extends OptionalQueryParamDecoderMatcher[String]("activeDataTab") + object OptActiveSchemaTabParam extends OptionalQueryParamDecoderMatcher[String]("activeSchemaTab") + object OptActiveShapeMapTabParam extends OptionalQueryParamDecoderMatcher[String]("activeShapeMapTab") + object OptActiveQueryTabParam extends OptionalQueryParamDecoderMatcher[String]("activeQueryTab") + object WdEntityParam extends QueryParamDecoderMatcher[String]("wdEntity") + object WdSchemaParam extends QueryParamDecoderMatcher[String]("wdSchema") + object LanguageParam extends QueryParamDecoderMatcher[String]("language") - object LabelParam extends QueryParamDecoderMatcher[String]("label") - object UrlParam extends QueryParamDecoderMatcher[String](url) - object UrlCodeParam extends QueryParamDecoderMatcher[String](urlCode) + + object LabelParam extends QueryParamDecoderMatcher[String]("label") + + object UrlParam extends QueryParamDecoderMatcher[String](url) + + object UrlCodeParam extends QueryParamDecoderMatcher[String](urlCode) + object HostNameParam extends QueryParamDecoderMatcher[String](hostname) - object LimitParam extends OptionalQueryParamDecoderMatcher[String]("limit") + + object LimitParam extends OptionalQueryParamDecoderMatcher[String]("limit") + object ContinueParam extends OptionalQueryParamDecoderMatcher[String]("continue") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PartsMap.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/PartsMap.scala similarity index 96% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/PartsMap.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/PartsMap.scala index 7e9cc768..76441751 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PartsMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/PartsMap.scala @@ -1,4 +1,5 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes + import cats.effect.IO import cats.implicits._ import fs2.text.utf8Decode diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala new file mode 100644 index 00000000..28e63f3b --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala @@ -0,0 +1,38 @@ +package es.weso.rdfshape.server.api.routes.api + +import cats.effect._ +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.{ApiService, Defaults} +import io.circe._ +import org.http4s._ +import org.http4s.circe._ +import org.http4s.client.Client +import org.http4s.dsl.Http4sDsl +import org.http4s.server.staticcontent.resourceServiceBuilder +import org.log4s.getLogger + +/** API service to handle multiple general tasks (server status, etc.) 
+ * @param client HTTP4S client object + */ +class APIService(client: Client[IO]) extends Http4sDsl[IO] with ApiService { + + val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + + case req @ GET -> Root / `api` / "health" => + Ok("OK") + + } + private val relativeBase = Defaults.relativeBase + private val logger = getLogger + private val swagger = + resourceServiceBuilder[IO]("/swagger") // ResourceService.Config()) + + private def errJson(msg: String): IO[Response[IO]] = + Ok(Json.fromFields(List(("error", Json.fromString(msg))))) + +} + +object APIService { + def apply(client: Client[IO]): APIService = + new APIService(client) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataConverter.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataConverter.scala similarity index 98% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/DataConverter.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataConverter.scala index 90aed5f4..ff0013b0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataConverter.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataConverter.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.data import cats.effect.IO import com.typesafe.scalalogging.LazyLogging @@ -8,7 +8,7 @@ import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} import es.weso.rdfshape.server.api.format.DataFormat import es.weso.rdfshape.server.api.merged.CompoundData import es.weso.rdfshape.server.api.results.DataConversionResult -import es.weso.utils.IOUtils._ +import es.weso.utils.IOUtils.{either2io, err} import guru.nidi.graphviz.engine.{Format, Graphviz} import guru.nidi.graphviz.model.MutableGraph import guru.nidi.graphviz.parse.Parser diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataParam.scala similarity index 98% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataParam.scala index 79fb74aa..7c77e437 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataParam.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.data import cats.effect._ import cats.implicits._ @@ -6,11 +6,12 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena._ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, RDFReasoner} -import es.weso.rdfshape.server.api.Defaults._ import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.merged.CompoundData +import es.weso.rdfshape.server.api.routes.Defaults._ +import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.html2rdf.HTML2RDF -import es.weso.utils.IOUtils._ +import es.weso.utils.IOUtils.err import java.net.URI diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala similarity index 94% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala index 7e3f0ff9..6ab177fd 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala @@ -1,14 +1,16 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.data import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import 
es.weso.rdfshape.server.api.ApiDefinitions._ -import es.weso.rdfshape.server.api.ApiHelper._ -import es.weso.rdfshape.server.api.Defaults.defaultDataFormat -import es.weso.rdfshape.server.api.QueryParams._ import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.results._ +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.ApiHelper._ +import es.weso.rdfshape.server.api.routes.Defaults.defaultDataFormat +import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.routes.endpoint.SparqlQueryParam +import es.weso.rdfshape.server.api.routes.{Defaults, PartsMap} import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.schema._ import es.weso.utils.IOUtils._ @@ -22,6 +24,9 @@ import org.http4s.multipart.Multipart import scala.util.Try +/** API Service to handle RDF data + * @param client HTTP4S client object + */ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataValue.scala similarity index 87% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/DataValue.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataValue.scala index b5f167f0..3e1c29a0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/DataValue.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataValue.scala @@ -1,4 +1,5 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.data + import es.weso.rdfshape.server.api.format.DataFormat case class DataValue( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointParam.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointParam.scala similarity index 95% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointParam.scala index a75f65d1..16125186 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/EndpointParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointParam.scala @@ -1,9 +1,10 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.endpoint import cats.data.EitherT import cats.effect._ import es.weso.rdf.RDFReader import es.weso.rdf.jena.Endpoint +import es.weso.rdfshape.server.api.routes.PartsMap import io.circe.Json // import scalaj.http._ import es.weso.utils.IOUtils._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala new file mode 100644 index 00000000..0b880e6b --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala @@ -0,0 +1,141 @@ +package es.weso.rdfshape.server.api.routes.endpoint + +import cats.data.EitherT +import cats.effect._ +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.jena.Endpoint +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{ + LimitParam, + OptEndpointParam, + OptNodeParam +} +import es.weso.rdfshape.server.api.routes.endpoint.{Query => ServerQuery} +import es.weso.rdfshape.server.api.routes.{Defaults, PartsMap} +import es.weso.rdfshape.server.utils.numeric.NumericUtils +import es.weso.utils.IOUtils._ +import io.circe.Json +import org.http4s._ +import org.http4s.circe._ +import 
org.http4s.client.Client +import org.http4s.dsl._ +import org.http4s.multipart._ + +/** API service to handle endpoints and operations targeted to them (queries, etc.) + * + * @param client HTTP4S client object + */ +class EndpointService(client: Client[IO]) + extends Http4sDsl[IO] + with LazyLogging { + + private val relativeBase = Defaults.relativeBase + + def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + + case req @ POST -> Root / `api` / "endpoint" / "query" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + + val r: EitherT[IO, String, Json] = for { + ep <- EndpointParam.mkEndpoint(partsMap) + // json = Json.Null + endpoint <- ep.getEndpointAsRDFReader + either <- EitherT + .liftF[IO, String, Either[ + String, + (ServerQuery, SparqlQueryParam) + ]]( + SparqlQueryParam.mkQuery(partsMap) + ) + pair <- EitherT.fromEither[IO](either) + (_, qp) = pair + optQueryStr = qp.query.map(_.str) + json <- { + logger.debug( + s"Query to endpoint $endpoint: ${optQueryStr.getOrElse("")}" + ) + io2es(endpoint.queryAsJson(optQueryStr.getOrElse(""))) + } + } yield json + + for { + either <- r.value + resp <- either.fold( + e => errJson(s"Error querying endpoint: $e"), + json => Ok(json) + ) + } yield resp + } + + case req @ POST -> Root / `api` / "endpoint" / "info" => + req.decode[Multipart[IO]] { m => + { + val partsMap = PartsMap(m.parts) + val r: EitherT[IO, String, Json] = for { + ep <- EndpointParam.mkEndpoint(partsMap) + ei <- EitherT.liftF[IO, String, EndpointInfo](ep.getInfo(client)) + } yield ei.asJson + for { + either <- r.value + resp <- either.fold( + e => errJson(s"Error obtaining info on Endpoint $e"), + json => Ok(json) + ) + } yield resp + } + } + + case GET -> Root / `api` / "endpoint" / "outgoing" :? 
+ OptEndpointParam(optEndpoint) +& + OptNodeParam(optNode) +& + LimitParam(optLimit) => + for { + eitherOutgoing <- getOutgoing(optEndpoint, optNode, optLimit).value + resp <- eitherOutgoing.fold( + (s: String) => errJson(s"Error: $s"), + (outgoing: Outgoing) => Ok(outgoing.toJson) + ) + } yield resp + + } + + private def errJson(msg: String): IO[Response[IO]] = + Ok(Json.fromFields(List(("error", Json.fromString(msg))))) + + private def getOutgoing( + optEndpoint: Option[String], + optNode: Option[String], + optLimit: Option[String] + ): EitherT[IO, String, Outgoing] = { + for { + endpointIRI <- EitherT.fromEither[IO]( + Either + .fromOption(optEndpoint, "No endpoint provided") + .flatMap(IRI.fromString(_)) + ) + node <- EitherT.fromEither[IO]( + Either + .fromOption(optNode, "No node provided") + .flatMap(IRI.fromString(_)) + ) + limit <- EitherT.fromEither[IO]( + NumericUtils.parseInt(optLimit.getOrElse("1")) + ) + o <- outgoing(endpointIRI, node, limit) + } yield o + } + + private def outgoing(endpoint: IRI, node: IRI, limit: Int): ESIO[Outgoing] = + for { + triples <- stream2es(Endpoint(endpoint).triplesWithSubject(node)) + } yield Outgoing.fromTriples(node, endpoint, triples.toSet) + +} + +object EndpointService { + def apply(client: Client[IO]): EndpointService = + new EndpointService(client) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Outgoing.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Outgoing.scala similarity index 97% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/Outgoing.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Outgoing.scala index c4e41237..366b4605 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/Outgoing.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Outgoing.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api +package 
es.weso.rdfshape.server.api.routes.endpoint import es.weso.rdf.nodes.{IRI, RDFNode} import es.weso.rdf.triples.RDFTriple diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala new file mode 100644 index 00000000..79fb48f6 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala @@ -0,0 +1,6 @@ +package es.weso.rdfshape.server.api.routes.endpoint + +/** Data class representing a SPARQL query + * @param str query string + */ +case class Query(str: String) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SparqlQueryParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/SparqlQueryParam.scala similarity index 93% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/SparqlQueryParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/SparqlQueryParam.scala index 052e0ade..09a3571e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SparqlQueryParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/SparqlQueryParam.scala @@ -1,7 +1,8 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.endpoint import cats.effect.IO -import es.weso.rdfshape.server.api.Defaults._ +import es.weso.rdfshape.server.api.routes.Defaults._ +import es.weso.rdfshape.server.api.routes.PartsMap import scala.io.Source import scala.util.Try diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala similarity index 80% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala index 
c1fbc3a6..1b9adccb 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala @@ -1,8 +1,8 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.fetch import cats.effect._ -import es.weso.rdfshape.server.api.ApiDefinitions._ -import es.weso.rdfshape.server.api.QueryParams.UrlParam +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.UrlParam import org.http4s._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala similarity index 94% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala index 9511dcad..6ba1b863 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/PermalinkService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala @@ -1,9 +1,12 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.permalink import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.ApiDefinitions._ -import es.weso.rdfshape.server.api.QueryParams.{UrlCodeParam, UrlParam} +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{ + UrlCodeParam, + UrlParam +} import org.http4s._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl @@ -20,6 +23,10 @@ import scala.concurrent.duration.Duration import scala.concurrent.{Await, Promise} import scala.util.Random +/** API endpoint to handle 
the permalink service (creation, retrieval, etc.) + * + * @param client HTTP4S client object + */ class PermalinkService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { @@ -61,8 +68,10 @@ class PermalinkService(client: Client[IO]) observable.subscribe(new Observer[InsertOneResult] { override def onSubscribe(subscription: Subscription): Unit = subscription.request(1) + override def onNext(result: InsertOneResult): Unit = logger.info(s"Created permalink: $url => $urlCode") + override def onError(e: Throwable): Unit = { logger.error(s"Permalink creation failed: ${e.getMessage}") InternalServerError( @@ -101,6 +110,7 @@ class PermalinkService(client: Client[IO]) override def onSubscribe(subscription: Subscription): Unit = subscription.request(1) + override def onNext(result: Document): Unit = { val longUrl = result.getString("longUrl") val urlCode = result.getLong("urlCode") @@ -111,12 +121,14 @@ class PermalinkService(client: Client[IO]) // Refresh use date of the link updateUrl(urlCode) } + override def onError(e: Throwable): Unit = { logger.error(s"Original url recovery failed: ${e.getMessage}") promise.success( BadGateway(s"Original url recovery failed for code: $urlCode") ) } + override def onComplete(): Unit = { if(!promise.isCompleted) { logger.warn(s"Could not find the original url for code: $urlCode") @@ -161,6 +173,7 @@ class PermalinkService(client: Client[IO]) observable.subscribe(new Observer[Document] { override def onSubscribe(subscription: Subscription): Unit = subscription.request(1) + override def onNext(result: Document): Unit = { val urlCode = result.getLong("urlCode") @@ -170,10 +183,12 @@ class PermalinkService(client: Client[IO]) // Refresh use date of the link updateUrl(urlCode) } + override def onError(e: Throwable): Unit = { logger.error(s"Permalink recovery failed: ${e.getMessage}") promise.success(None) } + override def onComplete(): Unit = { if(!promise.isCompleted) { logger.error(s"Could not find the permalink for url: 
$urlPath") @@ -198,11 +213,14 @@ class PermalinkService(client: Client[IO]) observable.subscribe(new Observer[UpdateResult] { override def onSubscribe(subscription: Subscription): Unit = subscription.request(1) + override def onNext(result: UpdateResult): Unit = { logger.debug(s"Refreshed date of permalink: $code") } + override def onError(e: Throwable): Unit = () - override def onComplete(): Unit = () + + override def onComplete(): Unit = () }) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaInfoResult.scala similarity index 76% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaInfoResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaInfoResult.scala index f5f657e1..716cd96e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaInfoResult.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.schema import io.circe.Json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaParam.scala similarity index 98% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaParam.scala index a4d5945a..48ce9934 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaParam.scala @@ -1,11 +1,12 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.schema import cats.effect._ import cats.implicits._ import 
com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.Defaults._ import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.routes.Defaults._ +import es.weso.rdfshape.server.api.routes.{ApiHelper, PartsMap} import es.weso.schema.{Schema, Schemas} import scala.io.Source diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaService.scala similarity index 96% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaService.scala index a43a82f3..cf1894db 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaService.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.schema import cats.data._ import cats.effect._ @@ -6,12 +6,14 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.{InferenceEngine, RDFReasoner} -import es.weso.rdfshape.server.api.ApiDefinitions._ -import es.weso.rdfshape.server.api.ApiHelper._ -import es.weso.rdfshape.server.api.Defaults._ -import es.weso.rdfshape.server.api.QueryParams._ import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.results._ +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.ApiHelper._ +import es.weso.rdfshape.server.api.routes.Defaults._ +import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.routes.data.DataParam +import es.weso.rdfshape.server.api.routes.{Defaults, PartsMap} import es.weso.rdfshape.server.api.utils.OptEitherF._ import 
es.weso.schema._ import es.weso.utils.IOUtils._ @@ -25,6 +27,9 @@ import org.http4s.dsl.Http4sDsl import org.http4s.headers._ import org.http4s.multipart.Multipart +/** API service to handle schema-related operations + * @param client HTTP4S client object + */ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { @@ -321,7 +326,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { SchemaEmbedded(optSchemaEmbedded) +& InferenceParam(optInference) +& OptEndpointParam(optEndpoint) +& -// OptEndpointsParam(optEndpoints) +& + // OptEndpointsParam(optEndpoints) +& OptActiveDataTabParam(optActiveDataTab) +& OptActiveSchemaTabParam(optActiveSchemaTab) +& OptActiveShapeMapTabParam(optActiveShapeMapTab) => @@ -486,7 +491,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } -// private def either2f[A](e: Either[String,A]): F[A] = ??? + // private def either2f[A](e: Either[String,A]): F[A] = ??? 
// private def diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaValue.scala similarity index 71% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaValue.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaValue.scala index e159fd54..cdd17497 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/SchemaValue.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaValue.scala @@ -1,5 +1,7 @@ -package es.weso.rdfshape.server.api -import es.weso.rdfshape.server.api.format._ +package es.weso.rdfshape.server.api.routes.schema + +import es.weso.rdfshape.server.api.format.SchemaFormat + case class SchemaValue( schema: Option[String], schemaURL: Option[String], diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/TriggerModeParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/TriggerModeParam.scala similarity index 97% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/TriggerModeParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/TriggerModeParam.scala index 4a2b802e..e43ddfbe 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/TriggerModeParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/TriggerModeParam.scala @@ -1,10 +1,11 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.schema import cats.effect.IO import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.PrefixMap -import es.weso.rdfshape.server.api.Defaults._ +import es.weso.rdfshape.server.api.routes.Defaults._ +import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.shapemaps.ShapeMap case class TriggerModeParam( @@ -43,7 +44,7 @@ case class 
TriggerModeParam( case None => IO.pure((None, Left(s"No value for shapeMapURL"))) case Some(shapeMapUrl) => logger.trace(s"ShapeMapUrl: $shapeMapUrl") - + val shapeMapFormat = shapeMapFormatUrl.getOrElse(defaultShapeMapFormat) ShapeMap diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapParam.scala similarity index 96% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapParam.scala index 47a7329d..46c5116e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapParam.scala @@ -1,8 +1,9 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.shapemap import cats.effect.IO import cats.implicits._ import es.weso.rdf.PrefixMap +import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.shapemaps._ import org.log4s.getLogger @@ -17,7 +18,7 @@ case class ShapeMapParam( val shapeMapFormat: String = optShapeMapFormat.getOrElse(Compact).name val shapeMapTab: String = activeShapeMapTab.getOrElse(ShapeMapTextAreaType.id) - private[this] val logger = getLogger + private[this] val logger = getLogger def getShapeMap: IO[ShapeMap] = for { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala similarity index 81% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala index 1230eaa1..ef4431e3 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapService.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala @@ -1,10 +1,11 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.shapemap import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.ApiDefinitions._ -import es.weso.rdfshape.server.api.ApiHelper._ import es.weso.rdfshape.server.api.results.ShapeMapInfoResult +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.ApiHelper._ +import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.shapemaps.ShapeMap import io.circe._ import org.http4s._ @@ -13,6 +14,10 @@ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl import org.http4s.multipart._ +/** API service to handle shapemap-related operations + * + * @param client HTTP4S client object + */ class ShapeMapService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapValue.scala similarity index 79% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapValue.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapValue.scala index acd70825..e5414e1d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShapeMapValue.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapValue.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.shapemap case class ShapeMapValue( shapeMap: Option[String], diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala similarity index 84% rename from 
modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala index 1174bb7a..596033e2 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/ShExService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala @@ -1,6 +1,7 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.shex + import cats.effect._ -import es.weso.rdfshape.server.api.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ import es.weso.schema._ import io.circe._ import org.http4s._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikibaseSchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala similarity index 95% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/WikibaseSchemaParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala index 19f06fe8..07ab14f6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikibaseSchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala @@ -1,12 +1,14 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.wikibase import cats.effect._ import es.weso.rdf.RDFReasoner +import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.schema.SchemaParam import es.weso.rdfshape.server.wikibase._ import es.weso.schema.{Schema, Schemas} +import org.http4s._ import org.http4s.client._ import org.http4s.dsl.io._ -import org.http4s._ case class WikibaseSchemaParam( maybeSchemaParam: Option[SchemaParam], diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala similarity index 93% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala index 25bbfecb..e436c6cb 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/WikidataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala @@ -1,48 +1,44 @@ -package es.weso.rdfshape.server.api +package es.weso.rdfshape.server.api.routes.wikibase // import cats._ + import cats.data._ import cats.effect._ import cats.implicits._ -import es.weso._ +import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReader import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdfshape.server.streams.Streams -import es.weso.rdfshape.server.api.QueryParams._ +import es.weso.rdf.nodes.IRI +import es.weso.rdf.sgraph._ +import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.routes.wikibase.WikibaseSchemaParam +import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.routes.ApiHelper._ +import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.api.utils.Http4sUtils._ -import es.weso.rdfshape.server.api.values._ +import es.weso.schema.{Schema, ShapeMapTrigger} +import es.weso.schemaInfer.{InferOptions, SchemaInfer} +import es.weso.shapemaps.{Status => _, _} +import es.weso.utils.IOUtils._ +import es.weso.wikibaserdf._ import io.circe._ import io.circe.parser._ import fs2._ import org.http4s._ -import org.http4s.Uri -import org.http4s.Charset._ import org.http4s.circe._ import org.http4s.client._ +import org.http4s.client.middleware.FollowRedirect import org.http4s.dsl._ import org.http4s.headers._ -import org.http4s.multipart._ import org.http4s.implicits._ -import 
es.weso.rdf.sgraph._ -import ApiDefinitions._ -import es.weso.utils.IOUtils._ -import org.http4s.client.middleware.FollowRedirect -import es.weso.shapemaps.{Status => _, _} -import es.weso.rdf.nodes.IRI -import es.weso.schemaInfer.SchemaInfer -import es.weso.schema.Schema -import es.weso.schemaInfer.InferOptions -import es.weso.shex.ResolvedSchema -import es.weso.shex.validator.Validator -import es.weso.schema.ShapeMapTrigger -import es.weso.utils.internal.CollectionCompat._ +import org.http4s.multipart._ import scala.util.control.NoStackTrace import scala.util.matching.Regex -import es.weso.wikibaserdf._ -import ApiHelper._ -import com.typesafe.scalalogging.LazyLogging +/** API service to handle wikidata related operations + * @param client HTTP4S client object + */ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { @@ -161,7 +157,7 @@ class WikidataService(client: Client[IO]) val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - val requestUrl = s"${endpoint.getOrElse("https://www.wikidata.org")}" + val requestUrl = s"${endpoint.getOrElse("https: //www.wikidata.org")}" val uri = Uri .fromString(requestUrl) .valueOr(throw _) @@ -308,7 +304,8 @@ class WikidataService(client: Client[IO]) label <- EitherT(partsMap.eitherPartValue("entity")) info <- either2es[InfoEntity](cnvEntity(label)) _ <- { - logger.debug(s"Extraction URI: ${info.uri}"); ok_esf[Unit, IO](()) + logger.debug(s"Extraction URI: ${info.uri}"); + ok_esf[Unit, IO](()) } strRdf <- io2es(redirectClient.expect[String](info.uri)) eitherInferred <- io2es( @@ -335,7 +332,9 @@ class WikidataService(client: Client[IO]) val (schema, _) = pair schema.serialize("SHEXC") }) - _ <- { logger.trace(s"ShExC str: $shExCStr"); ok_es[Unit](()) } + _ <- { + logger.trace(s"ShExC str: $shExCStr"); ok_es[Unit](()) + } resp <- io2es(Ok(mkExtractAnswer(shExCStr, label))) } yield resp for { @@ -357,10 +356,14 @@ 
class WikidataService(client: Client[IO]) uri = uri"http://156.35.94.158:8081/shexer" ).withHeaders(`Content-Type`(MediaType.application.`json`)) .withEntity[Json](jsonParams) - _ <- { logger.debug(s"URI: ${jsonParams.spaces2}"); ok_es[Unit](()) } + _ <- { + logger.debug(s"URI: ${jsonParams.spaces2}"); ok_es[Unit](()) + } result <- f2es(redirectClient.expect[Json](postRequest)) - _ <- { logger.trace(s"Result\n${result.spaces2}"); ok_es[Unit](()) } - resp <- f2es(Ok(result)) + _ <- { + logger.trace(s"Result\n${result.spaces2}"); ok_es[Unit](()) + } + resp <- f2es(Ok(result)) } yield resp for { either <- r.value @@ -375,13 +378,21 @@ class WikidataService(client: Client[IO]) val partsMap = PartsMap(m.parts) val r: IO[Response[IO]] = for { eitherItem <- partsMap.eitherPartValue("item") - _ <- { logger.debug(eitherItem.toString); IO.pure(()) } - item <- fromEither(eitherItem) - _ <- { logger.debug(item); IO.pure(()) } - info <- fromEither(cnvEntity2(item)) - _ <- { logger.debug(info.toString); IO.pure(()) } - pair <- WikibaseSchemaParam.mkSchema(partsMap, None, client) - _ <- { logger.debug(pair.toString()); IO.pure(()) } + _ <- { + logger.debug(eitherItem.toString); IO.pure(()) + } + item <- fromEither(eitherItem) + _ <- { + logger.debug(item); IO.pure(()) + } + info <- fromEither(cnvEntity2(item)) + _ <- { + logger.debug(info.toString); IO.pure(()) + } + pair <- WikibaseSchemaParam.mkSchema(partsMap, None, client) + _ <- { + logger.debug(pair.toString()); IO.pure(()) + } (schema, wbp) = pair iriItem <- fromEither(IRI.fromString(info.sourceUri)) shapeMap <- fromEither(ShapeMap.empty.add(iriItem, Start)) @@ -403,10 +414,6 @@ class WikidataService(client: Client[IO]) } } - case class WikibaseServiceError(msg: String) - extends RuntimeException(msg) - with NoStackTrace - private def fromEither[A](either: Either[String, A]): IO[A] = { either.fold(s => IO.raiseError(WikibaseServiceError(s)), IO.pure(_)) } @@ -428,31 +435,6 @@ class WikidataService(client: Client[IO]) ) } - 
private def wikidataPrefixes: Either[String, Json] = { - val json = """{ - "http://wikiba.se/ontology#": "wikibase", - "http://www.bigdata.com/rdf#": "bd", - "http://www.wikidata.org/entity/": "wd", - "http://www.wikidata.org/prop/direct/": "wdt", - "http://www.wikidata.org/prop/direct-normalized/": "wdtn", - "http://www.wikidata.org/entity/statement/": "wds", - "http://www.wikidata.org/prop/": "p", - "http://www.wikidata.org/reference/": "wdref", - "http://www.wikidata.org/value/": "wdv", - "http://www.wikidata.org/prop/statement/": "ps", - "http://www.wikidata.org/prop/statement/value/": "psv", - "http://www.wikidata.org/prop/statement/value-normalized/": "psn", - "http://www.wikidata.org/prop/qualifier/": "pq", - "http://www.wikidata.org/prop/qualifier/value/": "pqv", - "http://www.wikidata.org/prop/qualifier/value-normalized/": "pqn", - "http://www.wikidata.org/prop/reference/": "pr", - "http://www.wikidata.org/prop/reference/value/": "prv", - "http://www.wikidata.org/prop/reference/value-normalized/": "prn", - "http://www.wikidata.org/prop/novalue/": "wdno" - }""" - parse(json).leftMap(e => s"Error parsing prefixes: $e") - } - private def mkShexerParams(entity: String): Either[String, Json] = for { prefixes <- wikidataPrefixes } yield Json.fromFields( @@ -481,7 +463,31 @@ class WikidataService(client: Client[IO]) ) ) - private case class InfoEntity(localName: String, uri: Uri, sourceUri: String) + private def wikidataPrefixes: Either[String, Json] = { + val json = + """{ + "http://wikiba.se/ontology#": "wikibase", + "http://www.bigdata.com/rdf#": "bd", + "http://www.wikidata.org/entity/": "wd", + "http://www.wikidata.org/prop/direct/": "wdt", + "http://www.wikidata.org/prop/direct-normalized/": "wdtn", + "http://www.wikidata.org/entity/statement/": "wds", + "http://www.wikidata.org/prop/": "p", + "http://www.wikidata.org/reference/": "wdref", + "http://www.wikidata.org/value/": "wdv", + "http://www.wikidata.org/prop/statement/": "ps", + 
"http://www.wikidata.org/prop/statement/value/": "psv", + "http://www.wikidata.org/prop/statement/value-normalized/": "psn", + "http://www.wikidata.org/prop/qualifier/": "pq", + "http://www.wikidata.org/prop/qualifier/value/": "pqv", + "http://www.wikidata.org/prop/qualifier/value-normalized/": "pqn", + "http://www.wikidata.org/prop/reference/": "pr", + "http://www.wikidata.org/prop/reference/value/": "prv", + "http://www.wikidata.org/prop/reference/value-normalized/": "prn", + "http://www.wikidata.org/prop/novalue/": "wdno" + }""" + parse(json).leftMap(e => s"Error parsing prefixes: $e") + } private def cnvEntity(entity: String): Either[String, InfoEntity] = { val wdRegex = "http://www.wikidata.org/entity/(.*)".r @@ -520,6 +526,72 @@ class WikidataService(client: Client[IO]) } } + def cnvEntities(json: Json): Either[String, Json] = for { + entities <- json.hcursor + .downField("search") + .values + .toRight("Error obtaining search value") + converted = Json.fromValues( + entities.map((value: Json) => + Json.fromFields( + List( + ( + "label", + value.hcursor.downField("label").focus.getOrElse(Json.Null) + ), + ("id", value.hcursor.downField("id").focus.getOrElse(Json.Null)), + ( + "uri", + value.hcursor.downField("concepturi").focus.getOrElse(Json.Null) + ), + ( + "descr", + value.hcursor.downField("description").focus.getOrElse(Json.Null) + ) + ) + ) + ) + ) + } yield converted + + private def cnvLanguages(json: Json): Either[String, Json] = for { + // query <- .focus.toRight(s"Error obtaining query at ${json.spaces2}" ) + languagesObj <- json.hcursor + .downField("query") + .downField("wbcontentlanguages") + .focus + .toRight(s"Error obtaining query/wbcontentlanguages at ${json.spaces2}") + keys <- languagesObj.hcursor.keys.toRight( + s"Error obtaining values from languages: ${languagesObj.spaces2}" + ) + converted = Json.fromValues( + keys.map(key => + Json.fromFields( + List( + ( + "label", + languagesObj.hcursor + .downField(key) + .downField("code") + .focus 
+ .getOrElse(Json.Null) + ), + ( + "name", + languagesObj.hcursor + .downField(key) + .downField("autonym") + .focus + .getOrElse(Json.Null) + ) + ) + ) + ) + ) + } yield { + converted + } + private def wdEntity( optEntity: Option[String], withDot: Boolean @@ -529,12 +601,12 @@ class WikidataService(client: Client[IO]) case Some(entity) => { val process = for { uri <- getUri(entity) -// data <- resolve(uri) -// rdf <- getRDF(data) -// maybeDot <- generateDot(rdf, withDot)/* if (generateDot) + // data <- resolve(uri) + // rdf <- getRDF(data) + // maybeDot <- generateDot(rdf, withDot)/* if (generateDot) /* EitherT.fromEither[F](RDF2Dot.rdf2dot(rdf).bimap(e => s"Error * converting to Dot: $e", s => Some(s.toString))) */ -// else EitherT.pure(none) */ + // else EitherT.pure(none) */ json <- prepareJson( entity, proxyUri(uri) @@ -555,6 +627,14 @@ class WikidataService(client: Client[IO]) ): Option[Json] = e.fold(msg => Some(jsonErr(entity, msg)), identity) + private def jsonErr(entity: String, msg: String): Json = + Json.fromFields( + List( + ("entity", Json.fromString(entity)), + ("error", Json.fromString(msg)) + ) + ) + private def getUri(entity: String): EitherT[IO, String, Uri] = { logger.debug(s"get entity: $entity") val q = """Q(\d*)""".r @@ -569,6 +649,19 @@ class WikidataService(client: Client[IO]) } } + /* private def getRDF(str: Stream[F,String]): EitherT[F, String, RDFReader] = + * EitherT.liftF(LiftIO[F].liftIO(RDFAsJenaModel.empty)) */ + + private def prepareJson(entity: String, uri: Uri): EitherT[IO, String, Json] = + EitherT.pure( + Json.fromFields( + List( + ("entity", Json.fromString(entity)), + ("uri", Json.fromString(uri.toString)) + ) + ) + ) + private def resolve(uri: Uri): EitherT[IO, String, Stream[IO, String]] = { logger.debug(s"Resolve: $uri") for { @@ -579,11 +672,6 @@ class WikidataService(client: Client[IO]) } yield data } - /* private def getRDF(str: Stream[F,String]): EitherT[F, String, RDFReader] = - * 
EitherT.liftF(LiftIO[F].liftIO(RDFAsJenaModel.empty)) */ - - private def fromIO[A](io: IO[A]): EitherT[IO, String, A] = EitherT.liftF(io) - private def generateDot( rdf: RDFReader, maybeDot: Boolean @@ -596,24 +684,6 @@ class WikidataService(client: Client[IO]) else EitherT.pure(None) - private def jsonErr(entity: String, msg: String): Json = - Json.fromFields( - List( - ("entity", Json.fromString(entity)), - ("error", Json.fromString(msg)) - ) - ) - - private def prepareJson(entity: String, uri: Uri): EitherT[IO, String, Json] = - EitherT.pure( - Json.fromFields( - List( - ("entity", Json.fromString(entity)), - ("uri", Json.fromString(uri.toString)) - ) - ) - ) - private def prepareJsonOk( entity: String, uri: Uri, @@ -629,6 +699,8 @@ class WikidataService(client: Client[IO]) ) ++ dotField(maybeDot) ) + private def fromIO[A](io: IO[A]): EitherT[IO, String, A] = EitherT.liftF(io) + private def dotField(maybeDot: Option[String]): List[(String, Json)] = maybeDot.fold(List[(String, Json)]())(s => List(("dot", Json.fromString(s))) @@ -651,72 +723,6 @@ class WikidataService(client: Client[IO]) * "aa", "autonym": "Qafár af" }, "ab": { "code": "ab", "autonym": "Аҧсшәа" } * } } */ - def cnvEntities(json: Json): Either[String, Json] = for { - entities <- json.hcursor - .downField("search") - .values - .toRight("Error obtaining search value") - converted = Json.fromValues( - entities.map((value: Json) => - Json.fromFields( - List( - ( - "label", - value.hcursor.downField("label").focus.getOrElse(Json.Null) - ), - ("id", value.hcursor.downField("id").focus.getOrElse(Json.Null)), - ( - "uri", - value.hcursor.downField("concepturi").focus.getOrElse(Json.Null) - ), - ( - "descr", - value.hcursor.downField("description").focus.getOrElse(Json.Null) - ) - ) - ) - ) - ) - } yield converted - - private def cnvLanguages(json: Json): Either[String, Json] = for { - // query <- .focus.toRight(s"Error obtaining query at ${json.spaces2}" ) - languagesObj <- json.hcursor - 
.downField("query") - .downField("wbcontentlanguages") - .focus - .toRight(s"Error obtaining query/wbcontentlanguages at ${json.spaces2}") - keys <- languagesObj.hcursor.keys.toRight( - s"Error obtaining values from languages: ${languagesObj.spaces2}" - ) - converted = Json.fromValues( - keys.map(key => - Json.fromFields( - List( - ( - "label", - languagesObj.hcursor - .downField(key) - .downField("code") - .focus - .getOrElse(Json.Null) - ), - ( - "name", - languagesObj.hcursor - .downField(key) - .downField("autonym") - .focus - .getOrElse(Json.Null) - ) - ) - ) - ) - ) - } yield { - converted - } - private def cnvEntitySchema(wdSchema: String): Uri = { val uri = uri"https://www.wikidata.org".withPath( Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") @@ -724,6 +730,12 @@ class WikidataService(client: Client[IO]) uri } + case class WikibaseServiceError(msg: String) + extends RuntimeException(msg) + with NoStackTrace + + private case class InfoEntity(localName: String, uri: Uri, sourceUri: String) + } object WikidataService { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/numeric/NumericUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/numeric/NumericUtils.scala new file mode 100644 index 00000000..9ef106e7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/numeric/NumericUtils.scala @@ -0,0 +1,17 @@ +package es.weso.rdfshape.server.utils.numeric + +import scala.util.Try + +/** Utilities related to math + */ +case object NumericUtils { + + /** Try to parse an integer + * + * @param str Text chain to be parsed + * @return The resulting integer if the string was parsed, an error message otherwise + */ + def parseInt(str: String): Either[String, Int] = + Try(str.toInt).map(Right(_)).getOrElse(Left(s"$str is not a number")) + +} diff --git a/modules/server/src/test/scala/es/weso/rdfshape/server/api/TestHttp4sTest.scala 
b/modules/server/src/test/scala/es/weso/rdfshape/server/api/TestHttp4sTest.scala index 8b1fd468..a5d89684 100644 --- a/modules/server/src/test/scala/es/weso/rdfshape/server/api/TestHttp4sTest.scala +++ b/modules/server/src/test/scala/es/weso/rdfshape/server/api/TestHttp4sTest.scala @@ -3,13 +3,16 @@ import cats.effect._ import io.circe.Json import fs2._ import munit.CatsEffectSuite +import org.http4s._ import org.http4s.circe._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl import org.http4s.ember.client.EmberClientBuilder import org.http4s.implicits._ -import org.http4s.{HttpRoutes, Request, Response, _} +/** API service for testing purposes + * @param client HTTP4S client object + */ class TestService(client: Client[IO]) extends Http4sDsl[IO] { val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / "hi" => From 80c1059b833f8950b39aa84ab242859e4aefd5c3 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Wed, 18 Aug 2021 16:26:38 +0200 Subject: [PATCH 12/32] Refactored api utils. --- build.sbt | 10 +-- .../api/routes/wikibase/WikidataService.scala | 31 +++++--- .../server/api/utils/Http4sUtils.scala | 57 +++++++++++--- .../server/api/utils/OptEitherF.scala | 38 +++++++-- .../rdfshape/server/streams/Streams.scala | 78 +++++++++---------- version.sbt | 2 +- 6 files changed, 143 insertions(+), 73 deletions(-) diff --git a/build.sbt b/build.sbt index 8308a9aa..14453b68 100644 --- a/build.sbt +++ b/build.sbt @@ -33,9 +33,6 @@ lazy val packagingSettings = Seq( // Output filename on "sbt-native-packager" tasks Universal / packageName := (Global / packageName).value ) -ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) -ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) - /* ------------------------------------------------------------------------- */ // Shared compilation settings for all modules. 
// https://docs.scala-lang.org/overviews/compiler-options/index.html @@ -49,6 +46,7 @@ lazy val compilationSettings = Seq( "-Yrangepos" ) ) + // Scaladoc settings for docs generation. Run task "doc" or "server / doc". // https://www.scala-sbt.org/1.x/docs/Howto-Scaladoc.html /* https://github.com/scala/scala/blob/2.13.x/src/scaladoc/scala/tools/nsc/doc/Settings.scala */ @@ -82,12 +80,14 @@ lazy val scaladocSettings: Seq[Def.Setting[_]] = Seq( // Need to generate docs to publish to oss Compile / packageDoc / publishArtifact := true ) +ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) +ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) // Setup Mdoc + Docusaurus settings lazy val mdocSettings = Seq( mdocVariables := Map( "APP_NAME" -> (Global / name).value, "INNER_NAME" -> name.value, - "VERSION" -> (ThisBuild / version).value, + "VERSION" -> version.value, "WEBPAGE_URL" -> "https://www.weso.es/rdfshape-api/", "API_URL" -> "https://api.rdfshape.weso.es", "API_CONTAINER_REGISTRY" -> "https://github.com/orgs/weso/packages/container/package/rdfshape-api", @@ -271,7 +271,6 @@ lazy val server = project mongodb ) ) - /* ------------------------------------------------------------------------- */ // Documentation project, for MDoc + Docusaurus documentation lazy val docs = project @@ -291,7 +290,6 @@ lazy val MUnitFramework = new TestFramework("munit.Framework") /* DEPENDENCY versions */ lazy val http4sVersion = "1.0.0-M21" lazy val catsVersion = "2.5.0" - /* ------------------------------------------------------------------------- */ lazy val mongodbVersion = "4.1.1" lazy val any23Version = "2.2" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala index e436c6cb..378bcbf1 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala @@ -10,10 +10,9 @@ import es.weso.rdf.RDFReader import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI import es.weso.rdf.sgraph._ -import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.wikibase.WikibaseSchemaParam import es.weso.rdfshape.server.api.routes.ApiDefinitions._ import es.weso.rdfshape.server.api.routes.ApiHelper._ +import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.api.utils.Http4sUtils._ import es.weso.schema.{Schema, ShapeMapTrigger} @@ -37,6 +36,7 @@ import scala.util.control.NoStackTrace import scala.util.matching.Regex /** API service to handle wikidata related operations + * * @param client HTTP4S client object */ class WikidataService(client: Client[IO]) @@ -157,7 +157,7 @@ class WikidataService(client: Client[IO]) val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - val requestUrl = s"${endpoint.getOrElse("https: //www.wikidata.org")}" + val requestUrl = s"${endpoint.getOrElse("https: //www.wikidata.org")}" val uri = Uri .fromString(requestUrl) .valueOr(throw _) @@ -333,7 +333,8 @@ class WikidataService(client: Client[IO]) schema.serialize("SHEXC") }) _ <- { - logger.trace(s"ShExC str: $shExCStr"); ok_es[Unit](()) + logger.trace(s"ShExC str: $shExCStr"); + ok_es[Unit](()) } resp <- io2es(Ok(mkExtractAnswer(shExCStr, label))) } yield resp @@ -357,11 +358,13 @@ class WikidataService(client: Client[IO]) ).withHeaders(`Content-Type`(MediaType.application.`json`)) .withEntity[Json](jsonParams) _ <- { - logger.debug(s"URI: ${jsonParams.spaces2}"); ok_es[Unit](()) + logger.debug(s"URI: 
${jsonParams.spaces2}"); + ok_es[Unit](()) } result <- f2es(redirectClient.expect[Json](postRequest)) _ <- { - logger.trace(s"Result\n${result.spaces2}"); ok_es[Unit](()) + logger.trace(s"Result\n${result.spaces2}"); + ok_es[Unit](()) } resp <- f2es(Ok(result)) } yield resp @@ -379,19 +382,23 @@ class WikidataService(client: Client[IO]) val r: IO[Response[IO]] = for { eitherItem <- partsMap.eitherPartValue("item") _ <- { - logger.debug(eitherItem.toString); IO.pure(()) + logger.debug(eitherItem.toString); + IO.pure(()) } item <- fromEither(eitherItem) _ <- { - logger.debug(item); IO.pure(()) + logger.debug(item); + IO.pure(()) } info <- fromEither(cnvEntity2(item)) _ <- { - logger.debug(info.toString); IO.pure(()) + logger.debug(info.toString); + IO.pure(()) } pair <- WikibaseSchemaParam.mkSchema(partsMap, None, client) _ <- { - logger.debug(pair.toString()); IO.pure(()) + logger.debug(pair.toString()); + IO.pure(()) } (schema, wbp) = pair iriItem <- fromEither(IRI.fromString(info.sourceUri)) @@ -684,6 +691,8 @@ class WikidataService(client: Client[IO]) else EitherT.pure(None) + private def fromIO[A](io: IO[A]): EitherT[IO, String, A] = EitherT.liftF(io) + private def prepareJsonOk( entity: String, uri: Uri, @@ -699,8 +708,6 @@ class WikidataService(client: Client[IO]) ) ++ dotField(maybeDot) ) - private def fromIO[A](io: IO[A]): EitherT[IO, String, A] = EitherT.liftF(io) - private def dotField(maybeDot: Option[String]): List[(String, Json)] = maybeDot.fold(List[(String, Json)]())(s => List(("dot", Json.fromString(s))) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/Http4sUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/Http4sUtils.scala index 1db070c2..1e38eb24 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/Http4sUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/Http4sUtils.scala @@ -8,17 +8,48 @@ import org.http4s.client.Client import 
org.http4s.client.middleware.{FollowRedirect, Logger} import org.http4s.{Method, Request, Response, Uri} +/** Static utility methods to help work with http4s + */ object Http4sUtils { - def mkClient[F[_]: Concurrent: Async](c: Client[F]): Client[F] = - withRedirect(withLogging(c)) + /** Create a full-fledged http4s client from a base client object + * + * @param client Base http4s client + * @tparam F Type of the data managed by the client + * @return The client passed to the function with additional functionalities (follow redirects and logging) + */ + def mkClient[F[_]: Concurrent: Async](client: Client[F]): Client[F] = + withRedirect(withLogging(client)) - def withRedirect[F[_]: Concurrent](c: Client[F]): Client[F] = - FollowRedirect(10, _ => true)(c) + /** Create a redirecting http4s client from a base client object + * + * @param client Base http4s client + * @param maxRedirects Maximum number of redirects the client will follow + * @tparam F Type of the data managed by the client + * @return The client passed to the function with additional functionalities (follow redirects) + */ + def withRedirect[F[_]: Concurrent]( + client: Client[F], + maxRedirects: Int = 10 + ): Client[F] = + FollowRedirect(maxRedirects, _ => true)(client) + /** Create a logging http4s client from a base client object + * + * @param client Base http4s client + * @tparam F Type of the data managed by the client + * @return The client passed to the function with additional functionalities (logging data) + */ def withLogging[F[_]: Concurrent: Async](client: Client[F]): Client[F] = Logger(logHeaders = true, logBody = true, _ => false)(client) + /** Given a URI and an http4s client, fetch the URI contents + * + * @param uri URI with the resource to be resolved + * @param client Http4s client object that will fetch the resource + * @tparam F Type of the data managed by the client + * @return Either the body of the resource in the URI as a data Stream (using FS2) or an error message + */ def 
resolveStream[F[_]: Monad: Concurrent]( uri: Uri, client: Client[F] @@ -27,11 +58,19 @@ object Http4sUtils { client.toHttpApp(req).flatMap(resp => getBody(uri, resp)) } - def getBody[F[_]: Monad: Concurrent]( + /** Given a client response, extract the response body from it + * + * @param uri URI + * @param response Response object + * @tparam F Type of the data contained in the response + * @return Either the client's response body as plain text or an error message + */ + private def getBody[F[_]: Monad: Concurrent]( uri: Uri, - r: Response[F] - ): F[Either[String, Stream[F, String]]] = - if(r.status.isSuccess) r.bodyText.asRight.pure[F] - else s"Status error fetching $uri: ${r.status}".asLeft.pure[F] + response: Response[F] + ): F[Either[String, Stream[F, String]]] = { + if(response.status.isSuccess) response.bodyText.asRight.pure[F] + else s"Status error fetching $uri: ${response.status}".asLeft.pure[F] + } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala index c24b56b9..a43825c9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala @@ -1,28 +1,54 @@ package es.weso.rdfshape.server.api.utils + import cats._ import cats.data._ import cats.effect._ import cats.implicits._ +/** Static utility methods to help work with Optional and Either types + */ object OptEitherF { + /** Given an input optional value (type A) and a conversor function (A => Either(String, B)), + * attempt to convert the data and return an optional value (type B) + * + * @param maybe Input data, optional value + * @param function Conversion function from the input type to the output type + * @tparam A Encapsulated type of the input data + * @tparam B Encapsulated type of the output data + * @return Optional value with the conversion result + */ 
def optEither2f[A, B]( maybe: Option[A], - fn: A => Either[String, B] + function: A => Either[String, B] ): IO[Option[B]] = maybe match { case None => IO.pure(None) - case Some(v) => + case Some(value) => ApplicativeError[IO, Throwable].fromEither( - fn(v).map(Some(_)).leftMap(e => new RuntimeException(s"Error: $e")) + // "FUNCTION" returns an either from the value in the option. + function(value) + .map(Some(_)) // If Either is a right, it is mapped to an Option type + .leftMap(e => + new RuntimeException(s"Error: $e") + ) // If Left, an exception in thrown ) } + /** Given an input optional value (type A) and a conversor function (A => Either(String, B)), + * attempt to convert the data and return an either value (type B) + * + * @param maybe Input data, optional value + * @param function Conversion function from the input type to the output type + * @tparam A Encapsulated type of the input data + * @tparam B Encapsulated type of the output data + * @return Either value with the conversion result/error + */ def optEither2es[A, B]( maybe: Option[A], - fn: A => Either[String, B] + function: A => Either[String, B] ): EitherT[IO, String, Option[B]] = maybe match { - case None => EitherT.pure(None) - case Some(v) => EitherT.fromEither(fn(v).map(Some(_))) + case None => EitherT.pure(None) + case Some(value) => EitherT.fromEither(function(value).map(Some(_))) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala index f6396174..b85aa72d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala @@ -15,7 +15,7 @@ import java.io.{OutputStream, StringWriter} import java.nio.charset.Charset import java.nio.charset.StandardCharsets.UTF_8 -/** Utilities for working with RDF data and its extraction from remote source +/** Utilities for working with RDF 
data and its extraction from remote sources */ object Streams extends LazyLogging { @@ -43,44 +43,6 @@ object Streams extends LazyLogging { } - /** Generic function for private use. Given an RDF-extracting function, executes it while checking for errors and closing all resources used in the process. - * - * @param uri URI to read from - * @param lang Output RDF syntax (turtle, n-triples...) - * @param getRdfLogic Logic in charge of extracting RDF from sources - * @param encoding Encoding with which the data extracted is stored - * @return String representation of the RDF data extracted (in the specified language and encoding) - */ - private def getRdf( - uri: Uri, - lang: Lang, - getRdfLogic: (StringWriter, OutputStream, StreamRDF) => IO[String], - encoding: Charset = UTF_8 - ): IO[String] = { - - /* Get the necessary elements (writer, streams, etc.) to read the RDF data - * and store it in plain text if needed. */ - val streamsIOElements = StreamsIOElements(lang, encoding) - val (stringWriter, outputStream, rdfStream) = - StreamsIOElements.unapply(streamsIOElements) - - /* Extract the String representation of the URI and pick up the data from - * the initial StringWriter. - * DATA => StreamRDF => OutputStream => StringWriter */ - try { - getRdfLogic(stringWriter, outputStream, rdfStream) - } catch { - // Log errors before throwing - case e: Throwable => - logger.error(s"Error parsing RDF data from $uri: ${e.getMessage}") - throw e - } finally { - // Always close the output stream - outputStream.close() - } - - } - /** @param uri URI to read from * @param lang Output RDF syntax (turtle, n-triples...) * @return Graphed RDF data from a remote URI in plain text using the specified syntax @@ -138,6 +100,44 @@ object Streams extends LazyLogging { } ) + /** Generic function for private use. Given an RDF-extracting function, executes it while checking for errors and closing all resources used in the process. 
+ * + * @param uri URI to read from + * @param lang Output RDF syntax (turtle, n-triples...) + * @param getRdfLogic Logic in charge of extracting RDF from sources + * @param encoding Encoding with which the data extracted is stored + * @return String representation of the RDF data extracted (in the specified language and encoding) + */ + private def getRdf( + uri: Uri, + lang: Lang, + getRdfLogic: (StringWriter, OutputStream, StreamRDF) => IO[String], + encoding: Charset = UTF_8 + ): IO[String] = { + + /* Get the necessary elements (writer, streams, etc.) to read the RDF data + * and store it in plain text if needed. */ + val streamsIOElements = StreamsIOElements(lang, encoding) + val (stringWriter, outputStream, rdfStream) = + StreamsIOElements.unapply(streamsIOElements) + + /* Extract the String representation of the URI and pick up the data from + * the initial StringWriter. + * DATA => StreamRDF => OutputStream => StringWriter */ + try { + getRdfLogic(stringWriter, outputStream, rdfStream) + } catch { + // Log errors before throwing + case e: Throwable => + logger.error(s"Error parsing RDF data from $uri: ${e.getMessage}") + throw e + } finally { + // Always close the output stream + outputStream.close() + } + + } + } /** Data class used as a factory for the repetitive task of instantiating diff --git a/version.sbt b/version.sbt index bc2fdd9d..e0b9a265 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -ThisBuild / version := "0.0.75a-SNAPSHOT" \ No newline at end of file +ThisBuild / version := "0.0.76a-SNAPSHOT" From 8d5e8727b8289cf994e3c150ee72ccad9d720374 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Wed, 18 Aug 2021 19:03:27 +0200 Subject: [PATCH 13/32] Begin refactoring API routes. 
--- .../es/weso/rdfshape/server/Server.scala | 34 ++- .../api/results/DataConversionResult.scala | 2 +- .../api/results/DataExtractResult.scala | 2 +- .../server/api/results/DataInfoResult.scala | 2 +- .../api/results/SchemaConversionResult.scala | 2 +- .../api/results/ShapeMapInfoResult.scala | 2 +- .../server/api/routes/ApiHelper.scala | 276 ++---------------- .../server/api/routes/api/APIService.scala | 9 + .../server/api/routes/data/DataService.scala | 9 + .../api/routes/endpoint/EndpointService.scala | 8 + .../api/routes/fetch/FetchService.scala | 8 + .../routes/permalink/PermalinkService.scala | 9 + .../api/routes/schema/logic/SchemaInfo.scala | 66 +++++ .../schema/{ => logic}/SchemaInfoResult.scala | 2 +- .../schema/logic/SchemaOperations.scala | 99 +++++++ .../schema/{ => service}/SchemaParam.scala | 2 +- .../schema/{ => service}/SchemaService.scala | 142 ++++++++- .../schema/{ => service}/SchemaValue.scala | 2 +- .../{ => service}/TriggerModeParam.scala | 2 +- .../api/routes/shapemap/ShapeMapService.scala | 12 +- .../server/api/routes/shex/ShExService.scala | 8 + .../routes/wikibase/WikibaseSchemaParam.scala | 2 +- .../api/routes/wikibase/WikidataService.scala | 10 +- .../server/utils/json/JsonUtils.scala | 98 +++++++ .../server/utils/json/JsonUtilsServer.scala | 47 --- 25 files changed, 517 insertions(+), 338 deletions(-) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/{ => logic}/SchemaInfoResult.scala (74%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/{ => service}/SchemaParam.scala (99%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/{ => service}/SchemaService.scala (78%) rename 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/{ => service}/SchemaValue.scala (85%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/{ => service}/TriggerModeParam.scala (99%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index 5d7dc789..81369c80 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -9,7 +9,7 @@ import es.weso.rdfshape.server.api.routes.data.DataService import es.weso.rdfshape.server.api.routes.endpoint.EndpointService import es.weso.rdfshape.server.api.routes.fetch.FetchService import es.weso.rdfshape.server.api.routes.permalink.PermalinkService -import es.weso.rdfshape.server.api.routes.schema.SchemaService +import es.weso.rdfshape.server.api.routes.schema.service.SchemaService import es.weso.rdfshape.server.api.routes.shapemap.ShapeMapService import es.weso.rdfshape.server.api.routes.shex.ShExService import es.weso.rdfshape.server.api.routes.wikibase.WikidataService @@ -34,11 +34,12 @@ import scala.util.{Failure, Success, Try} * A single Server is meant to be running simultaneously. * This class is private and closed to external usage modification, server initialization * is managed via its companion object. 
- * @param port Port where the API server is exposed - * @param https Whether if the server should try to create a secure context or not, given the user's - * environment configuration + * + * @param port Port where the API server is exposed + * @param https Whether if the server should try to create a secure context or not, given the user's + * environment configuration * @param requestTimeout Http4s application request timeout - * @param idleTimeout Http4s application idle timeout + * @param idleTimeout Http4s application idle timeout */ private class Server( val port: Int, @@ -49,18 +50,20 @@ private class Server( with LazyLogging { /** Start running the server, using the configuration stored in the instance attributes + * * @param args Arguments passed to the IOApp. Should be an empty list since the arguments have been processed beforehand. * @return Application's exit code */ override def run(args: List[String]): IO[ExitCode] = { println(s""" - |Starting server on port $port... - |Serving via ${if(https) "HTTPS" else "HTTP"}... - |""".stripMargin) + |Starting server on port $port... + |Serving via ${if(https) "HTTPS" else "HTTP"}... + |""".stripMargin) stream(getSslContext).compile.drain.as(ExitCode.Success) } /** Create an instance of a secure SSLContext for the application. 
+ * * @return None if no HTTPS is required; an SSLContext if HTTPS is required and the context could be created * @see {@link es.weso.rdfshape.server.utils.secure.SSLHelper} * @note If an error occurs creating the SSLContext, program termination will occur @@ -81,6 +84,7 @@ private class Server( } /** Start an infinite stream in charge of processing incoming requests + * * @param sslContext SSLContext used by the application (may be empty) * @return Application's exit code */ @@ -97,7 +101,8 @@ private class Server( }.drain /** Create the final http4s server - * @param client Http4s' client in charge of the application + * + * @param client Http4s' client in charge of the application * @param sslContext SSLContext used by the application * @return The final http4s server, with the proper application and SSLContext bound */ @@ -119,6 +124,7 @@ private class Server( } /** Create an http4s application object + * * @param client Http4s' client in charge of the application * @return Http4s' application with the given client and a request-logging middleware */ @@ -168,7 +174,9 @@ object Server { ) // Act as a server factory + /** Apply method, used as a factory for Server objects + * * @param port Port where the API server will be exposed */ def apply(port: Int): Unit = { @@ -176,7 +184,8 @@ object Server { } /** Apply method, used as a factory for Server objects - * @param port Port where the API server will be exposed + * + * @param port Port where the API server will be exposed * @param https Whether if the server will try to create a secure context or not */ def apply(port: Int, https: Boolean): Unit = { @@ -184,14 +193,13 @@ object Server { s.main(Array.empty[String]) } - // Linked routes /** Configure the http4s application to use the specified sources as API routes */ private def routesService(client: Client[IO]): HttpRoutes[IO] = CORS( - SchemaService(client).routes <+> - APIService(client).routes <+> + APIService(client).routes <+> DataService(client).routes <+> 
+ SchemaService(client).routes <+> ShExService(client).routes <+> ShapeMapService(client).routes <+> WikidataService(client).routes <+> diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala index 9d4483d8..9739672a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala @@ -1,7 +1,7 @@ package es.weso.rdfshape.server.api.results import es.weso.rdfshape.server.api.format.DataFormat -import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ +import es.weso.rdfshape.server.utils.json.JsonUtils._ import io.circe.Json /** Data class representing the output of a conversion operation diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala index b22caeda..3791848e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala @@ -6,7 +6,7 @@ import es.weso.rdfshape.server.api.routes.Defaults.{ defaultSchemaEngine, defaultSchemaFormat } -import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ +import es.weso.rdfshape.server.utils.json.JsonUtils._ import es.weso.schema.Schema import es.weso.shapemaps.ResultShapeMap import io.circe.Json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala index b6357136..b43a7c64 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala @@ -4,7 +4,7 @@ import es.weso.rdf.PrefixMap import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.routes.ApiHelper -import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ +import es.weso.rdfshape.server.utils.json.JsonUtils._ import io.circe.Json /** Data class representing the output of an "information" operation diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala index dcc9c63f..13875930 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala @@ -1,6 +1,6 @@ package es.weso.rdfshape.server.api.results -import es.weso.rdfshape.server.utils.json.JsonUtilsServer.maybeField +import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.shapemaps.ShapeMap import io.circe.Json import io.circe.syntax._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala index 142fc4ee..dacd3d86 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala @@ -1,6 +1,6 @@ package es.weso.rdfshape.server.api.results -import es.weso.rdfshape.server.utils.json.JsonUtilsServer._ +import es.weso.rdfshape.server.utils.json.JsonUtils._ import es.weso.shapemaps._ import io.circe.Json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala index b8a04abf..399f2e19 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala @@ -7,23 +7,21 @@ import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.{IRI, Lang} import es.weso.rdf.{PrefixMap, RDFBuilder, RDFReasoner} import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.api.results.{ - DataExtractResult, - DataInfoResult, - SchemaConversionResult -} +import es.weso.rdfshape.server.api.results.{DataExtractResult, DataInfoResult} import es.weso.rdfshape.server.api.routes.Defaults.{ defaultSchemaEngine, defaultSchemaFormat, defaultShapeLabel } import es.weso.rdfshape.server.api.routes.data.DataParam -import es.weso.rdfshape.server.api.routes.schema.{SchemaParam, TriggerModeParam} -import es.weso.rdfshape.server.utils.json.JsonUtilsServer.maybeField +import es.weso.rdfshape.server.api.routes.schema.service.{ + SchemaParam, + TriggerModeParam +} +import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.schema._ import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} -import es.weso.shacl.converter.Shacl2ShEx -import es.weso.shapemaps.{NodeSelector, ResultShapeMap, ShapeMap} +import es.weso.shapemaps.{NodeSelector, ResultShapeMap} import es.weso.uml.{PlantUMLOptions, Schema2UML, UML} import es.weso.utils.IOUtils._ import io.circe.Json @@ -34,12 +32,19 @@ import org.http4s.client.{Client, JavaNetClientBuilder} */ object ApiHelper extends LazyLogging { - private val NoTime = 0L - - private val options = PlantUMLOptions( + /** Additional options passed down to PlantUML when generating diagrams on the fly. 
+ */ + val umlOptions = PlantUMLOptions( watermark = Some("Generated by [[https://rdfshape.weso.es rdfshape]]") ) + /** Long value used as a "no time" value + */ + private val NoTime = 0L + + /** @param result Schema validation result + * @return JSON representation of the schema validation result + */ def result2json(result: Result): IO[Json] = for { emptyRes <- RDFAsJenaModel.empty json <- emptyRes.use(emptyBuilder => result.toJson(emptyBuilder)) @@ -51,15 +56,20 @@ object ApiHelper extends LazyLogging { */ private[api] def getBase: Option[String] = Defaults.relativeBase.map(_.str) - private[api] def prefixMap2Json(pm: PrefixMap): Json = { - Json.fromFields(pm.pm.map { case (prefix, iri) => + /** @param prefixMap Input prefix map + * @return JSON representation of the prefix map + */ + private[api] def prefixMap2Json(prefixMap: PrefixMap): Json = { + Json.fromFields(prefixMap.pm.map { case (prefix, iri) => (prefix.str, Json.fromString(iri.getLexicalForm)) }) } + /** @return For a given resource address, attempt to return its contents. 
+ */ + // TODO: handle timeouts and remove unsafe code private[api] def resolveUri(baseUri: Uri, urlStr: String): IO[String] = { logger.info(s"Handling Uri: $urlStr") - // TODO: handle timeouts Uri .fromString(urlStr) .fold( @@ -85,37 +95,6 @@ object ApiHelper extends LazyLogging { ) } - private[api] def schemaConvert( - optSchema: Option[String], - optSchemaFormat: Option[String], - optSchemaEngine: Option[String], - optTargetSchemaFormat: Option[String], - optTargetSchemaEngine: Option[String], - base: Option[String] - ): IO[Option[String]] = - optSchema match { - case None => IO(None) - case Some(schemaStr) => - val schemaFormat = - optSchemaFormat.getOrElse(Schemas.defaultSchemaFormat) - val schemaEngine = optSchemaEngine.getOrElse(Schemas.defaultSchemaName) - /* val x: EitherT[IO,String,Schema] = Schemas.fromString(schemaStr, - * schemaFormat, schemaEngine, base) */ - for { - schema <- Schemas.fromString( - schemaStr, - schemaFormat, - schemaEngine, - base - ) - result <- schema.convert( - optTargetSchemaFormat, - optTargetSchemaEngine, - base.map(IRI(_)) - ) - } yield Some(result) - } - private[api] def validateStr( data: String, optDataFormat: Option[DataFormat], @@ -290,90 +269,6 @@ object ApiHelper extends LazyLogging { } } - private[api] def convertSchema( - schema: Schema, - schemaStr: Option[String], - schemaFormat: SchemaFormat, - schemaEngine: String, - optTargetSchemaFormat: Option[SchemaFormat], - optTargetSchemaEngine: Option[String] - ): IO[SchemaConversionResult] = { - val result: IO[SchemaConversionResult] = for { - pair <- doSchemaConversion( - schema, - optTargetSchemaFormat.map(_.name), - optTargetSchemaEngine - ) - sourceStr <- schemaStr match { - case None => schema.serialize(schemaFormat.name) - case Some(source) => IO(source) - } - (resultStr, resultShapeMap) = pair - } yield SchemaConversionResult.fromConversion( - sourceStr, - schemaFormat.name, - schemaEngine, - optTargetSchemaFormat.map(_.name), - optTargetSchemaEngine, - resultStr, - 
resultShapeMap - ) - - for { - either <- result.attempt - } yield either.fold( - err => SchemaConversionResult.fromMsg(s"error converting schema: $err"), - identity - ) - } - - private def doSchemaConversion( - schema: Schema, - targetSchemaFormat: Option[String], - optTargetSchemaEngine: Option[String] - ): IO[(String, ShapeMap)] = { - logger.debug( - s"Schema conversion, name: ${schema.name}, targetSchema: $targetSchemaFormat" - ) - val default = for { - str <- schema.convert(targetSchemaFormat, optTargetSchemaEngine, None) - } yield (str, ShapeMap.empty) - schema match { - case shacl: ShaclexSchema => - optTargetSchemaEngine.map(_.toUpperCase()) match { - case Some("SHEX") => - logger.debug("Schema conversion: SHACLEX -> SHEX") - Shacl2ShEx - .shacl2ShEx(shacl.schema) - .fold( - e => - IO.raiseError( - new RuntimeException( - s"Error converting SHACL -> ShEx: $e" - ) - ), - pair => { - val (schema, shapeMap) = pair - logger.debug(s"shapeMap: $shapeMap") - for { - emptyBuilder <- RDFAsJenaModel.empty - str <- emptyBuilder.use(builder => - es.weso.shex.Schema.serialize( - schema, - targetSchemaFormat.getOrElse("SHEXC"), - None, - builder - ) - ) - } yield (str, shapeMap) - } - ) - case _ => default - } - case _ => default - } - } - private[api] def shapeInfer( rdf: RDFReasoner, optNodeSelector: Option[String], @@ -413,7 +308,7 @@ object ApiHelper extends LazyLogging { case None => IO.pure(None) case Some(pair) => val (uml, warnings) = pair - uml.toSVG(options).map(Some(_)) + uml.toSVG(umlOptions).map(Some(_)) }) str <- io2es(schemaInfer.serialize(schemaFormat.name)) } yield Json.fromFields( @@ -428,7 +323,7 @@ object ApiHelper extends LazyLogging { "uml", (pair: (UML, List[String])) => { val (uml, warnings) = pair - Json.fromString(uml.toPlantUML(options)) + Json.fromString(uml.toPlantUML(umlOptions)) } ) ++ maybeField(maybeSvg, "svg", Json.fromString) @@ -482,121 +377,4 @@ object ApiHelper extends LazyLogging { ) } - private[api] def schemaInfo(schema: Schema): 
Json = { - val info = schema.info - SchemaInfoReply( - Some(info.schemaName), - Some(info.schemaEngine), - info.isWellFormed, - schema.shapes, - schema.pm.pm.toList.map { case (prefix, iri) => (prefix.str, iri.str) }, - info.errors - ).toJson - } - - private[api] def schemaCytoscape(schema: Schema): Json = { - val eitherJson = for { - pair <- Schema2UML.schema2UML(schema) - } yield { - val (uml, warnings) = pair - uml.toJson - } - eitherJson.fold( - e => - Json.fromFields( - List( - ("error", Json.fromString(s"Error converting to schema 2 JSON: $e")) - ) - ), - identity - ) - } - - private[api] def schemaVisualize(schema: Schema): IO[Json] = for { - pair <- schema2SVG(schema) - } yield { - val (svg, plantuml) = pair - val info = schema.info - val fields: List[(String, Json)] = - List( - ("schemaName", Json.fromString(info.schemaName)), - ("schemaEngine", Json.fromString(info.schemaEngine)), - ("wellFormed", Json.fromBoolean(info.isWellFormed)), - ("errors", Json.fromValues(info.errors.map(Json.fromString))), - ("parsed", Json.fromString("Parsed OK")), - ("svg", Json.fromString(svg)), - ("plantUML", Json.fromString(plantuml)) - ) - Json.fromFields(fields) - } - - private[api] def schema2SVG(schema: Schema): IO[(String, String)] = { - val eitherUML = Schema2UML.schema2UML(schema) - eitherUML.fold( - e => IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")), - pair => { - val (uml, warnings) = pair - logger.debug(s"UML converted: $uml") - (for { - str <- uml.toSVG(options) - } yield { - (str, uml.toPlantUML(options)) - }).handleErrorWith(e => - IO.pure( - ( - s"SVG conversion error: ${e.getMessage}", - uml.toPlantUML(options) - ) - ) - ) - } - ) - } - - private[api] def mkJsonErr(msg: String) = - Json.fromFields(List(("error", Json.fromString(msg)))) - - case class SchemaInfoReply( - schemaName: Option[String], - schemaEngine: Option[String], - wellFormed: Boolean, - shapes: List[String], - shapesPrefixMap: List[(String, String)], - errors: List[String] - ) { 
- def toJson: Json = Json.fromFields( - List( - ("schemaName", schemaName.fold(Json.Null)(Json.fromString)), - ("schemaEngine", schemaEngine.fold(Json.Null)(Json.fromString)), - ("wellFormed", Json.fromBoolean(wellFormed)), - ("shapes", Json.fromValues(shapes.map(Json.fromString))), - ( - "shapesPrefixMap", - Json.fromValues( - shapesPrefixMap.map(pair => - Json.fromFields( - List( - ("prefix", Json.fromString(pair._1)), - ("uri", Json.fromString(pair._2)) - ) - ) - ) - ) - ), - ("errors", Json.fromValues(errors.map(Json.fromString))) - ) - ) - } - - /* private[server] def getSchemaEmbedded(sp: SchemaParam): Boolean = { - * sp.schemaEmbedded match { case Some(true) => true case Some(false) => false - * case None => defaultSchemaEmbedded } } */ - - object SchemaInfoReply { - def fromError(msg: String): SchemaInfoReply = { - logger.debug(s"SchemaInfoReply from $msg") - SchemaInfoReply(None, None, wellFormed = false, List(), List(), List(msg)) - } - } - } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala index 28e63f3b..97fd2683 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala @@ -12,10 +12,13 @@ import org.http4s.server.staticcontent.resourceServiceBuilder import org.log4s.getLogger /** API service to handle multiple general tasks (server status, etc.) 
+ * * @param client HTTP4S client object */ class APIService(client: Client[IO]) extends Http4sDsl[IO] with ApiService { + /** Describe the API routes handled by this service and the actions performed on each of them + */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case req @ GET -> Root / `api` / "health" => @@ -33,6 +36,12 @@ class APIService(client: Client[IO]) extends Http4sDsl[IO] with ApiService { } object APIService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new API Service + */ def apply(client: Client[IO]): APIService = new APIService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala index 6ab177fd..c8866329 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala @@ -25,10 +25,13 @@ import org.http4s.multipart.Multipart import scala.util.Try /** API Service to handle RDF data + * * @param client HTTP4S client object */ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { + /** Describe the API routes handled by this service and the actions performed on each of them + */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { // Input RDF data formats include html-microdata, turtle, json-ld... 
@@ -278,6 +281,12 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } object DataService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new Data Service + */ def apply(client: Client[IO]): DataService = new DataService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala index 0b880e6b..976d1ff7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala @@ -33,6 +33,8 @@ class EndpointService(client: Client[IO]) private val relativeBase = Defaults.relativeBase + /** Describe the API routes handled by this service and the actions performed on each of them + */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case req @ POST -> Root / `api` / "endpoint" / "query" => @@ -136,6 +138,12 @@ class EndpointService(client: Client[IO]) } object EndpointService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new Endpoint Service + */ def apply(client: Client[IO]): EndpointService = new EndpointService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala index 1b9adccb..c22ae63a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala @@ -10,6 +10,8 @@ import scalaj.http.Http class FetchService() extends Http4sDsl[IO] { + /** Describe the API routes handled by this service and the actions performed on each of them + */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { // 
Query URL and return the response @@ -32,6 +34,12 @@ class FetchService() extends Http4sDsl[IO] { } object FetchService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new Fetch Service + */ def apply(client: Client[IO]): FetchService = new FetchService() } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala index 6ba1b863..2232385f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala @@ -38,6 +38,9 @@ class PermalinkService(client: Client[IO]) // Utils for url generation val random: Random.type = Random + + /** Describe the API routes handled by this service and the actions performed on each of them + */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { // Insert a reference to the permalink in DB @@ -226,5 +229,11 @@ class PermalinkService(client: Client[IO]) } object PermalinkService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new Permalink Service + */ def apply(client: Client[IO]): PermalinkService = new PermalinkService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala new file mode 100644 index 00000000..aa91a5e7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala @@ -0,0 +1,66 @@ +package es.weso.rdfshape.server.api.routes.schema.logic + +import com.typesafe.scalalogging.LazyLogging +import io.circe.Json + +/** Data class representing the data contained in a schema + * + * @param schemaName Name of the schema + * @param schemaEngine Engine 
of the schema + * @param wellFormed Is the schema well formed + * @param shapes List of shapes in the schema + * @param shapesPrefixMap Prefix map of the shapes in the schema + * @param errors Errors in the schema + */ +private[schema] case class SchemaInfo( + schemaName: Option[String], + schemaEngine: Option[String], + wellFormed: Boolean, + shapes: List[String], + shapesPrefixMap: List[(String, String)], + errors: List[String] +) { + + /** Transform a Schema Info result to a JSON representation + * + * @return JSON representation of the schema information + */ + def toJson: Json = Json.fromFields( + List( + ("schemaName", schemaName.fold(Json.Null)(Json.fromString)), + ("schemaEngine", schemaEngine.fold(Json.Null)(Json.fromString)), + ("wellFormed", Json.fromBoolean(wellFormed)), + ("shapes", Json.fromValues(shapes.map(Json.fromString))), + ( + "shapesPrefixMap", + Json.fromValues( + shapesPrefixMap.map(pair => + Json.fromFields( + List( + ("prefix", Json.fromString(pair._1)), + ("uri", Json.fromString(pair._2)) + ) + ) + ) + ) + ), + ("errors", Json.fromValues(errors.map(Json.fromString))) + ) + ) +} + +/** Static utilities of the SchemaInfoReply class + */ +object SchemaInfo extends LazyLogging { + + /** Create an empty SchemaInfoReply with an error message. 
+ * Used when errors occur extracting the schema information + * + * @param msg Message attached to the failing schema + * @return Empty SchemaInfoReply object with no data except for an error message + */ + def fromError(msg: String): SchemaInfo = { + logger.debug(s"SchemaInfoReply from $msg") + SchemaInfo(None, None, wellFormed = false, List(), List(), List(msg)) + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfoResult.scala similarity index 74% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaInfoResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfoResult.scala index 716cd96e..41daf028 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfoResult.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.schema +package es.weso.rdfshape.server.api.routes.schema.logic import io.circe.Json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala new file mode 100644 index 00000000..2f26b6ad --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -0,0 +1,99 @@ +package es.weso.rdfshape.server.api.routes.schema.logic + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.ApiHelper.umlOptions +import es.weso.schema.Schema +import es.weso.uml.Schema2UML +import io.circe.Json + +/** Static utilities used by the {@link es.weso.rdfshape.server.api.routes.schema.service.SchemaService} + * 
to operate on schemas + */ +private[schema] object SchemaOperations extends LazyLogging { + + /** Obtain the information from an schema + * + * @param schema Input schema + * @return Schema information as a data instance of {@link SchemaInfo}. + */ + def schemaInfo(schema: Schema): SchemaInfo = { + val info = schema.info + SchemaInfo( + Some(info.schemaName), + Some(info.schemaEngine), + info.isWellFormed, + schema.shapes, + schema.pm.pm.toList.map { case (prefix, iri) => (prefix.str, iri.str) }, + info.errors + ) + } + + /** @param schema Input schema + * @return JSON representation of the schema as a Cytoscape graph to be drawn on clients (or an error message) + */ + // TODO: return another status code on failure, so that clients can handle it + def schemaCytoscape(schema: Schema): Json = { + val eitherJson = for { + pair <- Schema2UML.schema2UML(schema) + } yield { + val (uml, warnings) = pair + uml.toJson + } + eitherJson.fold( + e => + Json.fromFields( + List( + ("error", Json.fromString(s"Error converting to schema 2 JSON: $e")) + ) + ), + identity + ) + } + + /** @param schema Input schema + * @return JSON representation of the schema as a Graphviz graph to be drawn on clients (or an error message) + */ + // TODO: return another status code on failure, so that clients can handle it + def schemaVisualize(schema: Schema): IO[Json] = for { + pair <- schema2SVG(schema) + } yield { + val (svg, plantuml) = pair + val info = schema.info + val fields: List[(String, Json)] = + List( + ("schemaName", Json.fromString(info.schemaName)), + ("schemaEngine", Json.fromString(info.schemaEngine)), + ("wellFormed", Json.fromBoolean(info.isWellFormed)), + ("errors", Json.fromValues(info.errors.map(Json.fromString))), + ("parsed", Json.fromString("Parsed OK")), + ("svg", Json.fromString(svg)), + ("plantUML", Json.fromString(plantuml)) + ) + Json.fromFields(fields) + } + + def schema2SVG(schema: Schema): IO[(String, String)] = { + val eitherUML = Schema2UML.schema2UML(schema) + 
eitherUML.fold( + e => IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")), + pair => { + val (uml, warnings) = pair + logger.debug(s"UML converted: $uml") + (for { + str <- uml.toSVG(umlOptions) + } yield { + (str, uml.toPlantUML(umlOptions)) + }).handleErrorWith(e => + IO.pure( + ( + s"SVG conversion error: ${e.getMessage}", + uml.toPlantUML(umlOptions) + ) + ) + ) + } + ) + } + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala similarity index 99% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala index 48ce9934..d59506a5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.schema +package es.weso.rdfshape.server.api.routes.schema.service import cats.effect._ import cats.implicits._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala similarity index 78% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index cf1894db..02f00691 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.schema +package 
es.weso.rdfshape.server.api.routes.schema.service import cats.data._ import cats.effect._ @@ -13,9 +13,22 @@ import es.weso.rdfshape.server.api.routes.ApiHelper._ import es.weso.rdfshape.server.api.routes.Defaults._ import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ import es.weso.rdfshape.server.api.routes.data.DataParam +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.{ + schema2SVG, + schemaCytoscape, + schemaInfo, + schemaVisualize +} +import es.weso.rdfshape.server.api.routes.schema.logic.{ + SchemaInfo, + SchemaInfoResult +} import es.weso.rdfshape.server.api.routes.{Defaults, PartsMap} import es.weso.rdfshape.server.api.utils.OptEitherF._ +import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.schema._ +import es.weso.shacl.converter.Shacl2ShEx +import es.weso.shapemaps.ShapeMap import es.weso.utils.IOUtils._ import io.circe._ import io.circe.generic.auto._ @@ -28,10 +41,13 @@ import org.http4s.headers._ import org.http4s.multipart.Multipart /** API service to handle schema-related operations + * * @param client HTTP4S client object */ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { + /** Describe the API routes handled by this service and the actions performed on each of them + */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / `api` / "schema" / "engines" => @@ -98,7 +114,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { .fromString(schemaStr, schemaFormat, schemaEngine, None) .attempt r <- either.fold( - e => errJson(s"Error reading schema: $e\nString: $schemaStr"), + e => responseJson(s"Error reading schema: $e\nString: $schemaStr"), schema => { val shapes: List[String] = schema.shapes val jsonShapes = Json.fromValues(shapes.map(Json.fromString)) @@ -124,13 +140,13 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { schemaPair <- SchemaParam.mkSchema(partsMap, None) 
(schema, sp) = schemaPair } yield { - schemaInfo(schema) + schemaInfo(schema).toJson } for { e <- r.attempt v <- e.fold( t => { - Ok(SchemaInfoReply.fromError(t.getMessage).toJson) + Ok(SchemaInfo.fromError(t.getMessage).toJson) }, Ok(_) ) @@ -138,7 +154,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } - case req @ GET -> Root / `api` / "schema" / "convert" :? + case GET -> Root / `api` / "schema" / "convert" :? OptSchemaParam(optSchema) +& SchemaFormatParam(optSchemaFormat) +& SchemaEngineParam(optSchemaEngine) +& @@ -159,7 +175,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { .fromString(schemaStr, schemaFormat.name, schemaEngine, None) .attempt r <- either.fold( - e => errJson(s"Error reading schema: $e\nString: $schemaStr"), + e => responseJson(s"Error reading schema: $e\nString: $schemaStr"), schema => { for { optTargetSchemaFormat <- optEither2f( @@ -233,7 +249,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } for { e <- r.attempt - v <- e.fold(t => errJson(t.getMessage), Ok(_)) + v <- e.fold(t => responseJson(t.getMessage), Ok(_)) } yield v } } @@ -251,7 +267,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } for { e <- r.attempt - v <- e.fold(t => errJson(t.getMessage), Ok(_)) + v <- e.fold(t => responseJson(t.getMessage), Ok(_)) } yield v } } @@ -297,7 +313,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { for { either <- run_es(r) v <- either.fold( - s => errJson(s"Error obtaining schema $s"), + s => responseJson(s"Error obtaining schema $s"), svg => { Ok(svg).map( _.withContentType(`Content-Type`(MediaType.image.`svg+xml`)) @@ -337,7 +353,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } yield (df, sf) either match { - case Left(str) => errJson(str) + case Left(str) => responseJson(str, status = BadRequest) case Right(pair) => val 
(optDataFormat, optSchemaFormat) = pair val baseUri = req.uri @@ -396,8 +412,6 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { optActiveShapeMapTab ) - // val (dataStr, eitherRDF) = - val eitherResult: IO[Response[IO]] = for { pairData <- io2f(dp.getData(relativeBase)) (dataStr, resourceRdf) = pairData @@ -460,16 +474,106 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { for { e <- r.attempt - v <- e.fold(t => errJson(t.getMessage), json => Ok(json)) + v <- e.fold(t => responseJson(t.getMessage), json => Ok(json)) } yield v } } } private val relativeBase = Defaults.relativeBase - // TODO: Move this method to a more generic place... - private def errJson(msg: String): IO[Response[IO]] = - Ok(mkJsonErr(msg)) // + /** Given an input schema, convert it to another output schema with the parameters specified. + * + * @param schema Input schema + * @param schemaStr Input schema contents + * @param schemaFormat Input schema format + * @param schemaEngine Input schema engine + * @param optTargetSchemaFormat Output schema desired format + * @param optTargetSchemaEngine Output schema desired engine + * @return Optionally, the raw output schema contents + */ + private[schema] def convertSchema( + schema: Schema, + schemaStr: Option[String], + schemaFormat: SchemaFormat, + schemaEngine: String, + optTargetSchemaFormat: Option[SchemaFormat], + optTargetSchemaEngine: Option[String] + ): IO[SchemaConversionResult] = { + val result: IO[SchemaConversionResult] = for { + pair <- doSchemaConversion( + schema, + optTargetSchemaFormat.map(_.name), + optTargetSchemaEngine + ) + sourceStr <- schemaStr match { + case None => schema.serialize(schemaFormat.name) + case Some(source) => IO(source) + } + (resultStr, resultShapeMap) = pair + } yield SchemaConversionResult.fromConversion( + sourceStr, + schemaFormat.name, + schemaEngine, + optTargetSchemaFormat.map(_.name), + optTargetSchemaEngine, + resultStr, + resultShapeMap 
+ ) + + for { + either <- result.attempt + } yield either.fold( + err => SchemaConversionResult.fromMsg(s"error converting schema: $err"), + identity + ) + } + + private def doSchemaConversion( + schema: Schema, + targetSchemaFormat: Option[String], + optTargetSchemaEngine: Option[String] + ): IO[(String, ShapeMap)] = { + logger.debug( + s"Schema conversion, name: ${schema.name}, targetSchema: $targetSchemaFormat" + ) + val default = for { + str <- schema.convert(targetSchemaFormat, optTargetSchemaEngine, None) + } yield (str, ShapeMap.empty) + schema match { + case shacl: ShaclexSchema => + optTargetSchemaEngine.map(_.toUpperCase()) match { + case Some("SHEX") => + logger.debug("Schema conversion: SHACLEX -> SHEX") + Shacl2ShEx + .shacl2ShEx(shacl.schema) + .fold( + e => + IO.raiseError( + new RuntimeException( + s"Error converting SHACL -> ShEx: $e" + ) + ), + pair => { + val (schema, shapeMap) = pair + logger.debug(s"shapeMap: $shapeMap") + for { + emptyBuilder <- RDFAsJenaModel.empty + str <- emptyBuilder.use(builder => + es.weso.shex.Schema.serialize( + schema, + targetSchemaFormat.getOrElse("SHEXC"), + None, + builder + ) + ) + } yield (str, shapeMap) + } + ) + case _ => default + } + case _ => default + } + } private def info(msg: String): EitherT[IO, String, Unit] = EitherT.liftF[IO, String, Unit](IO(logger.info(msg))) @@ -498,6 +602,12 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } object SchemaService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new Schema Service + */ def apply(client: Client[IO]): SchemaService = new SchemaService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaValue.scala similarity index 85% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaValue.scala rename to 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaValue.scala index cdd17497..f2d43a81 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/SchemaValue.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaValue.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.schema +package es.weso.rdfshape.server.api.routes.schema.service import es.weso.rdfshape.server.api.format.SchemaFormat diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/TriggerModeParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala similarity index 99% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/TriggerModeParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala index e43ddfbe..56fc22d6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/TriggerModeParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.schema +package es.weso.rdfshape.server.api.routes.schema.service import cats.effect.IO import cats.implicits._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala index ef4431e3..9bf8e9fe 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala @@ -4,8 +4,8 @@ import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.results.ShapeMapInfoResult import 
es.weso.rdfshape.server.api.routes.ApiDefinitions._ -import es.weso.rdfshape.server.api.routes.ApiHelper._ import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.shapemaps.ShapeMap import io.circe._ import org.http4s._ @@ -22,6 +22,8 @@ class ShapeMapService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { + /** Describe the API routes handled by this service and the actions performed on each of them + */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / `api` / "shapeMap" / "formats" => @@ -36,7 +38,7 @@ class ShapeMapService(client: Client[IO]) ShapeMapParam.mkShapeMap(partsMap) t.attempt.flatMap( _.fold( - e => Ok(mkJsonErr(e.getMessage())), + e => responseJson(e.getMessage, BadRequest), pair => { val (sm, smp) = pair val smi: ShapeMapInfoResult = ShapeMapInfoResult.fromShapeMap( @@ -54,5 +56,11 @@ class ShapeMapService(client: Client[IO]) } object ShapeMapService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new ShapeMap Service + */ def apply(client: Client[IO]): ShapeMapService = new ShapeMapService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala index 596033e2..289292a8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala @@ -11,6 +11,8 @@ import org.http4s.dsl.Http4sDsl class ShExService(client: Client[IO]) extends Http4sDsl[IO] { + /** Describe the API routes handled by this service and the actions performed on each of them + */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / `api` / "shEx" / "formats" => @@ -21,6 +23,12 @@ class ShExService(client: Client[IO]) extends Http4sDsl[IO] { } object 
ShExService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new ShEx Service + */ def apply(client: Client[IO]): ShExService = new ShExService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala index 07ab14f6..f9068afa 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala @@ -3,7 +3,7 @@ package es.weso.rdfshape.server.api.routes.wikibase import cats.effect._ import es.weso.rdf.RDFReasoner import es.weso.rdfshape.server.api.routes.PartsMap -import es.weso.rdfshape.server.api.routes.schema.SchemaParam +import es.weso.rdfshape.server.api.routes.schema.service.SchemaParam import es.weso.rdfshape.server.wikibase._ import es.weso.schema.{Schema, Schemas} import org.http4s._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala index 378bcbf1..9b0cd622 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala @@ -50,6 +50,8 @@ class WikidataService(client: Client[IO]) val defaultContinue = 0 val redirectClient = FollowRedirect(3)(client) + /** Describe the API routes handled by this service and the actions performed on each of them + */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / `api` / "wikidata" / "test" => { @@ -157,7 +159,7 @@ class WikidataService(client: Client[IO]) val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = 
maybeContinue.getOrElse(defaultContinue.toString) - val requestUrl = s"${endpoint.getOrElse("https: //www.wikidata.org")}" + val requestUrl = s"${endpoint.getOrElse("https: //www.wikidata.org")}" val uri = Uri .fromString(requestUrl) .valueOr(throw _) @@ -746,6 +748,12 @@ class WikidataService(client: Client[IO]) } object WikidataService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new Wikidata Service + */ def apply(client: Client[IO]): WikidataService = new WikidataService(client) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala new file mode 100644 index 00000000..5429f6ce --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala @@ -0,0 +1,98 @@ +package es.weso.rdfshape.server.utils.json + +import cats.effect.IO +import io.circe.Json +import org.http4s.circe._ +import org.http4s.dsl.Http4sDsl +import org.http4s.{Response, Status} + +/** Helper utilities to extract JSON from the complex data managed by the API. + */ +object JsonUtils extends Http4sDsl[IO] { + + /** Utility map relating each http4s status to its generating type + */ + private val mapStatusCodes = Map[Status, Any]( + Status.Ok -> Ok, + Status.Created -> Created, + Status.Accepted -> Accepted, + Status.NoContent -> NoContent, + Status.Found -> Found, + Status.NotFound -> NotFound, + Status.NotAcceptable -> NotAcceptable, + Status.NotModified -> NotModified, + Status.Forbidden -> Forbidden, + Status.SeeOther -> SeeOther, + Status.BadRequest -> BadRequest, + Status.BadGateway -> BadGateway, + Status.InternalServerError -> InternalServerError + ) + + /** Converts some object to JSON, given a converter function. 
+ * + * @param data Data to be converted to JSON + * @param name Name given to the data + * @param cnv Converter function from A to Json + * @tparam A Type of the data to be converted to JSON + * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. + * The list will be empty if no data is provided for conversion. + */ + def maybeField[A]( + data: Option[A], + name: String, + cnv: A => Json + ): List[(String, Json)] = + data match { + case None => List() + case Some(v) => List((name, cnv(v))) + } + + /** Converts some object to JSON, given a converter function. + * + * @param data Data to be converted to JSON + * @param name Name given to the data + * @param cnv Converter function from A to Json + * @tparam A Type of the data to be converted to JSON + * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. + * In case no data is provided, the list will contain: the name given to the data and the message given instead of the data. 
+ */ + def eitherField[A]( + data: Either[String, A], + name: String, + cnv: A => Json + ): List[(String, Json)] = + data match { + case Left(msg) => List((name, Json.fromString(msg))) + case Right(v) => List((name, cnv(v))) + } + + /** Create a response object with a given message (will be embedded in JSON) and status code + * + * @param msg Raw message that the response will contain inside a JSON + * @param status Desired HTTP status of the response + * @return The response object, ready to be dispatched elsewhere + */ + def responseJson(msg: String, status: Status = Ok): IO[Response[IO]] = { + val responseMessage = mkJson(msg) + mapStatusCodes(status) match { + case Status.Created => Created(responseMessage) + case Status.Accepted => Accepted(responseMessage) + case Status.NoContent => NoContent() + case Status.Found => Found(responseMessage) + case Status.Forbidden => Forbidden(responseMessage) + case Status.SeeOther => SeeOther(responseMessage) + case Status.NotAcceptable => NotAcceptable(responseMessage) + case Status.NotModified => NotModified() + case Status.NotFound => NotFound(responseMessage) + case Status.BadRequest => BadRequest(responseMessage) + case Status.BadGateway => BadGateway(responseMessage) + case Status.InternalServerError => InternalServerError(responseMessage) + + case _ => Ok(responseMessage) + } + } + + private def mkJson(msg: String): Json = + Json.fromFields(List(("error", Json.fromString(msg)))) + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala deleted file mode 100644 index a3e5cb62..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtilsServer.scala +++ /dev/null @@ -1,47 +0,0 @@ -package es.weso.rdfshape.server.utils.json - -import io.circe.Json - -/** Helper utilities to extract JSON from the complex data managed by the API. 
- */ -object JsonUtilsServer { - - /** Converts some object to JSON, given a converter function. - * - * @param data Data to be converted to JSON - * @param name Name given to the data - * @param cnv Converter function from A to Json - * @tparam A Type of the data to be converted to JSON - * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. - * The list will be empty if no data is provided for conversion. - */ - def maybeField[A]( - data: Option[A], - name: String, - cnv: A => Json - ): List[(String, Json)] = - data match { - case None => List() - case Some(v) => List((name, cnv(v))) - } - - /** Converts some object to JSON, given a converter function. - * - * @param data Data to be converted to JSON - * @param name Name given to the data - * @param cnv Converter function from A to Json - * @tparam A Type of the data to be converted to JSON - * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. - * In case no data is provided, the list will contain: the name given to the data and the message given instead of the data. - */ - def eitherField[A]( - data: Either[String, A], - name: String, - cnv: A => Json - ): List[(String, Json)] = - data match { - case Left(msg) => List((name, Json.fromString(msg))) - case Right(v) => List((name, cnv(v))) - } - -} From 236e7927dbed46115c4e1fe2795d9a4974e204d7 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Sat, 21 Aug 2021 16:38:29 +0200 Subject: [PATCH 14/32] Further refactoring. 
--- .../es/weso/rdfshape/server/Server.scala | 14 +- .../ApiDefaults.scala} | 14 +- .../api/definitions/ApiDefinitions.scala | 10 + .../api/definitions/UmlDefinitions.scala | 14 + .../server/api/merged/DataElement.scala | 6 +- .../server/api/routes/ApiHelper.scala | 380 ------------------ .../routes/api/{ => service}/APIService.scala | 20 +- .../server/api/routes/data/DataValue.scala | 14 - .../data/logic}/DataConversionResult.scala | 4 +- .../data/{ => logic}/DataConverter.scala | 6 +- .../data/logic}/DataExtractResult.scala | 6 +- .../data/logic}/DataInfoResult.scala | 6 +- .../routes/data/logic/DataOperations.scala | 241 +++++++++++ .../routes/data/{ => service}/DataParam.scala | 4 +- .../data/{ => service}/DataService.scala | 70 ++-- .../api/routes/endpoint/EndpointParam.scala | 62 --- .../server/api/routes/endpoint/Query.scala | 6 - .../api/routes/endpoint/logic/Endpoint.scala | 75 ++++ .../endpoint/{ => logic}/Outgoing.scala | 2 +- .../{ => logic}/SparqlQueryParam.scala | 51 ++- .../{ => service}/EndpointService.scala | 42 +- .../fetch/{ => service}/FetchService.scala | 4 +- .../{ => service}/PermalinkService.scala | 4 +- .../logic}/SchemaConversionResult.scala | 2 +- .../schema/logic/SchemaOperations.scala | 160 +++++++- .../routes/schema/service/SchemaParam.scala | 14 +- .../routes/schema/service/SchemaService.scala | 35 +- .../routes/schema/service/SchemaValue.scala | 13 - .../schema/service/TriggerModeParam.scala | 5 +- .../api/routes/shapemap/ShapeMapValue.scala | 9 - .../shapemap/logic}/ShapeMapInfoResult.scala | 2 +- .../{ => service}/ShapeMapParam.scala | 2 +- .../{ => service}/ShapeMapService.scala | 6 +- .../shex/{ => service}/ShExService.scala | 4 +- .../api/routes/wikibase/WikidataService.scala | 7 +- 35 files changed, 678 insertions(+), 636 deletions(-) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{routes/ApiDefinitions.scala => definitions/ApiDefaults.scala} (89%) create mode 100644 
modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/UmlDefinitions.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/{ => service}/APIService.scala (51%) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataValue.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{results => routes/data/logic}/DataConversionResult.scala (89%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/{ => logic}/DataConverter.scala (97%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{results => routes/data/logic}/DataExtractResult.scala (96%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{results => routes/data/logic}/DataInfoResult.scala (92%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/{ => service}/DataParam.scala (98%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/{ => service}/DataService.scala (82%) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointParam.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/{ => logic}/Outgoing.scala (96%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/{ => logic}/SparqlQueryParam.scala (54%) rename 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/{ => service}/EndpointService.scala (77%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/{ => service}/FetchService.scala (89%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/{ => service}/PermalinkService.scala (98%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{results => routes/schema/logic}/SchemaConversionResult.scala (98%) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaValue.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapValue.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{results => routes/shapemap/logic}/ShapeMapInfoResult.scala (96%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/{ => service}/ShapeMapParam.scala (98%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/{ => service}/ShapeMapService.scala (89%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/{ => service}/ShExService.scala (86%) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index 81369c80..56aef825 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -4,14 +4,14 @@ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.Server._ -import es.weso.rdfshape.server.api.routes.api.APIService -import es.weso.rdfshape.server.api.routes.data.DataService -import es.weso.rdfshape.server.api.routes.endpoint.EndpointService -import es.weso.rdfshape.server.api.routes.fetch.FetchService -import 
es.weso.rdfshape.server.api.routes.permalink.PermalinkService +import es.weso.rdfshape.server.api.routes.api.service.APIService +import es.weso.rdfshape.server.api.routes.data.service.DataService +import es.weso.rdfshape.server.api.routes.endpoint.service.EndpointService +import es.weso.rdfshape.server.api.routes.fetch.service.FetchService +import es.weso.rdfshape.server.api.routes.permalink.service.PermalinkService import es.weso.rdfshape.server.api.routes.schema.service.SchemaService -import es.weso.rdfshape.server.api.routes.shapemap.ShapeMapService -import es.weso.rdfshape.server.api.routes.shex.ShExService +import es.weso.rdfshape.server.api.routes.shapemap.service.ShapeMapService +import es.weso.rdfshape.server.api.routes.shex.service.ShExService import es.weso.rdfshape.server.api.routes.wikibase.WikidataService import es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException import es.weso.rdfshape.server.utils.error.{ExitCodes, SysUtils} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala similarity index 89% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiDefinitions.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index ee42a6cd..023f6d22 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiDefinitions.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -1,23 +1,13 @@ -package es.weso.rdfshape.server.api.routes +package es.weso.rdfshape.server.api.definitions import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} import es.weso.schema.{Schemas, ShapeMapTrigger} import es.weso.shapemaps.ShapeMap -/** Global definitions used in the API - */ -object ApiDefinitions { - - /** API route inside the web 
server - */ - val api = "api" -} - /** Application-wide defaults */ -object Defaults { - +case object ApiDefaults { val availableDataFormats: List[DataFormat] = DataFormat.availableFormats val defaultDataFormat: DataFormat = DataFormat.defaultFormat val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala new file mode 100644 index 00000000..af92658f --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala @@ -0,0 +1,10 @@ +package es.weso.rdfshape.server.api.definitions + +/** Global definitions used in the API + */ +case object ApiDefinitions { + + /** API route inside the web server + */ + val api = "api" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/UmlDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/UmlDefinitions.scala new file mode 100644 index 00000000..d7871441 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/UmlDefinitions.scala @@ -0,0 +1,14 @@ +package es.weso.rdfshape.server.api.definitions + +import es.weso.uml.PlantUMLOptions + +/** UML-generation related data + */ +case object UmlDefinitions { + + /** Additional options passed down to PlantUML when generating diagrams on the fly. 
+ */ + val umlOptions: PlantUMLOptions = PlantUMLOptions( + watermark = Some("Generated by [[https://rdfshape.weso.es rdfshape]]") + ) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala index d5baa067..6ad1903f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala @@ -4,8 +4,8 @@ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.format.DataFormat -import es.weso.rdfshape.server.api.routes.Defaults import io.circe._ /** Represent each chunk of RDF data submitted (mainly on RDF-merging operations) @@ -71,7 +71,7 @@ object DataElement extends LazyLogging { dataUrl = None, endpoint = None, dataFile = None, - Defaults.defaultDataFormat, + ApiDefaults.defaultDataFormat, ActiveDataTab.default ) @@ -170,7 +170,7 @@ object DataElement extends LazyLogging { dataFormatStr <- cursor .downField("dataFormat") .as[String] - .orElse(Right(Defaults.defaultDataFormat.name)) + .orElse(Right(ApiDefaults.defaultDataFormat.name)) dataFormat <- DataFormat .fromString(dataFormatStr) .leftMap(s => diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala deleted file mode 100644 index 399f2e19..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/ApiHelper.scala +++ /dev/null @@ -1,380 +0,0 @@ -package es.weso.rdfshape.server.api.routes - -import cats.effect.IO -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.nodes.{IRI, Lang} -import 
es.weso.rdf.{PrefixMap, RDFBuilder, RDFReasoner} -import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.api.results.{DataExtractResult, DataInfoResult} -import es.weso.rdfshape.server.api.routes.Defaults.{ - defaultSchemaEngine, - defaultSchemaFormat, - defaultShapeLabel -} -import es.weso.rdfshape.server.api.routes.data.DataParam -import es.weso.rdfshape.server.api.routes.schema.service.{ - SchemaParam, - TriggerModeParam -} -import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField -import es.weso.schema._ -import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} -import es.weso.shapemaps.{NodeSelector, ResultShapeMap} -import es.weso.uml.{PlantUMLOptions, Schema2UML, UML} -import es.weso.utils.IOUtils._ -import io.circe.Json -import org.http4s.Uri -import org.http4s.client.{Client, JavaNetClientBuilder} - -/** Static utils used by several API components - */ -object ApiHelper extends LazyLogging { - - /** Additional options passed down to PlantUML when generating diagrams on the fly. 
- */ - val umlOptions = PlantUMLOptions( - watermark = Some("Generated by [[https://rdfshape.weso.es rdfshape]]") - ) - - /** Long value used as a "no time" value - */ - private val NoTime = 0L - - /** @param result Schema validation result - * @return JSON representation of the schema validation result - */ - def result2json(result: Result): IO[Json] = for { - emptyRes <- RDFAsJenaModel.empty - json <- emptyRes.use(emptyBuilder => result.toJson(emptyBuilder)) - } yield json - - /** Get base URI - * - * @return default URI obtained from current folder - */ - private[api] def getBase: Option[String] = Defaults.relativeBase.map(_.str) - - /** @param prefixMap Input prefix map - * @return JSON representation of the prefix map - */ - private[api] def prefixMap2Json(prefixMap: PrefixMap): Json = { - Json.fromFields(prefixMap.pm.map { case (prefix, iri) => - (prefix.str, Json.fromString(iri.getLexicalForm)) - }) - } - - /** @return For a given resource address, attempt to return its contents. - */ - // TODO: handle timeouts and remove unsafe code - private[api] def resolveUri(baseUri: Uri, urlStr: String): IO[String] = { - logger.info(s"Handling Uri: $urlStr") - Uri - .fromString(urlStr) - .fold( - fail => { - logger.info(s"Error parsing $urlStr") - IO.raiseError[String]( - new RuntimeException( - s"Error resolving $urlStr as URL: ${fail.message}" - ) - ) - }, - uri => { - // TODO: The following code is unsafe... 
- // implicit val cs: ContextShift[IO] = IO.contextShift(global) - // implicit val timer: Timer[IO] = IO.timer(global) - // val blockingPool = Executors.newFixedThreadPool(5) - // val blocker = Blocker.liftExecutorService(blockingPool) - val httpClient: Client[IO] = JavaNetClientBuilder[IO].create - val resolvedUri = baseUri.resolve(uri) - logger.info(s"Resolved: $resolvedUri") - httpClient.expect[String](resolvedUri) - } - ) - } - - private[api] def validateStr( - data: String, - optDataFormat: Option[DataFormat], - optSchema: Option[String], - optSchemaFormat: Option[SchemaFormat], - optSchemaEngine: Option[String], - tp: TriggerModeParam, - optInference: Option[String], - relativeBase: Option[IRI], - builder: RDFBuilder - ): IO[(Result, Option[ValidationTrigger], Long)] = { - val dp = DataParam.empty.copy( - data = Some(data), - dataFormatTextarea = optDataFormat, - inference = optInference - ) - val sp = SchemaParam.empty.copy( - schema = optSchema, - schemaFormatTextArea = optSchemaFormat, - schemaEngine = optSchemaEngine - ) - - val result: IO[(Result, Option[ValidationTrigger], Long)] = for { - pair <- dp.getData(relativeBase) - (maybeStr, resourceRdf) = pair - result <- resourceRdf.use(rdf => - for { - pairSchema <- sp.getSchema(Some(rdf)) - (_, eitherSchema) = pairSchema - schema <- IO.fromEither( - eitherSchema.leftMap(s => - new RuntimeException(s"Error obtaining schema: $s") - ) - ) - res <- validate(rdf, dp, schema, sp, tp, relativeBase, builder) - } yield res - ) - } yield result - - result.attempt.flatMap(_.fold(e => err(e.getMessage), IO.pure)) - } - - private[api] def validate( - rdf: RDFReasoner, - dp: DataParam, - schema: Schema, - sp: SchemaParam, - tp: TriggerModeParam, - relativeBase: Option[IRI], - builder: RDFBuilder - ): IO[(Result, Option[ValidationTrigger], Long)] = { - logger.debug(s"APIHelper: validate") - - val base = relativeBase.map(_.str) // Some(FileUtils.currentFolderURL) - val triggerMode = tp.triggerMode - for { - pm <- 
rdf.getPrefixMap - p <- tp.getShapeMap(pm, schema.pm) - (optShapeMapStr, eitherShapeMap) = p - pair <- - ValidationTrigger.findTrigger( - triggerMode.getOrElse(Defaults.defaultTriggerMode), - optShapeMapStr.getOrElse(""), - base, - None, - None, - pm, - schema.pm - ) match { - case Left(msg) => - err( - s"Cannot obtain trigger: $triggerMode\nshapeMap: $optShapeMapStr\nmsg: $msg" - ) - case Right(trigger) => - val run = for { - startTime <- IO { - System.nanoTime() - } - result <- schema.validate(rdf, trigger, builder) - endTime <- IO { - System.nanoTime() - } - time: Long = endTime - startTime - } yield (result, Some(trigger), time) - run.handleErrorWith(e => { - val msg = s"Error validating: ${e.getMessage}" - logger.error(msg) - err(s"Error validating: ${e.getMessage}") - }) - } - } yield pair - } - - private def err(msg: String) = - IO((Result.errStr(s"Error: $msg"), None, NoTime)) - - /* private[server] def query(data: String, optDataFormat: Option[DataFormat], - * optQuery: Option[String], optInference: Option[String] ): IO[Json] = { - * optQuery match { case None => IO(Json.Null) case Some(queryStr) => val - * dataFormat = optDataFormat.getOrElse(defaultDataFormat) val base = - * Some(IRI(FileUtils.currentFolderURL)) for { basicRdf <- - * RDFAsJenaModel.fromChars(data, dataFormat.name, base) rdf <- - * basicRdf.applyInference(optInference.getOrElse("None")) json <- - * rdf.queryAsJson(queryStr) } yield json } } */ - private[api] def dataExtract( - rdf: RDFReasoner, - optData: Option[String], - optDataFormat: Option[DataFormat], - optNodeSelector: Option[String], - optInference: Option[String], - optEngine: Option[String], - optSchemaFormat: Option[SchemaFormat], - optLabelName: Option[String], - relativeBase: Option[IRI] - ): IO[DataExtractResult] = { - val base = relativeBase.map(_.str) - val engine = optEngine.getOrElse(defaultSchemaEngine) - val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) - optNodeSelector match { - case None => - 
IO.pure( - DataExtractResult.fromMsg("DataExtract: Node selector not specified") - ) - case Some(nodeSelector) => - val es: ESIO[(Schema, ResultShapeMap)] = for { - pm <- io2es(rdf.getPrefixMap) - selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) - eitherResult <- { - logger.debug(s"Node selector: $selector") - - val inferOptions: InferOptions = InferOptions( - inferTypePlainNode = true, - addLabelLang = Some(Lang("en")), - possiblePrefixMap = PossiblePrefixes.wikidataPrefixMap, - maxFollowOn = 1, - followOnLs = List(), - followOnThreshold = Some(1), - sortFunction = InferOptions.orderByIRI - ) - io2es( - SchemaInfer.runInferSchema( - rdf, - selector, - engine, - optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel), - inferOptions - ) - ) - } - pair <- either2es(eitherResult) - str <- io2es(pair._1.serialize("ShExC")) - _ <- io2es(IO(logger.debug(s"Extracted; $str"))) - } yield { - pair - } - for { - either <- run_es(es) - } yield either.fold( - err => DataExtractResult.fromMsg(err), - pair => { - val (schema, resultShapeMap) = pair - DataExtractResult.fromExtraction( - optData, - optDataFormat, - schemaFormat.name, - engine, - schema, - resultShapeMap - ) - } - ) - } - } - - private[api] def shapeInfer( - rdf: RDFReasoner, - optNodeSelector: Option[String], - optInference: Option[String], - optEngine: Option[String], - optSchemaFormat: Option[SchemaFormat], - optLabelName: Option[String], - relativeBase: Option[IRI], - withUml: Boolean - ): ESIO[Json] = { - val base = relativeBase.map(_.str) - val engine = optEngine.getOrElse(defaultSchemaEngine) - val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) - optNodeSelector match { - case None => ok_es(Json.Null) - case Some(nodeSelector) => - for { - pm <- io2es(rdf.getPrefixMap) - selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) - eitherResult <- io2es { - logger.debug(s"Selector: $selector") - - SchemaInfer.runInferSchema( - rdf, - selector, - engine, - 
optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel) - ) - } - result <- either2es(eitherResult) - (schemaInfer, resultMap) = result - maybePair <- - if(withUml) - either2es(Schema2UML.schema2UML(schemaInfer).map(Some(_))) - else ok_es(None) - maybeSvg <- io2es(maybePair match { - case None => IO.pure(None) - case Some(pair) => - val (uml, warnings) = pair - uml.toSVG(umlOptions).map(Some(_)) - }) - str <- io2es(schemaInfer.serialize(schemaFormat.name)) - } yield Json.fromFields( - List( - ("inferredShape", Json.fromString(str)), - ("format", Json.fromString(schemaFormat.name)), - ("engine", Json.fromString(engine)), - ("nodeSelector", Json.fromString(nodeSelector)) - ) ++ - maybeField( - maybePair, - "uml", - (pair: (UML, List[String])) => { - val (uml, warnings) = pair - Json.fromString(uml.toPlantUML(umlOptions)) - } - ) ++ - maybeField(maybeSvg, "svg", Json.fromString) - ) - } - } - - private[api] def dataFormatOrDefault(df: Option[String]): String = - df.getOrElse(DataFormats.defaultFormatName) - - private[api] def dataInfoFromString( - data: String, - dataFormatStr: String - ): IO[Json] = { - val either: ESIO[Json] = for { - dataFormat <- either2es(DataFormat.fromString(dataFormatStr)) - json <- io2es( - RDFAsJenaModel - .fromChars(data, dataFormat.name) - .flatMap(_.use(rdf => dataInfo(rdf, Some(data), Some(dataFormat)))) - ) - } yield json - - either.fold(e => DataInfoResult.fromMsg(e).toJson, identity) - } - - /* private[server] def getSchema(sv: SchemaValue): EitherT[IO,String,Schema] = - * { val schemaEngine = sv.currentSchemaEngine val schemaFormat = - * sv.currentSchemaFormat val schemaStr = sv.schema.getOrElse("") val base = - * Some(FileUtils.currentFolderURL) Schemas.fromString(schemaStr, - * schemaFormat.name, schemaEngine, base) } */ - - private[api] def dataInfo( - rdf: RDFReasoner, - data: Option[String], - dataFormat: Option[DataFormat] - ): IO[Json] = { - val either: IO[Either[Throwable, DataInfoResult]] = (for { - numberStatements <- 
rdf.getNumberOfStatements() - predicates <- rdf.predicates().compile.toList - pm <- rdf.getPrefixMap - } yield DataInfoResult.fromData( - data, - dataFormat, - predicates.toSet, - numberStatements, - pm - )).attempt - either.map( - _.fold(e => DataInfoResult.fromMsg(e.getMessage).toJson, _.toJson) - ) - } - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala similarity index 51% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala index 97fd2683..e72aeacd 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/APIService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala @@ -1,15 +1,11 @@ -package es.weso.rdfshape.server.api.routes.api +package es.weso.rdfshape.server.api.routes.api.service import cats.effect._ -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ -import es.weso.rdfshape.server.api.routes.{ApiService, Defaults} -import io.circe._ +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api +import es.weso.rdfshape.server.api.routes.ApiService import org.http4s._ -import org.http4s.circe._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl -import org.http4s.server.staticcontent.resourceServiceBuilder -import org.log4s.getLogger /** API service to handle multiple general tasks (server status, etc.) 
* @@ -21,17 +17,9 @@ class APIService(client: Client[IO]) extends Http4sDsl[IO] with ApiService { */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - case req @ GET -> Root / `api` / "health" => + case GET -> Root / `api` / "health" => Ok("OK") - } - private val relativeBase = Defaults.relativeBase - private val logger = getLogger - private val swagger = - resourceServiceBuilder[IO]("/swagger") // ResourceService.Config()) - - private def errJson(msg: String): IO[Response[IO]] = - Ok(Json.fromFields(List(("error", Json.fromString(msg))))) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataValue.scala deleted file mode 100644 index 3e1c29a0..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataValue.scala +++ /dev/null @@ -1,14 +0,0 @@ -package es.weso.rdfshape.server.api.routes.data - -import es.weso.rdfshape.server.api.format.DataFormat - -case class DataValue( - data: Option[String], - dataURL: Option[String], - currentDataFormat: DataFormat, - availableDataFormats: List[DataFormat], - currentInferenceEngine: String, - availableInferenceEngines: List[String], - endpoint: Option[String], - activeDataTab: String -) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversionResult.scala similarity index 89% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversionResult.scala index 9739672a..54b84042 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataConversionResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversionResult.scala @@ -1,7 +1,7 
@@ -package es.weso.rdfshape.server.api.results +package es.weso.rdfshape.server.api.routes.data.logic import es.weso.rdfshape.server.api.format.DataFormat -import es.weso.rdfshape.server.utils.json.JsonUtils._ +import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import io.circe.Json /** Data class representing the output of a conversion operation diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataConverter.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConverter.scala similarity index 97% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataConverter.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConverter.scala index ff0013b0..cc72058d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataConverter.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConverter.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.data +package es.weso.rdfshape.server.api.routes.data.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging @@ -7,7 +7,7 @@ import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} import es.weso.rdfshape.server.api.format.DataFormat import es.weso.rdfshape.server.api.merged.CompoundData -import es.weso.rdfshape.server.api.results.DataConversionResult +import es.weso.rdfshape.server.api.routes.data.logic import es.weso.utils.IOUtils.{either2io, err} import guru.nidi.graphviz.engine.{Format, Graphviz} import guru.nidi.graphviz.model.MutableGraph @@ -107,7 +107,7 @@ object DataConverter extends LazyLogging { for { converted <- doConversion - } yield DataConversionResult( + } yield logic.DataConversionResult( "Conversion successful!", data, dataFormat, diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtractResult.scala similarity index 96% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtractResult.scala index 3791848e..b0b66f52 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataExtractResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtractResult.scala @@ -1,11 +1,11 @@ -package es.weso.rdfshape.server.api.results +package es.weso.rdfshape.server.api.routes.data.logic import cats.effect.IO -import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.routes.Defaults.{ +import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ defaultSchemaEngine, defaultSchemaFormat } +import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.utils.json.JsonUtils._ import es.weso.schema.Schema import es.weso.shapemaps.ResultShapeMap diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfoResult.scala similarity index 92% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfoResult.scala index b43a7c64..99317308 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/DataInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfoResult.scala @@ -1,9 +1,9 @@ -package es.weso.rdfshape.server.api.results +package es.weso.rdfshape.server.api.routes.data.logic import es.weso.rdf.PrefixMap import 
es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.routes.ApiHelper +import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json import es.weso.rdfshape.server.utils.json.JsonUtils._ import io.circe.Json @@ -42,7 +42,7 @@ case class DataInfoResult private ( (df: DataFormat) => Json.fromString(df.name) ) ++ maybeField(numberStatements, "numberStatements", Json.fromInt) ++ - maybeField(prefixMap, "prefixMap", ApiHelper.prefixMap2Json) ++ + maybeField(prefixMap, "prefixMap", prefixMap2Json) ++ maybeField( predicates, "predicates", diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala new file mode 100644 index 00000000..6a0fd5fa --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala @@ -0,0 +1,241 @@ +package es.weso.rdfshape.server.api.routes.data.logic + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdf.nodes.{IRI, Lang} +import es.weso.rdf.{PrefixMap, RDFReasoner} +import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ + defaultSchemaEngine, + defaultSchemaFormat, + defaultShapeLabel +} +import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions +import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} +import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField +import es.weso.schema.{DataFormats, Schema} +import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} +import es.weso.shapemaps.{NodeSelector, ResultShapeMap} +import es.weso.uml.{Schema2UML, UML} +import es.weso.utils.IOUtils._ +import io.circe.Json + +/** Static utilities used by the {@link es.weso.rdfshape.server.api.routes.data.service.DataService} + * to operate on RDF data + */ 
+private[api] object DataOperations extends LazyLogging { + + /** @param df Data format + * @return The given data format or the default one in case none was provided + */ + def dataFormatOrDefault(df: Option[String]): String = + df.getOrElse(DataFormats.defaultFormatName) + + /** For a given RDF input (plain text), return information about it + * + * @param data Input data string + * @param dataFormatStr Input data format + * @return Information about the input RDF: statements, well-formed, etc. + */ + def dataInfoFromString( + data: String, + dataFormatStr: String + ): IO[DataInfoResult] = { + val either: ESIO[DataInfoResult] = for { + dataFormat <- either2es(DataFormat.fromString(dataFormatStr)) + json <- io2es( + RDFAsJenaModel + .fromChars(data, dataFormat.name) + .flatMap(_.use(rdf => dataInfo(rdf, Some(data), Some(dataFormat)))) + ) + } yield json + + either.fold(e => DataInfoResult.fromMsg(e), identity) + } + + /** For a given RDF input, return information about it + * + * @param rdf Input RDF + * @param data Input data string + * @param dataFormat Input data format + * @return Information about the input RDF: statements, well-formed, etc. 
+ */ + def dataInfo( + rdf: RDFReasoner, + data: Option[String], + dataFormat: Option[DataFormat] + ): IO[DataInfoResult] = { + val either: IO[Either[Throwable, DataInfoResult]] = (for { + numberStatements <- rdf.getNumberOfStatements() + predicates <- rdf.predicates().compile.toList + pm <- rdf.getPrefixMap + } yield DataInfoResult.fromData( + data, + dataFormat, + predicates.toSet, + numberStatements, + pm + )).attempt + either.map( + _.fold(e => DataInfoResult.fromMsg(e.getMessage), r => r) + ) + } + + /** Extract Shex from a given RDF input + * + * @param rdf Input RDF + * @param optData Input data (optional) + * @param optDataFormat Input data format (optional) + * @param optNodeSelector Node selector (optional) + * @param optInference Conversion inference (optional) + * @param optEngine Conversion engine (optional) + * @param optSchemaFormat Target schema format (optional) + * @param optLabelName Label name (optional) + * @param relativeBase Relative base + * @return + */ + def dataExtract( + rdf: RDFReasoner, + optData: Option[String], + optDataFormat: Option[DataFormat], + optNodeSelector: Option[String], + optInference: Option[String], + optEngine: Option[String], + optSchemaFormat: Option[SchemaFormat], + optLabelName: Option[String], + relativeBase: Option[IRI] + ): IO[DataExtractResult] = { + val base = relativeBase.map(_.str) + val engine = optEngine.getOrElse(defaultSchemaEngine) + val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) + optNodeSelector match { + case None => + IO.pure( + DataExtractResult.fromMsg("DataExtract: Node selector not specified") + ) + case Some(nodeSelector) => + val es: ESIO[(Schema, ResultShapeMap)] = for { + pm <- io2es(rdf.getPrefixMap) + selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) + eitherResult <- { + logger.debug(s"Node selector: $selector") + + val inferOptions: InferOptions = InferOptions( + inferTypePlainNode = true, + addLabelLang = Some(Lang("en")), + possiblePrefixMap = 
PossiblePrefixes.wikidataPrefixMap, + maxFollowOn = 1, + followOnLs = List(), + followOnThreshold = Some(1), + sortFunction = InferOptions.orderByIRI + ) + io2es( + SchemaInfer.runInferSchema( + rdf, + selector, + engine, + optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel), + inferOptions + ) + ) + } + pair <- either2es(eitherResult) + str <- io2es(pair._1.serialize("ShExC")) + _ <- io2es(IO(logger.debug(s"Extracted; $str"))) + } yield { + pair + } + for { + either <- run_es(es) + } yield either.fold( + err => DataExtractResult.fromMsg(err), + pair => { + val (schema, resultShapeMap) = pair + DataExtractResult.fromExtraction( + optData, + optDataFormat, + schemaFormat.name, + engine, + schema, + resultShapeMap + ) + } + ) + } + } + + // TODO: remove if unused? + def shapeInfer( + rdf: RDFReasoner, + optNodeSelector: Option[String], + optInference: Option[String], + optEngine: Option[String], + optSchemaFormat: Option[SchemaFormat], + optLabelName: Option[String], + relativeBase: Option[IRI], + withUml: Boolean + ): ESIO[Json] = { + val base = relativeBase.map(_.str) + val engine = optEngine.getOrElse(defaultSchemaEngine) + val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) + optNodeSelector match { + case None => ok_es(Json.Null) + case Some(nodeSelector) => + for { + pm <- io2es(rdf.getPrefixMap) + selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) + eitherResult <- io2es { + logger.debug(s"Selector: $selector") + + SchemaInfer.runInferSchema( + rdf, + selector, + engine, + optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel) + ) + } + result <- either2es(eitherResult) + (schemaInfer, resultMap) = result + maybePair <- + if(withUml) + either2es(Schema2UML.schema2UML(schemaInfer).map(Some(_))) + else ok_es(None) + maybeSvg <- io2es(maybePair match { + case None => IO.pure(None) + case Some(pair) => + val (uml, warnings) = pair + uml.toSVG(umlOptions).map(Some(_)) + }) + str <- io2es(schemaInfer.serialize(schemaFormat.name)) + 
} yield Json.fromFields( + List( + ("inferredShape", Json.fromString(str)), + ("format", Json.fromString(schemaFormat.name)), + ("engine", Json.fromString(engine)), + ("nodeSelector", Json.fromString(nodeSelector)) + ) ++ + maybeField( + maybePair, + "uml", + (pair: (UML, List[String])) => { + val (uml, warnings) = pair + Json.fromString(uml.toPlantUML(umlOptions)) + } + ) ++ + maybeField(maybeSvg, "svg", Json.fromString) + ) + } + } + + /** Convert a given prefix map to JSON format for API operations + * + * @param prefixMap Input prefix map + * @return JSON representation of the prefix map + */ + private[api] def prefixMap2Json(prefixMap: PrefixMap): Json = { + Json.fromFields(prefixMap.pm.map { case (prefix, iri) => + (prefix.str, Json.fromString(iri.getLexicalForm)) + }) + } + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataParam.scala similarity index 98% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataParam.scala index 7c77e437..e2fda622 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataParam.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.data +package es.weso.rdfshape.server.api.routes.data.service import cats.effect._ import cats.implicits._ @@ -6,9 +6,9 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena._ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, RDFReasoner} +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultActiveDataTab import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.merged.CompoundData -import 
es.weso.rdfshape.server.api.routes.Defaults._ import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.html2rdf.HTML2RDF import es.weso.utils.IOUtils.err diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala similarity index 82% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index c8866329..c91a9903 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -1,16 +1,29 @@ -package es.weso.rdfshape.server.api.routes.data +package es.weso.rdfshape.server.api.routes.data.service import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ + availableInferenceEngines, + defaultDataFormat, + defaultInference +} +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.results._ -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ -import es.weso.rdfshape.server.api.routes.ApiHelper._ -import es.weso.rdfshape.server.api.routes.Defaults.defaultDataFormat import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.endpoint.SparqlQueryParam -import es.weso.rdfshape.server.api.routes.{Defaults, PartsMap} +import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.{ + dataExtract, + dataFormatOrDefault, + dataInfo, + dataInfoFromString +} +import 
es.weso.rdfshape.server.api.routes.data.logic.{ + DataConverter, + DataExtractResult +} +import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQueryParam import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.schema._ import es.weso.utils.IOUtils._ @@ -51,12 +64,12 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { Ok(Json.fromString(dataFormat)) case GET -> Root / `api` / "data" / "inferenceEngines" => - val inferenceEngines = Defaults.availableInferenceEngines + val inferenceEngines = availableInferenceEngines val json = Json.fromValues(inferenceEngines.map(Json.fromString)) Ok(json) case GET -> Root / `api` / "data" / "inferenceEngines" / "default" => - val defaultInferenceEngine = Defaults.defaultInference + val defaultInferenceEngine = defaultInference Ok(Json.fromString(defaultInferenceEngine)) case GET -> Root / `api` / "data" / "visualize" / "formats" => @@ -78,7 +91,8 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { for { data <- client.expect[String](dataUrl) result <- io2f(dataInfoFromString(data, dataFormat)) - r <- Ok(result).map( + json = result.toJson + r <- Ok(json).map( _.withContentType(`Content-Type`(MediaType.application.json)) ) } yield r @@ -89,22 +103,22 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { val partsMap = PartsMap(m.parts) for { dataParam <- DataParam.mkData(partsMap, relativeBase) - (resource, dp) = dataParam - dataFormat = dataFormatOrDefault(dp.dataFormat.map(_.name)) + (resourceRdf, dp) = dataParam + dataFormat = dataFormatOrDefault(dp.dataFormat.map(_.name)) response <- dp.data match { case Some(data) => for { - r <- dataInfoFromString(data, dataFormat) - ok <- Ok(r) + result <- dataInfoFromString(data, dataFormat) + json: Json = result.toJson + ok <- Ok(json) } yield ok case None => - val resp: IO[Json] = - resource.use(rdf => dataInfo(rdf, None, dp.dataFormat)) - val x: IO[Response[IO]] = for { - json <- 
resp + for { + d <- + resourceRdf.use(rdf => dataInfo(rdf, None, dp.dataFormat)) + json <- IO(d.toJson) ok <- Ok(json) } yield ok - x } } yield response } @@ -139,10 +153,18 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { optActiveDataTab, optCompoundData ) + // for { + // dataParam <- io2f(dp.getData(relativeBase)) + // (maybeStr, resourceRdf) = dataParam + /* json <- io2f(resourceRdf.use(rdf => dataInfo(rdf, maybeStr, + * optDataFormat))) */ + // ok <- Ok(json) + // } yield ok for { dataParam <- io2f(dp.getData(relativeBase)) - (maybeStr, res) = dataParam - json <- io2f(res.use(rdf => dataInfo(rdf, maybeStr, optDataFormat))) + (maybeStr, resourceRdf) = dataParam + d <- resourceRdf.use(rdf => dataInfo(rdf, maybeStr, optDataFormat)) + json <- IO(d.toJson) ok <- Ok(json) } yield ok } @@ -200,11 +222,11 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { for { dataParam <- DataParam.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataParam - maybePair <- SparqlQueryParam.mkQuery(partsMap) + maybePair <- SparqlQueryParam.getSparqlQuery(partsMap) resp <- maybePair match { case Left(err) => errJson(s"Error obtaining Query data $err") case Right((queryStr, qp)) => - val optQueryStr = qp.query.map(_.str) + val optQueryStr = qp.queryRaw logger.debug(s"Data query optQueryStr: $optQueryStr") for { json <- io2f( @@ -270,7 +292,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } - private val relativeBase = Defaults.relativeBase + private val relativeBase = ApiDefaults.relativeBase private def parseInt(s: String): Either[String, Int] = Try(s.toInt).map(Right(_)).getOrElse(Left(s"$s is not a number")) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointParam.scala deleted file mode 100644 index 16125186..00000000 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointParam.scala +++ /dev/null @@ -1,62 +0,0 @@ -package es.weso.rdfshape.server.api.routes.endpoint - -import cats.data.EitherT -import cats.effect._ -import es.weso.rdf.RDFReader -import es.weso.rdf.jena.Endpoint -import es.weso.rdfshape.server.api.routes.PartsMap -import io.circe.Json -// import scalaj.http._ -import es.weso.utils.IOUtils._ -import org.http4s.client.Client - -case class EndpointInfo(msg: String, status: Option[String] = None) { - def asJson: Json = Json.fromFields( - List( - ("msg", Json.fromString(msg)), - ("status", Json.fromString(status.getOrElse(""))) - ) - ) -} - -case class EndpointParam(url: String) { - - def getEndpointAsRDFReader: ESIO[RDFReader] = - io2es(Endpoint.fromString(url)) -// io2es[RDFReader,F](Endpoint.fromString(url)) - - def getInfo(client: Client[IO]): IO[EndpointInfo] = { - IO.println(s"Obtaining info of endpoint $url") *> - // Effect[F].liftIO({ - // implicit val cs: ContextShift[IO] = IO.contextShift(global) - // implicit val timer: Timer[IO] = IO.timer(global) - // val blockingPool = Executors.newFixedThreadPool(5) - // val blocker = Blocker.liftExecutorService(blockingPool) - // val httpClient: Client[IO] = JavaNetClientBuilder[IO](blocker).create - // val resolvedUri = baseUri.resolve(uri) - //logger.info(s"Resolved: $resolvedUri") - client.expect[String](url).map(EndpointInfo(_)) - // } - /* try { val response: HttpResponse[String] = Http(url).asString val - * statusLine = response.statusLine println(s"Response: $statusLine") - * EndpointInfo(msg = "OK", status = Some(statusLine)) } catch { case e : - * Throwable => EndpointInfo(msg =s"Excepton: ${e.getMessage}") } */ - // ) - } -} - -object EndpointParam { - - private[api] def mkEndpoint( - partsMap: PartsMap - ): EitherT[IO, String, EndpointParam] = for { - maybeStr <- EitherT.liftF[IO, String, Option[String]]( - partsMap.optPartValue("endpoint") - ) - ep <- maybeStr match { - case 
None => - EitherT.leftT[IO, EndpointParam](s"No value for param endpoint") - case Some(str) => EitherT.rightT[IO, String](EndpointParam(str)) - } - } yield ep -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala deleted file mode 100644 index 79fb48f6..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Query.scala +++ /dev/null @@ -1,6 +0,0 @@ -package es.weso.rdfshape.server.api.routes.endpoint - -/** Data class representing a SPARQL query - * @param str query string - */ -case class Query(str: String) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala new file mode 100644 index 00000000..78c892e9 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala @@ -0,0 +1,75 @@ +package es.weso.rdfshape.server.api.routes.endpoint.logic + +import cats.data.EitherT +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.RDFReader +import es.weso.rdf.jena.{Endpoint => EndpointJena} +import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.utils.IOUtils.{ESIO, io2es} +import io.circe.Json +import org.http4s.client.Client + +import java.net.URL +import scala.util.{Failure, Success, Try} + +/** Data class representing an endpoint + * + * @param msg Message attached to the information/returned by the endpoint + * @param status Status of the endpoint + */ +case class Endpoint(msg: String, status: Option[String] = None) { + def asJson: Json = Json.fromFields( + List( + ("msg", Json.fromString(msg)), + ("status", Json.fromString(status.getOrElse(""))) + ) + ) +} + +/** Static utilities used by the {@link 
es.weso.rdfshape.server.api.routes.endpoint.service.EndpointService} + * to operate on endpoints + */ +private[api] object Endpoint extends LazyLogging { + + /** Fetch information from an endpoint and return the RDF Reader to operate the information + * + * @param url Endpoint URL + * @return An RDF Reader to operate the information in the endpoint + */ + def getEndpointAsRDFReader(url: URL): ESIO[RDFReader] = + io2es(EndpointJena.fromString(url.toString)) + + /** Given an endpoint URL, fetch and return its data + * + * @param url Endpoint URL + * @param client Client used to fetch the URL + * @return An instance of EndpointInfo with the information contained in the endpoint + */ + def getEndpointInfo(url: URL, client: Client[IO]): IO[Endpoint] = { + IO.println(s"Obtaining info of endpoint $url") *> + client.expect[String](url.toString).map(Endpoint(_)) + } + + /** Given a request's parameters, try to extract an endpoint URL from them + * + * @param partsMap Request's parameter + * @return The endpoint URL or an error message + */ + def getEndpointUrl( + partsMap: PartsMap + ): EitherT[IO, String, URL] = for { + maybeStr <- EitherT.liftF[IO, String, Option[String]]( + partsMap.optPartValue("endpoint") + ) + ep <- maybeStr match { + case None => + EitherT.leftT[IO, URL](s"No value for param endpoint") + case Some(str) => + Try(new URL(str)) match { + case Success(url) => EitherT.rightT[IO, String](url) + case Failure(ex) => EitherT.leftT[IO, URL](ex.getMessage) + } + } + } yield ep +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Outgoing.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala similarity index 96% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Outgoing.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala index 366b4605..a1311a9a 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/Outgoing.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.endpoint +package es.weso.rdfshape.server.api.routes.endpoint.logic import es.weso.rdf.nodes.{IRI, RDFNode} import es.weso.rdf.triples.RDFTriple diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/SparqlQueryParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQueryParam.scala similarity index 54% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/SparqlQueryParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQueryParam.scala index 09a3571e..0c9d6464 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/SparqlQueryParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQueryParam.scala @@ -1,27 +1,41 @@ -package es.weso.rdfshape.server.api.routes.endpoint +package es.weso.rdfshape.server.api.routes.endpoint.logic import cats.effect.IO -import es.weso.rdfshape.server.api.routes.Defaults._ +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultActiveQueryTab import es.weso.rdfshape.server.api.routes.PartsMap +import java.net.URL import scala.io.Source import scala.util.Try +/** Data class representing a SPARQL query + * + * @param query Query string + */ +case class SparqlQuery(query: String) + +/** Data class representing the parameters needed for SPARQL querying + * + * @param queryRaw Query raw text (optional) + * @param queryURL Query containing URL (optional) + * @param queryFile Query containing file (optional) + * @param activeQueryTab Active tab, used to know which query source to use + */ case class SparqlQueryParam( - query: Option[Query], + 
queryRaw: Option[String], queryURL: Option[String], queryFile: Option[String], activeQueryTab: Option[String] ) { - def getQuery: (Option[String], Either[String, Query]) = { + def getSparqlQuery: (Option[String], Either[String, SparqlQuery]) = { activeQueryTab.getOrElse(defaultActiveQueryTab) match { case "#queryUrl" => queryURL match { case None => (None, Left(s"No value for queryURL")) case Some(queryUrl) => Try { - val url = new java.net.URL(queryUrl) + val url = new URL(queryUrl) val src = Source.fromURL(url) val str = src.mkString src.close() @@ -34,20 +48,20 @@ case class SparqlQueryParam( s"Error obtaining data from url $queryUrl: ${err.getMessage} " ) ) - case Right(str) => (Some(str), Right(Query(str))) + case Right(str) => (Some(str), Right(SparqlQuery(str))) } } case "#queryFile" => queryFile match { case None => (None, Left(s"No value for queryFile")) case Some(queryStr) => - (Some(queryStr), Right(Query(queryStr))) + (Some(queryStr), Right(SparqlQuery(queryStr))) } case "#queryTextArea" => - query match { - case None => (None, Right(Query(""))) - case Some(query) => - (Some(query.str), Right(query)) + queryRaw match { + case None => (None, Right(SparqlQuery(""))) + case Some(queryText) => + (Some(queryText), Right(SparqlQuery(queryText))) } case other => (None, Left(s"Unknown value for activeQueryTab: $other")) } @@ -57,15 +71,20 @@ case class SparqlQueryParam( object SparqlQueryParam { - private[api] def mkQuery( + /** Given a request's parameters, try to extract a SPARQL query from them + * + * @param partsMap Request's parameter + * @return The SPARQL query or an error message + */ + private[api] def getSparqlQuery( partsMap: PartsMap - ): IO[Either[String, (Query, SparqlQueryParam)]] = for { + ): IO[Either[String, (SparqlQuery, SparqlQueryParam)]] = for { qp <- mkQueryParam(partsMap) } yield { - val (maybeStr, maybeQuery) = qp.getQuery + val (maybeStr, maybeQuery) = qp.getSparqlQuery maybeQuery match { case Left(str) => Left(str) - case 
Right(query) => Right((query, qp.copy(query = Some(query)))) + case Right(query) => Right((query, qp.copy(queryRaw = Some(query.query)))) } } @@ -76,7 +95,7 @@ object SparqlQueryParam { queryFile <- partsMap.optPartValue("queryFile") activeQueryTab <- partsMap.optPartValue("activeQueryTab") } yield SparqlQueryParam( - queryStr.map(Query), + queryStr, queryURL, queryFile, activeQueryTab diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala similarity index 77% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index 976d1ff7..dfe5ef50 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -1,19 +1,30 @@ -package es.weso.rdfshape.server.api.routes.endpoint +package es.weso.rdfshape.server.api.routes.endpoint.service import cats.data.EitherT import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.Endpoint +import es.weso.rdf.jena.{Endpoint => EndpointJena} import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{ LimitParam, OptEndpointParam, OptNodeParam } -import es.weso.rdfshape.server.api.routes.endpoint.{Query => ServerQuery} -import es.weso.rdfshape.server.api.routes.{Defaults, PartsMap} +import es.weso.rdfshape.server.api.routes.PartsMap +import 
es.weso.rdfshape.server.api.routes.endpoint.logic.Endpoint.{ + getEndpointAsRDFReader, + getEndpointInfo, + getEndpointUrl +} +import es.weso.rdfshape.server.api.routes.endpoint.logic.{ + Endpoint, + Outgoing, + SparqlQuery, + SparqlQueryParam +} import es.weso.rdfshape.server.utils.numeric.NumericUtils import es.weso.utils.IOUtils._ import io.circe.Json @@ -31,7 +42,7 @@ class EndpointService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { - private val relativeBase = Defaults.relativeBase + private val relativeBase = ApiDefaults.relativeBase /** Describe the API routes handled by this service and the actions performed on each of them */ @@ -42,19 +53,18 @@ class EndpointService(client: Client[IO]) val partsMap = PartsMap(m.parts) val r: EitherT[IO, String, Json] = for { - ep <- EndpointParam.mkEndpoint(partsMap) - // json = Json.Null - endpoint <- ep.getEndpointAsRDFReader + endpointUrl <- getEndpointUrl(partsMap) + endpoint <- getEndpointAsRDFReader(endpointUrl) either <- EitherT .liftF[IO, String, Either[ String, - (ServerQuery, SparqlQueryParam) + (SparqlQuery, SparqlQueryParam) ]]( - SparqlQueryParam.mkQuery(partsMap) + SparqlQueryParam.getSparqlQuery(partsMap) ) pair <- EitherT.fromEither[IO](either) (_, qp) = pair - optQueryStr = qp.query.map(_.str) + optQueryStr = qp.queryRaw json <- { logger.debug( s"Query to endpoint $endpoint: ${optQueryStr.getOrElse("")}" @@ -77,8 +87,10 @@ class EndpointService(client: Client[IO]) { val partsMap = PartsMap(m.parts) val r: EitherT[IO, String, Json] = for { - ep <- EndpointParam.mkEndpoint(partsMap) - ei <- EitherT.liftF[IO, String, EndpointInfo](ep.getInfo(client)) + endpointUrl <- getEndpointUrl(partsMap) + ei <- EitherT.liftF[IO, String, Endpoint]( + getEndpointInfo(endpointUrl, client) + ) } yield ei.asJson for { either <- r.value @@ -132,7 +144,7 @@ class EndpointService(client: Client[IO]) private def outgoing(endpoint: IRI, node: IRI, limit: Int): ESIO[Outgoing] = for { - triples <- 
stream2es(Endpoint(endpoint).triplesWithSubject(node)) + triples <- stream2es(EndpointJena(endpoint).triplesWithSubject(node)) } yield Outgoing.fromTriples(node, endpoint, triples.toSet) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala similarity index 89% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala index c22ae63a..c48c2f9c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala @@ -1,7 +1,7 @@ -package es.weso.rdfshape.server.api.routes.fetch +package es.weso.rdfshape.server.api.routes.fetch.service import cats.effect._ -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.UrlParam import org.http4s._ import org.http4s.client.Client diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala similarity index 98% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala index 2232385f..9240ef5b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/PermalinkService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala @@ -1,8 +1,8 @@ -package 
es.weso.rdfshape.server.api.routes.permalink +package es.weso.rdfshape.server.api.routes.permalink.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{ UrlCodeParam, UrlParam diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala similarity index 98% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala index 13875930..d953adc4 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/SchemaConversionResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.results +package es.weso.rdfshape.server.api.routes.schema.logic import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.shapemaps.ShapeMap diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index 2f26b6ad..dccbfe3b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -1,16 +1,31 @@ package es.weso.rdfshape.server.api.routes.schema.logic import cats.effect.IO +import cats.syntax.either._ import com.typesafe.scalalogging.LazyLogging -import 
es.weso.rdfshape.server.api.routes.ApiHelper.umlOptions -import es.weso.schema.Schema +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdf.nodes.IRI +import es.weso.rdf.{RDFBuilder, RDFReasoner} +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions +import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} +import es.weso.rdfshape.server.api.routes.data.service.DataParam +import es.weso.rdfshape.server.api.routes.schema.service.{ + SchemaParam, + TriggerModeParam +} +import es.weso.schema.{Result, Schema, ValidationTrigger} import es.weso.uml.Schema2UML import io.circe.Json /** Static utilities used by the {@link es.weso.rdfshape.server.api.routes.schema.service.SchemaService} * to operate on schemas */ -private[schema] object SchemaOperations extends LazyLogging { +private[api] object SchemaOperations extends LazyLogging { + + /** Long value used as a "no time" value for errored validations + */ + private val NoTime = 0L /** Obtain the information from an schema * @@ -37,7 +52,7 @@ private[schema] object SchemaOperations extends LazyLogging { val eitherJson = for { pair <- Schema2UML.schema2UML(schema) } yield { - val (uml, warnings) = pair + val (uml, _) = pair uml.toJson } eitherJson.fold( @@ -78,7 +93,7 @@ private[schema] object SchemaOperations extends LazyLogging { eitherUML.fold( e => IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")), pair => { - val (uml, warnings) = pair + val (uml, _) = pair logger.debug(s"UML converted: $uml") (for { str <- uml.toSVG(umlOptions) @@ -96,4 +111,139 @@ private[schema] object SchemaOperations extends LazyLogging { ) } + /** @param result Schema validation result + * @return JSON representation of the schema validation result + */ + def schemaResult2json(result: Result): IO[Json] = for { + emptyRes <- RDFAsJenaModel.empty + json <- emptyRes.use(emptyBuilder => result.toJson(emptyBuilder)) + } yield json + + /** Get 
base URI + * + * @return default URI obtained from current folder + */ + def getBase: Option[String] = ApiDefaults.relativeBase.map(_.str) + + /** For a given data (raw text) and schema, attempt to validate it with WESO libraries + * + * @param data Input RDF data + * @param optDataFormat RDF data format (optional) + * @param optSchema Input validation schema (optional) + * @param optSchemaFormat Validation schema format (optional) + * @param optSchemaEngine Validation schema engine (optional) + * @param tp Trigger mode + * @param optInference Validation inference (optional) + * @param relativeBase Relative base (optional) + * @param builder RDF builder + * @return + */ + private[api] def schemaValidateStr( + data: String, + optDataFormat: Option[DataFormat], + optSchema: Option[String], + optSchemaFormat: Option[SchemaFormat], + optSchemaEngine: Option[String], + tp: TriggerModeParam, + optInference: Option[String], + relativeBase: Option[IRI], + builder: RDFBuilder + ): IO[(Result, Option[ValidationTrigger], Long)] = { + val dp = DataParam.empty.copy( + data = Some(data), + dataFormatTextarea = optDataFormat, + inference = optInference + ) + val sp = SchemaParam.empty.copy( + schema = optSchema, + schemaFormatTextArea = optSchemaFormat, + schemaEngine = optSchemaEngine + ) + + val result: IO[(Result, Option[ValidationTrigger], Long)] = for { + pair <- dp.getData(relativeBase) + (_, resourceRdf) = pair + result <- resourceRdf.use(rdf => + for { + pairSchema <- sp.getSchema(Some(rdf)) + (_, eitherSchema) = pairSchema + schema <- IO.fromEither( + eitherSchema.leftMap(s => + new RuntimeException(s"Error obtaining schema: $s") + ) + ) + res <- schemaValidate(rdf, schema, tp, relativeBase, builder) + } yield res + ) + } yield result + + result.attempt.flatMap(_.fold(e => schemaErr(e.getMessage), IO.pure)) + } + + /** For a given data and schema, attempt to validate it with WESO libraries + * + * @param rdf Input RDF data + * @param schema Input schema + * @param tp 
Trigger mode + * @param relativeBase Relative base (optional) + * @param builder RDF builder + * @return + */ + def schemaValidate( + rdf: RDFReasoner, + schema: Schema, + tp: TriggerModeParam, + relativeBase: Option[IRI], + builder: RDFBuilder + ): IO[(Result, Option[ValidationTrigger], Long)] = { + logger.debug(s"APIHelper: validate") + + val base = relativeBase.map(_.str) // Some(FileUtils.currentFolderURL) + val triggerMode = tp.triggerMode + for { + pm <- rdf.getPrefixMap + p <- tp.getShapeMap(pm, schema.pm) + (optShapeMapStr, _) = p + pair <- + ValidationTrigger.findTrigger( + triggerMode.getOrElse(ApiDefaults.defaultTriggerMode), + optShapeMapStr.getOrElse(""), + base, + None, + None, + pm, + schema.pm + ) match { + case Left(msg) => + schemaErr( + s"Cannot obtain trigger: $triggerMode\nshapeMap: $optShapeMapStr\nmsg: $msg" + ) + case Right(trigger) => + val run = for { + startTime <- IO { + System.nanoTime() + } + result <- schema.validate(rdf, trigger, builder) + endTime <- IO { + System.nanoTime() + } + time: Long = endTime - startTime + } yield (result, Some(trigger), time) + run.handleErrorWith(e => { + val msg = s"Error validating: ${e.getMessage}" + logger.error(msg) + schemaErr(s"Error validating: ${e.getMessage}") + }) + } + } yield pair + } + + /** Given an error message, return an empty schema validation result containing it + * + * @param msg error message + * @return Empty schema validation result containing the error message + */ + private def schemaErr(msg: String) = + IO((Result.errStr(s"Error: $msg"), None, NoTime)) + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala index d59506a5..d676017c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala @@ -4,9 +4,13 @@ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner +import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ + defaultActiveSchemaTab, + defaultSchemaEngine +} import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.routes.Defaults._ -import es.weso.rdfshape.server.api.routes.{ApiHelper, PartsMap} +import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase import es.weso.schema.{Schema, Schemas} import scala.io.Source @@ -93,7 +97,7 @@ case class SchemaParam( str, schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, schemaEngine.getOrElse(defaultSchemaEngine), - ApiHelper.getBase + getBase ) // .leftMap(s => s"Error parsing contents of $schemaUrl: $s\nContents:\n$str") _ <- IO { logger.debug("Schema parsed") } } yield (str, schema) @@ -121,7 +125,7 @@ case class SchemaParam( schemaStr, schemaFormatStr, schemaEngineStr, - ApiHelper.getBase + getBase ) .attempt .map( @@ -140,7 +144,7 @@ case class SchemaParam( schemaStr, schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, schemaEngine.getOrElse(defaultSchemaEngine), - ApiHelper.getBase + getBase ) .attempt .map( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 02f00691..6db5d7ae 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -6,24 +6,23 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.{InferenceEngine, 
RDFReasoner} +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ + defaultSchemaEngine, + defaultSchemaFormat +} +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.results._ -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ -import es.weso.rdfshape.server.api.routes.ApiHelper._ -import es.weso.rdfshape.server.api.routes.Defaults._ import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.data.DataParam -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.{ - schema2SVG, - schemaCytoscape, - schemaInfo, - schemaVisualize -} +import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json +import es.weso.rdfshape.server.api.routes.data.service.DataParam +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ import es.weso.rdfshape.server.api.routes.schema.logic.{ + SchemaConversionResult, SchemaInfo, SchemaInfoResult } -import es.weso.rdfshape.server.api.routes.{Defaults, PartsMap} import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.schema._ @@ -427,17 +426,15 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { new RuntimeException(s"Error obtaining schema: $s") ) ) - res <- validate( + res <- schemaValidate( rdf, - dp, schema, - sp, tp, relativeBase, builder ) (result, maybeTrigger, time) = res - json <- result2json(res._1) + json <- schemaResult2json(res._1) } yield json } } yield vv) @@ -464,9 +461,9 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { tp <- TriggerModeParam.mkTriggerModeParam(partsMap) newRdf <- applyInference(rdf, dp.inference) r <- io2f( - validate(newRdf, dp, 
schema, sp, tp, relativeBase, builder) + schemaValidate(newRdf, schema, tp, relativeBase, builder) ) - json <- io2f(result2json(r._1)) + json <- io2f(schemaResult2json(r._1)) } yield json } } yield vv @@ -479,7 +476,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } } - private val relativeBase = Defaults.relativeBase + private val relativeBase = ApiDefaults.relativeBase /** Given an input schema, convert it to another output schema with the parameters specified. * diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaValue.scala deleted file mode 100644 index f2d43a81..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaValue.scala +++ /dev/null @@ -1,13 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.service - -import es.weso.rdfshape.server.api.format.SchemaFormat - -case class SchemaValue( - schema: Option[String], - schemaURL: Option[String], - currentSchemaFormat: SchemaFormat, - availableSchemaFormats: List[SchemaFormat], - currentSchemaEngine: String, - availableSchemaEngines: List[String], - activeSchemaTab: String -) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala index 56fc22d6..873a73c4 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala @@ -4,7 +4,10 @@ import cats.effect.IO import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.PrefixMap -import es.weso.rdfshape.server.api.routes.Defaults._ +import 
es.weso.rdfshape.server.api.definitions.ApiDefaults.{ + defaultActiveShapeMapTab, + defaultShapeMapFormat +} import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.shapemaps.ShapeMap diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapValue.scala deleted file mode 100644 index e5414e1d..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapValue.scala +++ /dev/null @@ -1,9 +0,0 @@ -package es.weso.rdfshape.server.api.routes.shapemap - -case class ShapeMapValue( - shapeMap: Option[String], - shapeMapURL: Option[String], - currentShapeMapFormat: String, - availableShapeMapFormats: List[String], - activeShapeMapTab: String -) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala similarity index 96% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala index dacd3d86..e8ccb533 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/results/ShapeMapInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.results +package es.weso.rdfshape.server.api.routes.shapemap.logic import es.weso.rdfshape.server.utils.json.JsonUtils._ import es.weso.shapemaps._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapParam.scala similarity index 98% rename from 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapParam.scala index 46c5116e..c0d5b62c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapParam.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.shapemap +package es.weso.rdfshape.server.api.routes.shapemap.service import cats.effect.IO import cats.implicits._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala similarity index 89% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index 9bf8e9fe..0dd51d68 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -1,10 +1,10 @@ -package es.weso.rdfshape.server.api.routes.shapemap +package es.weso.rdfshape.server.api.routes.shapemap.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.results.ShapeMapInfoResult -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapInfoResult import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.shapemaps.ShapeMap import io.circe._ diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala similarity index 86% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala index 289292a8..3a6d0212 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/ShExService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala @@ -1,7 +1,7 @@ -package es.weso.rdfshape.server.api.routes.shex +package es.weso.rdfshape.server.api.routes.shex.service import cats.effect._ -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.schema._ import io.circe._ import org.http4s._ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala index 9b0cd622..6cb991d0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala @@ -10,8 +10,9 @@ import es.weso.rdf.RDFReader import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI import es.weso.rdf.sgraph._ -import es.weso.rdfshape.server.api.routes.ApiDefinitions._ -import es.weso.rdfshape.server.api.routes.ApiHelper._ +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.schemaResult2json +import es.weso.rdfshape.server.api.definitions._ +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ import 
es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.api.utils.Http4sUtils._ @@ -412,7 +413,7 @@ class WikidataService(client: Client[IO]) vv <- (res1, res2).tupled.use { case (rdf, builder) => for { r <- schema.validate(rdf, triggerMode, builder) - json <- result2json(r) + json <- schemaResult2json(r) } yield json } } yield vv From 354a9fa73977d727bfecbd53cd30f3b044af38da Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Sat, 21 Aug 2021 18:59:49 +0200 Subject: [PATCH 15/32] Further refactoring. --- .../server/api/definitions/ApiDefaults.scala | 1 - .../api/routes/api/service/APIService.scala | 2 + ...taConverter.scala => DataConversion.scala} | 43 +++- .../data/logic/DataConversionResult.scala | 35 --- .../api/routes/data/logic/DataExtract.scala | 185 +++++++++++++++ .../routes/data/logic/DataExtractResult.scala | 94 -------- .../{DataInfoResult.scala => DataInfo.scala} | 80 ++++++- .../routes/data/logic/DataOperations.scala | 215 +----------------- .../data/{service => logic}/DataParam.scala | 10 +- .../api/routes/data/service/DataService.scala | 98 ++++---- .../routes/endpoint/logic/SparqlQuery.scala | 121 ++++++++++ .../endpoint/logic/SparqlQueryParam.scala | 104 --------- .../endpoint/service/EndpointService.scala | 44 ++-- .../routes/fetch/service/FetchService.scala | 8 +- .../permalink/service/PermalinkService.scala | 8 +- .../schema/logic/SchemaOperations.scala | 2 +- .../routes/schema/service/SchemaService.scala | 39 ++-- .../shapemap/service/ShapeMapService.scala | 9 +- .../api/routes/shex/service/ShExService.scala | 11 +- .../api/routes/wikibase/WikidataService.scala | 27 ++- 20 files changed, 556 insertions(+), 580 deletions(-) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/{DataConverter.scala => DataConversion.scala} (81%) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversionResult.scala create mode 100644 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtractResult.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/{DataInfoResult.scala => DataInfo.scala} (56%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/{service => logic}/DataParam.scala (96%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQueryParam.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index 023f6d22..8dca6531 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -25,7 +25,6 @@ case object ApiDefaults { val defaultInference: String = availableInferenceEngines.head val defaultActiveDataTab = "#dataTextArea" val defaultActiveSchemaTab = "#schemaTextArea" - val defaultActiveQueryTab = "#queryTextArea" val defaultShapeMapFormat: String = ShapeMap.defaultFormat val availableShapeMapFormats: List[String] = ShapeMap.formats val defaultActiveShapeMapTab = "#shapeMapTextArea" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala index e72aeacd..2ca08c5d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala @@ -13,6 +13,8 @@ import 
org.http4s.dsl.Http4sDsl */ class APIService(client: Client[IO]) extends Http4sDsl[IO] with ApiService { + override val verb: String = "" + /** Describe the API routes handled by this service and the actions performed on each of them */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConverter.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala similarity index 81% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConverter.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala index cc72058d..918caea9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConverter.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala @@ -8,10 +8,12 @@ import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} import es.weso.rdfshape.server.api.format.DataFormat import es.weso.rdfshape.server.api.merged.CompoundData import es.weso.rdfshape.server.api.routes.data.logic +import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.utils.IOUtils.{either2io, err} import guru.nidi.graphviz.engine.{Format, Graphviz} import guru.nidi.graphviz.model.MutableGraph import guru.nidi.graphviz.parse.Parser +import io.circe.Json import java.io.ByteArrayOutputStream import java.util.Base64 @@ -19,7 +21,40 @@ import javax.imageio.ImageIO import scala.collection.immutable import scala.util.Try -object DataConverter extends LazyLogging { +/** Data class representing the output of a conversion operation + * + * @param msg Output informational message after conversion + * @param data Data to be converted + * @param dataFormat Initial data format + * @param targetFormat Target data format + * @param result Data after conversion + */ +final case class 
DataConversion( + msg: String, + data: Option[String], + dataFormat: DataFormat, + targetFormat: String, + result: String +) { + + /** Convert a conversion result to its JSON representation + * + * @return JSON representation of the conversion result + */ + def toJson: Json = Json.fromFields( + List( + ("msg", Json.fromString(msg)), + ("result", Json.fromString(result)), + ("dataFormat", Json.fromString(dataFormat.name)), + ("targetDataFormat", Json.fromString(targetFormat)) + ) ++ + maybeField(data, "data", Json.fromString) + ) +} + +/** Static utilities for data conversion + */ +private[api] object DataConversion extends LazyLogging { lazy val availableGraphFormatNames: immutable.Seq[String] = availableGraphFormats.map(_.name) @@ -36,7 +71,7 @@ object DataConverter extends LazyLogging { dataFormat: DataFormat, maybeCompoundData: Option[String], targetFormat: String - ): IO[DataConversionResult] = { + ): IO[DataConversion] = { logger.debug( s"Converting $maybeData with format $dataFormat to $targetFormat. 
OptTargetFormat: $targetFormat" ) @@ -72,7 +107,7 @@ object DataConverter extends LazyLogging { data: Option[String], dataFormat: DataFormat, targetFormat: String - ): IO[DataConversionResult] = { + ): IO[DataConversion] = { val doConversion: IO[String] = { logger.info(s"Conversion target format: $targetFormat") targetFormat.toUpperCase match { @@ -107,7 +142,7 @@ object DataConverter extends LazyLogging { for { converted <- doConversion - } yield logic.DataConversionResult( + } yield logic.DataConversion( "Conversion successful!", data, dataFormat, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversionResult.scala deleted file mode 100644 index 54b84042..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversionResult.scala +++ /dev/null @@ -1,35 +0,0 @@ -package es.weso.rdfshape.server.api.routes.data.logic - -import es.weso.rdfshape.server.api.format.DataFormat -import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField -import io.circe.Json - -/** Data class representing the output of a conversion operation - * - * @param msg Output informational message after conversion - * @param data Data to be converted - * @param dataFormat Initial data format - * @param targetFormat Target data format - * @param result Data after conversion - */ -case class DataConversionResult( - msg: String, - data: Option[String], - dataFormat: DataFormat, - targetFormat: String, - result: String -) { - - /** Convert a conversion result to its JSON representation - * @return JSON representation of the conversion result - */ - def toJson: Json = Json.fromFields( - List( - ("msg", Json.fromString(msg)), - ("result", Json.fromString(result)), - ("dataFormat", Json.fromString(dataFormat.name)), - ("targetDataFormat", Json.fromString(targetFormat)) - ) ++ - maybeField(data, "data", 
Json.fromString) - ) -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala new file mode 100644 index 00000000..97bbd691 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala @@ -0,0 +1,185 @@ +package es.weso.rdfshape.server.api.routes.data.logic + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.RDFReasoner +import es.weso.rdf.nodes.{IRI, Lang} +import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ + defaultSchemaEngine, + defaultSchemaFormat, + defaultShapeLabel +} +import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.utils.json.JsonUtils._ +import es.weso.schema.Schema +import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} +import es.weso.shapemaps.{NodeSelector, ResultShapeMap} +import es.weso.utils.IOUtils.{ESIO, either2es, io2es, run_es} +import io.circe.Json + +/** Data class representing the output of an extraction operation (input RDF data => output schema) + * + * @param msg Output informational message after conversion. Used in case of error. 
+ * @param optData RDF input data from which ShEx may be extracted + * @param optDataFormat RDF input data format + * @param optSchemaFormat Target schema format + * @param optSchemaEngine Target schema engine + * @param optSchema Resulting schema + * @param optResultShapeMap Resulting shapemap + */ +final case class DataExtract private ( + msg: String, + optData: Option[String], + optDataFormat: Option[DataFormat], + optSchemaFormat: Option[String], + optSchemaEngine: Option[String], + optSchema: Option[Schema], + optResultShapeMap: Option[ResultShapeMap] +) { + + /** Convert an extraction result to its JSON representation + * + * @return JSON representation of the extraction result + */ + def toJson: IO[Json] = optSchema match { + case None => IO(Json.fromFields(List(("msg", Json.fromString(msg))))) + case Some(schema) => + val engine = optSchemaEngine.getOrElse(defaultSchemaEngine) + val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat.name) + for { + schemaStr <- schema.serialize(schemaFormat) + } yield Json.fromFields( + List( + ("msg", Json.fromString(msg)), + ("inferredShape", Json.fromString(schemaStr)), + ("schemaFormat", Json.fromString(schemaFormat)), + ("schemaEngine", Json.fromString(engine)) + ) ++ + maybeField(optData, "data", Json.fromString) ++ + maybeField( + optDataFormat, + "dataFormat", + (df: DataFormat) => Json.fromString(df.name) + ) ++ + maybeField( + optResultShapeMap, + "resultShapeMap", + (r: ResultShapeMap) => Json.fromString(r.toString) + ) + ) + } +} + +/** Static utilities to extract schemas from RDF data + */ +object DataExtract extends LazyLogging { + + /** Extract Shex from a given RDF input + * + * @param rdf Input RDF + * @param optData Input data (optional) + * @param optDataFormat Input data format (optional) + * @param optNodeSelector Node selector (optional) + * @param optInference Conversion inference (optional) + * @param optEngine Conversion engine (optional) + * @param optSchemaFormat Target schema format 
(optional) + * @param optLabelName Label name (optional) + * @param relativeBase Relative base + * @return + */ + def dataExtract( + rdf: RDFReasoner, + optData: Option[String], + optDataFormat: Option[DataFormat], + optNodeSelector: Option[String], + optInference: Option[String], + optEngine: Option[String], + optSchemaFormat: Option[SchemaFormat], + optLabelName: Option[String], + relativeBase: Option[IRI] + ): IO[DataExtract] = { + val base = relativeBase.map(_.str) + val engine = optEngine.getOrElse(defaultSchemaEngine) + val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) + optNodeSelector match { + case None => + IO.pure( + DataExtract.fromMsg("DataExtract: Node selector not specified") + ) + case Some(nodeSelector) => + val es: ESIO[(Schema, ResultShapeMap)] = for { + pm <- io2es(rdf.getPrefixMap) + selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) + eitherResult <- { + logger.debug(s"Node selector: $selector") + + val inferOptions: InferOptions = InferOptions( + inferTypePlainNode = true, + addLabelLang = Some(Lang("en")), + possiblePrefixMap = PossiblePrefixes.wikidataPrefixMap, + maxFollowOn = 1, + followOnLs = List(), + followOnThreshold = Some(1), + sortFunction = InferOptions.orderByIRI + ) + io2es( + SchemaInfer.runInferSchema( + rdf, + selector, + engine, + optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel), + inferOptions + ) + ) + } + pair <- either2es(eitherResult) + str <- io2es(pair._1.serialize("ShExC")) + _ <- io2es(IO(logger.debug(s"Extracted; $str"))) + } yield { + pair + } + for { + either <- run_es(es) + } yield either.fold( + err => DataExtract.fromMsg(err), + pair => { + val (schema, resultShapeMap) = pair + DataExtract.fromExtraction( + optData, + optDataFormat, + schemaFormat.name, + engine, + schema, + resultShapeMap + ) + } + ) + } + } + + /** @param msg Error message contained in the result + * @return A DataExtractResult consisting of a single error message and no data + */ + def fromMsg(msg: 
String): DataExtract = + DataExtract(msg, None, None, None, None, None, None) + + /** @return A DataExtractResult, given all the parameters needed to build it (input, formats and results) + */ + def fromExtraction( + optData: Option[String], + optDataFormat: Option[DataFormat], + schemaFormat: String, + schemaEngine: String, + schema: Schema, + resultShapeMap: ResultShapeMap + ): DataExtract = + DataExtract( + "Shape extracted", + optData, + optDataFormat, + optSchemaFormat = Some(schemaFormat), + optSchemaEngine = Some(schemaEngine), + optSchema = Some(schema), + optResultShapeMap = Some(resultShapeMap) + ) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtractResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtractResult.scala deleted file mode 100644 index b0b66f52..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtractResult.scala +++ /dev/null @@ -1,94 +0,0 @@ -package es.weso.rdfshape.server.api.routes.data.logic - -import cats.effect.IO -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - defaultSchemaEngine, - defaultSchemaFormat -} -import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.utils.json.JsonUtils._ -import es.weso.schema.Schema -import es.weso.shapemaps.ResultShapeMap -import io.circe.Json - -/** Data class representing the output of an extraction operation (input RDF data => output schema) - * - * @param msg Output informational message after conversion. Used in case of error. 
- * @param optData RDF input data from which ShEx may be extracted - * @param optDataFormat RDF input data format - * @param optSchemaFormat Target schema format - * @param optSchemaEngine Target schema engine - * @param optSchema Resulting schema - * @param optResultShapeMap Resulting shapemap - */ -case class DataExtractResult private ( - msg: String, - optData: Option[String], - optDataFormat: Option[DataFormat], - optSchemaFormat: Option[String], - optSchemaEngine: Option[String], - optSchema: Option[Schema], - optResultShapeMap: Option[ResultShapeMap] -) { - - /** Convert an extraction result to its JSON representation - * - * @return JSON representation of the extraction result - */ - def toJson: IO[Json] = optSchema match { - case None => IO(Json.fromFields(List(("msg", Json.fromString(msg))))) - case Some(schema) => - val engine = optSchemaEngine.getOrElse(defaultSchemaEngine) - val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat.name) - for { - schemaStr <- schema.serialize(schemaFormat) - } yield Json.fromFields( - List( - ("msg", Json.fromString(msg)), - ("inferredShape", Json.fromString(schemaStr)), - ("schemaFormat", Json.fromString(schemaFormat)), - ("schemaEngine", Json.fromString(engine)) - ) ++ - maybeField(optData, "data", Json.fromString) ++ - maybeField( - optDataFormat, - "dataFormat", - (df: DataFormat) => Json.fromString(df.name) - ) ++ - maybeField( - optResultShapeMap, - "resultShapeMap", - (r: ResultShapeMap) => Json.fromString(r.toString) - ) - ) - } -} - -object DataExtractResult { - - /** @param msg Error message contained in the result - * @return A DataExtractResult consisting of a single error message and no data - */ - def fromMsg(msg: String): DataExtractResult = - DataExtractResult(msg, None, None, None, None, None, None) - - /** @return A DataExtractResult, given all the parameters needed to build it (input, formats and results) - */ - def fromExtraction( - optData: Option[String], - optDataFormat: 
Option[DataFormat], - schemaFormat: String, - schemaEngine: String, - schema: Schema, - resultShapeMap: ResultShapeMap - ): DataExtractResult = - DataExtractResult( - "Shape extracted", - optData, - optDataFormat, - optSchemaFormat = Some(schemaFormat), - optSchemaEngine = Some(schemaEngine), - optSchema = Some(schema), - optResultShapeMap = Some(resultShapeMap) - ) -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala similarity index 56% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfoResult.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala index 99317308..c1c510fb 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala @@ -1,10 +1,13 @@ package es.weso.rdfshape.server.api.routes.data.logic -import es.weso.rdf.PrefixMap +import cats.effect.IO +import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI +import es.weso.rdf.{PrefixMap, RDFReasoner} import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json import es.weso.rdfshape.server.utils.json.JsonUtils._ +import es.weso.utils.IOUtils.{ESIO, either2es, io2es} import io.circe.Json /** Data class representing the output of an "information" operation @@ -16,7 +19,7 @@ import io.circe.Json * @param numberStatements Number of statements in the RDF input * @param prefixMap Prefix map of the RDF input */ -case class DataInfoResult private ( +final case class DataInfo private ( msg: String, data: Option[String], dataFormat: Option[DataFormat], @@ -30,6 +33,7 @@ case class DataInfoResult private ( lazy val pm: PrefixMap = 
prefixMap.getOrElse(PrefixMap.empty) /** Convert an information result to its JSON representation + * * @return JSON information of the extraction result */ def toJson: Json = { @@ -60,18 +64,14 @@ case class DataInfoResult private ( } -object DataInfoResult { +/** Static utilities to obtain information about RDF data + */ +object DataInfo { /** Message attached to the result when created successfully */ val successMessage = "Well formed RDF" - /** @param msg Error message contained in the result - * @return A DataInfoResult consisting of a single error message and no data - */ - def fromMsg(msg: String): DataInfoResult = - DataInfoResult(msg, None, None, None, None, None) - /** @return A DataInfoResult, given all the parameters needed to build it (input, predicates, etc.) */ def fromData( @@ -80,8 +80,8 @@ object DataInfoResult { predicates: Set[IRI], numberStatements: Int, prefixMap: PrefixMap - ): DataInfoResult = - DataInfoResult( + ): DataInfo = + DataInfo( successMessage, data, dataFormat, @@ -89,4 +89,62 @@ object DataInfoResult { Some(numberStatements), Some(prefixMap) ) + + /** For a given RDF input, return information about it + * + * @param rdf Input RDF + * @param data Input data string + * @param dataFormat Input data format + * @return Information about the input RDF: statements, well-formed, etc. 
+ */ + def dataInfoFromRdf( + rdf: RDFReasoner, + data: Option[String], + dataFormat: Option[DataFormat] + ): IO[DataInfo] = { + val either: IO[Either[Throwable, DataInfo]] = (for { + numberStatements <- rdf.getNumberOfStatements() + predicates <- rdf.predicates().compile.toList + pm <- rdf.getPrefixMap + } yield DataInfo.fromData( + data, + dataFormat, + predicates.toSet, + numberStatements, + pm + )).attempt + either.map( + _.fold(e => DataInfo.fromMsg(e.getMessage), r => r) + ) + } + + /** For a given RDF input (plain text), return information about it + * + * @param data Input data string + * @param dataFormatStr Input data format + * @return Information about the input RDF: statements, well-formed, etc. + */ + def dataInfoFromString( + data: String, + dataFormatStr: String + ): IO[DataInfo] = { + val either: ESIO[DataInfo] = for { + dataFormat <- either2es(DataFormat.fromString(dataFormatStr)) + json <- io2es( + RDFAsJenaModel + .fromChars(data, dataFormat.name) + .flatMap( + _.use(rdf => dataInfoFromRdf(rdf, Some(data), Some(dataFormat))) + ) + ) + } yield json + + either.fold(e => DataInfo.fromMsg(e), identity) + } + + /** @param msg Error message contained in the result + * @return A DataInfoResult consisting of a single error message and no data + */ + def fromMsg(msg: String): DataInfo = + DataInfo(msg, None, None, None, None, None) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala index 6a0fd5fa..f7700791 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala @@ -1,23 +1,8 @@ package es.weso.rdfshape.server.api.routes.data.logic -import cats.effect.IO import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.RDFAsJenaModel 
-import es.weso.rdf.nodes.{IRI, Lang} -import es.weso.rdf.{PrefixMap, RDFReasoner} -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - defaultSchemaEngine, - defaultSchemaFormat, - defaultShapeLabel -} -import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions -import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField -import es.weso.schema.{DataFormats, Schema} -import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} -import es.weso.shapemaps.{NodeSelector, ResultShapeMap} -import es.weso.uml.{Schema2UML, UML} -import es.weso.utils.IOUtils._ +import es.weso.rdf.PrefixMap +import es.weso.schema.DataFormats import io.circe.Json /** Static utilities used by the {@link es.weso.rdfshape.server.api.routes.data.service.DataService} @@ -31,202 +16,6 @@ private[api] object DataOperations extends LazyLogging { def dataFormatOrDefault(df: Option[String]): String = df.getOrElse(DataFormats.defaultFormatName) - /** For a given RDF input (plain text), return information about it - * - * @param data Input data string - * @param dataFormatStr Input data format - * @return Information about the input RDF: statements, well-formed, etc. - */ - def dataInfoFromString( - data: String, - dataFormatStr: String - ): IO[DataInfoResult] = { - val either: ESIO[DataInfoResult] = for { - dataFormat <- either2es(DataFormat.fromString(dataFormatStr)) - json <- io2es( - RDFAsJenaModel - .fromChars(data, dataFormat.name) - .flatMap(_.use(rdf => dataInfo(rdf, Some(data), Some(dataFormat)))) - ) - } yield json - - either.fold(e => DataInfoResult.fromMsg(e), identity) - } - - /** For a given RDF input, return information about it - * - * @param rdf Input RDF - * @param data Input data string - * @param dataFormat Input data format - * @return Information about the input RDF: statements, well-formed, etc. 
- */ - def dataInfo( - rdf: RDFReasoner, - data: Option[String], - dataFormat: Option[DataFormat] - ): IO[DataInfoResult] = { - val either: IO[Either[Throwable, DataInfoResult]] = (for { - numberStatements <- rdf.getNumberOfStatements() - predicates <- rdf.predicates().compile.toList - pm <- rdf.getPrefixMap - } yield DataInfoResult.fromData( - data, - dataFormat, - predicates.toSet, - numberStatements, - pm - )).attempt - either.map( - _.fold(e => DataInfoResult.fromMsg(e.getMessage), r => r) - ) - } - - /** Extract Shex from a given RDF input - * - * @param rdf Input RDF - * @param optData Input data (optional) - * @param optDataFormat Input data format (optional) - * @param optNodeSelector Node selector (optional) - * @param optInference Conversion inference (optional) - * @param optEngine Conversion engine (optional) - * @param optSchemaFormat Target schema format (optional) - * @param optLabelName Label name (optional) - * @param relativeBase Relative base - * @return - */ - def dataExtract( - rdf: RDFReasoner, - optData: Option[String], - optDataFormat: Option[DataFormat], - optNodeSelector: Option[String], - optInference: Option[String], - optEngine: Option[String], - optSchemaFormat: Option[SchemaFormat], - optLabelName: Option[String], - relativeBase: Option[IRI] - ): IO[DataExtractResult] = { - val base = relativeBase.map(_.str) - val engine = optEngine.getOrElse(defaultSchemaEngine) - val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) - optNodeSelector match { - case None => - IO.pure( - DataExtractResult.fromMsg("DataExtract: Node selector not specified") - ) - case Some(nodeSelector) => - val es: ESIO[(Schema, ResultShapeMap)] = for { - pm <- io2es(rdf.getPrefixMap) - selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) - eitherResult <- { - logger.debug(s"Node selector: $selector") - - val inferOptions: InferOptions = InferOptions( - inferTypePlainNode = true, - addLabelLang = Some(Lang("en")), - possiblePrefixMap = 
PossiblePrefixes.wikidataPrefixMap, - maxFollowOn = 1, - followOnLs = List(), - followOnThreshold = Some(1), - sortFunction = InferOptions.orderByIRI - ) - io2es( - SchemaInfer.runInferSchema( - rdf, - selector, - engine, - optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel), - inferOptions - ) - ) - } - pair <- either2es(eitherResult) - str <- io2es(pair._1.serialize("ShExC")) - _ <- io2es(IO(logger.debug(s"Extracted; $str"))) - } yield { - pair - } - for { - either <- run_es(es) - } yield either.fold( - err => DataExtractResult.fromMsg(err), - pair => { - val (schema, resultShapeMap) = pair - DataExtractResult.fromExtraction( - optData, - optDataFormat, - schemaFormat.name, - engine, - schema, - resultShapeMap - ) - } - ) - } - } - - // TODO: remove if unused? - def shapeInfer( - rdf: RDFReasoner, - optNodeSelector: Option[String], - optInference: Option[String], - optEngine: Option[String], - optSchemaFormat: Option[SchemaFormat], - optLabelName: Option[String], - relativeBase: Option[IRI], - withUml: Boolean - ): ESIO[Json] = { - val base = relativeBase.map(_.str) - val engine = optEngine.getOrElse(defaultSchemaEngine) - val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) - optNodeSelector match { - case None => ok_es(Json.Null) - case Some(nodeSelector) => - for { - pm <- io2es(rdf.getPrefixMap) - selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) - eitherResult <- io2es { - logger.debug(s"Selector: $selector") - - SchemaInfer.runInferSchema( - rdf, - selector, - engine, - optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel) - ) - } - result <- either2es(eitherResult) - (schemaInfer, resultMap) = result - maybePair <- - if(withUml) - either2es(Schema2UML.schema2UML(schemaInfer).map(Some(_))) - else ok_es(None) - maybeSvg <- io2es(maybePair match { - case None => IO.pure(None) - case Some(pair) => - val (uml, warnings) = pair - uml.toSVG(umlOptions).map(Some(_)) - }) - str <- io2es(schemaInfer.serialize(schemaFormat.name)) - 
} yield Json.fromFields( - List( - ("inferredShape", Json.fromString(str)), - ("format", Json.fromString(schemaFormat.name)), - ("engine", Json.fromString(engine)), - ("nodeSelector", Json.fromString(nodeSelector)) - ) ++ - maybeField( - maybePair, - "uml", - (pair: (UML, List[String])) => { - val (uml, warnings) = pair - Json.fromString(uml.toPlantUML(umlOptions)) - } - ) ++ - maybeField(maybeSvg, "svg", Json.fromString) - ) - } - } - /** Convert a given prefix map to JSON format for API operations * * @param prefixMap Input prefix map diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala similarity index 96% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala index e2fda622..580f1b23 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.data.service +package es.weso.rdfshape.server.api.routes.data.logic import cats.effect._ import cats.implicits._ @@ -15,7 +15,7 @@ import es.weso.utils.IOUtils.err import java.net.URI -case class DataParam( +sealed case class DataParam( data: Option[String], dataURL: Option[String], dataFile: Option[String], @@ -93,11 +93,7 @@ case class DataParam( case Some(dataStr) => val dataFormat: Format = dataFormatFile.getOrElse(DataFormat.defaultFormat) - /* io2es(RDFAsJenaModel.fromString(dataStr, dataFormat.name, - * iriBase).use(rdf => for { iriBase <- mkBase(base) newRdf <- - * extendWithInference(rdf, inference) eitherStr <- - * newRdf.serialize(dataFormat.name,None).attempt optStr = - * eitherStr.toOption } yield 
(optStr,newRdf))) */ + for { iriBase <- mkBase(base) res <- RDFAsJenaModel.fromString( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index c91a9903..20515be0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -12,19 +12,21 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.PartsMap -import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.{ - dataExtract, - dataFormatOrDefault, - dataInfo, +import es.weso.rdfshape.server.api.routes.data.logic.DataExtract.dataExtract +import es.weso.rdfshape.server.api.routes.data.logic.DataInfo.{ + dataInfoFromRdf, dataInfoFromString } +import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.dataFormatOrDefault import es.weso.rdfshape.server.api.routes.data.logic.{ - DataConverter, - DataExtractResult + DataConversion, + DataExtract, + DataParam } -import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQueryParam +import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuery +import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} import es.weso.rdfshape.server.api.utils.OptEitherF._ +import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.schema._ import es.weso.utils.IOUtils._ import io.circe._ @@ -35,45 +37,48 @@ import org.http4s.dsl.Http4sDsl import org.http4s.headers._ import org.http4s.multipart.Multipart -import scala.util.Try - /** API Service to handle RDF data * * @param client HTTP4S client object */ 
-class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { +class DataService(client: Client[IO]) + extends Http4sDsl[IO] + with ApiService + with LazyLogging { + + override val verb: String = "data" /** Describe the API routes handled by this service and the actions performed on each of them */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { // Input RDF data formats include html-microdata, turtle, json-ld... - case GET -> Root / `api` / "data" / "formats" / "input" => + case GET -> Root / `api` / `verb` / "formats" / "input" => val formats = DataFormat.availableFormats.map(_.name) val json = Json.fromValues(formats.map(Json.fromString)) Ok(json) // Output RDF data conversion formats - case GET -> Root / `api` / "data" / "formats" / "output" => + case GET -> Root / `api` / `verb` / "formats" / "output" => val formats = DataFormats.availableFormats.map(_.name) val json = Json.fromValues(formats.map(Json.fromString)) Ok(json) - case GET -> Root / `api` / "data" / "formats" / "default" => + case GET -> Root / `api` / `verb` / "formats" / "default" => val dataFormat = DataFormat.defaultFormat.name Ok(Json.fromString(dataFormat)) - case GET -> Root / `api` / "data" / "inferenceEngines" => + case GET -> Root / `api` / `verb` / "inferenceEngines" => val inferenceEngines = availableInferenceEngines val json = Json.fromValues(inferenceEngines.map(Json.fromString)) Ok(json) - case GET -> Root / `api` / "data" / "inferenceEngines" / "default" => + case GET -> Root / `api` / `verb` / "inferenceEngines" / "default" => val defaultInferenceEngine = defaultInference Ok(Json.fromString(defaultInferenceEngine)) - case GET -> Root / `api` / "data" / "visualize" / "formats" => - val formats = DataConverter.availableGraphFormatNames ++ + case GET -> Root / `api` / `verb` / "visualize" / "formats" => + val formats = DataConversion.availableGraphFormatNames ++ List( "DOT", // DOT is not a visual format but can be used to debug "JSON" // JSON is the format that can 
be used by Cytoscape @@ -86,7 +91,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { DataFormatParam(optDataFormat) => val dataFormat = dataFormatOrDefault(optDataFormat) optDataUrl match { - case None => errJson(s"Must provide a dataUrl") + case None => responseJson("Must provide a dataUrl", BadRequest) case Some(dataUrl) => for { data <- client.expect[String](dataUrl) @@ -98,7 +103,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } yield r } - case req @ POST -> Root / `api` / "data" / "info" => + case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { @@ -115,7 +120,9 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { case None => for { d <- - resourceRdf.use(rdf => dataInfo(rdf, None, dp.dataFormat)) + resourceRdf.use(rdf => + dataInfoFromRdf(rdf, None, dp.dataFormat) + ) json <- IO(d.toJson) ok <- Ok(json) } yield ok @@ -123,7 +130,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } yield response } - case req @ GET -> Root / `api` / "data" / "info" :? + case GET -> Root / `api` / `verb` / "info" :? 
OptDataParam(optData) +& OptDataURLParam(optDataURL) +& CompoundDataParam(optCompoundData) +& @@ -136,7 +143,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } yield df val r: IO[Response[IO]] = either.fold( - str => errJson(str), + str => responseJson(str, BadRequest), optDataFormat => { val dp = DataParam( @@ -153,17 +160,12 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { optActiveDataTab, optCompoundData ) - // for { - // dataParam <- io2f(dp.getData(relativeBase)) - // (maybeStr, resourceRdf) = dataParam - /* json <- io2f(resourceRdf.use(rdf => dataInfo(rdf, maybeStr, - * optDataFormat))) */ - // ok <- Ok(json) - // } yield ok for { dataParam <- io2f(dp.getData(relativeBase)) (maybeStr, resourceRdf) = dataParam - d <- resourceRdf.use(rdf => dataInfo(rdf, maybeStr, optDataFormat)) + d <- resourceRdf.use(rdf => + dataInfoFromRdf(rdf, maybeStr, optDataFormat) + ) json <- IO(d.toJson) ok <- Ok(json) } yield ok @@ -171,7 +173,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { ) r - case req @ POST -> Root / `api` / "data" / "convert" => + case req @ POST -> Root / `api` / `verb` / "convert" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { @@ -182,14 +184,14 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { result <- io2f( resourceRdf.use(rdf => { logger.debug(s"Data convert dataParam: $dp") - DataConverter.rdfConvert(rdf, dp.data, dataFormat, targetFormat) + DataConversion.rdfConvert(rdf, dp.data, dataFormat, targetFormat) }) ) ok <- Ok(result.toJson) } yield ok } - case req @ GET -> Root / `api` / "data" / "convert" :? + case req @ GET -> Root / `api` / `verb` / "convert" :? 
DataParameter(data) +& DataFormatParam(optDataFormat) +& CompoundDataParam(optCompoundData) +& @@ -203,7 +205,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { dataFormat => for { r <- io2f( - DataConverter.dataConvert( + DataConversion.dataConvert( data, dataFormat, optCompoundData, @@ -215,24 +217,23 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { ) } yield result - case req @ POST -> Root / `api` / "data" / "query" => + case req @ POST -> Root / `api` / `verb` / "query" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) logger.debug(s"Data query params map: $partsMap") for { dataParam <- DataParam.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataParam - maybePair <- SparqlQueryParam.getSparqlQuery(partsMap) - resp <- maybePair match { - case Left(err) => errJson(s"Error obtaining Query data $err") - case Right((queryStr, qp)) => - val optQueryStr = qp.queryRaw - logger.debug(s"Data query optQueryStr: $optQueryStr") + maybeQuery <- SparqlQuery.getSparqlQuery(partsMap) + resp <- maybeQuery match { + case Left(err) => + responseJson(s"Error obtaining query data: $err", BadRequest) + case Right(query) => + val optQueryStr = query.query + logger.debug(s"Data query with querystring: $optQueryStr") for { json <- io2f( - resourceRdf.use(rdf => - rdf.queryAsJson(optQueryStr.getOrElse("")) - ) + resourceRdf.use(rdf => rdf.queryAsJson(optQueryStr)) ) v <- Ok(json) } yield v @@ -240,7 +241,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } yield resp } - case req @ POST -> Root / `api` / "data" / "extract" => + case req @ POST -> Root / `api` / `verb` / "extract" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { @@ -259,7 +260,7 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { case Left(err) => for { res <- io2f( - DataExtractResult + DataExtract .fromMsg(s"Error obtaining data: 
${err.getMessage}") .toJson ) @@ -294,9 +295,6 @@ class DataService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } private val relativeBase = ApiDefaults.relativeBase - private def parseInt(s: String): Either[String, Int] = - Try(s.toInt).map(Right(_)).getOrElse(Left(s"$s is not a number")) - private def errJson(msg: String): IO[Response[IO]] = Ok(Json.fromFields(List(("error", Json.fromString(msg))))) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala new file mode 100644 index 00000000..9d969c63 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala @@ -0,0 +1,121 @@ +package es.weso.rdfshape.server.api.routes.endpoint.logic + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQueryTab.{ + SparqlQueryTab, + defaultActiveQueryTab +} + +import java.net.URL +import scala.io.Source +import scala.util.{Failure, Success, Try} + +/** Data class representing a SPARQL query and its current source + * + * @param query Query raw text + * @param activeQueryTab Active tab, used to know which source the query comes from + */ +sealed case class SparqlQuery( + query: String, + activeQueryTab: SparqlQueryTab +) + +private[api] object SparqlQuery extends LazyLogging { + + /** Placeholder value used for the sparql query whenever an empty query is issued/needed. 
+ */ + private val emptyQueryValue = "" + + /** Given a request's parameters, try to extract a SPARQL query from them + * + * @param partsMap Request's parameters + * @return Either the SPARQL query or an error message + */ + def getSparqlQuery( + partsMap: PartsMap + ): IO[Either[String, SparqlQuery]] = + for { + queryStr <- partsMap.optPartValue("query") + queryURL <- partsMap.optPartValue("queryURL") + queryFile <- partsMap.optPartValue("queryFile") + activeQueryTab <- partsMap.optPartValue("activeQueryTab") + + _ = logger.debug( + s"Getting SPARQL from params. Query tab: $activeQueryTab" + ) + + maybeQuery: Either[String, SparqlQuery] = activeQueryTab.getOrElse( + defaultActiveQueryTab + ) match { + case SparqlQueryTab.TEXT => + queryStr match { + case None => Left("No value for the query string") + case Some(queryRaw) => + Right(SparqlQuery(queryRaw, SparqlQueryTab.TEXT)) + } + case SparqlQueryTab.URL => + queryURL match { + case None => Left(s"No value for the query URL") + case Some(queryUrl) => + getUrlContents(queryUrl) match { + case Right(queryRaw) => + Right(SparqlQuery(queryRaw, SparqlQueryTab.URL)) + case Left(err) => Left(err) + } + + } + case SparqlQueryTab.FILE => + queryFile match { + case None => Left(s"No value for the query file") + case Some(queryRaw) => + Right(SparqlQuery(queryRaw, SparqlQueryTab.FILE)) + } + + case other => + val msg = s"Unknown value for activeQueryTab: $other" + logger.warn(msg) + Left(msg) + + } + + } yield maybeQuery + + /** Error-safe way of obtaining the raw contents of a given URL + * + * @param urlString URL to be fetched (String representation) + * @return Either the contents of the URL or an error message + */ + private def getUrlContents(urlString: String): Either[String, String] = { + Try { + val url = new URL(urlString) + val src = Source.fromURL(url) + val str = src.mkString + src.close() + str + } match { + case Success(urlContent) => Right(urlContent) + case Failure(exception) => + val msg = + s"Error 
obtaining data from url $urlString: ${exception.getMessage}" + logger.warn(msg) + Left(msg) + } + } +} + +/** Enumeration of the different possible QueryTabs sent by the client. + * The tab sent indicates to the API whether the query was sent as raw text, as a URL + * to be fetched or as a text file containing the query. + * In case the client submits the query in several formats, the selected tab will indicate the preferred format. + */ +private[logic] object SparqlQueryTab extends Enumeration { + type SparqlQueryTab = String + + val TEXT = "#queryTextArea" + val URL = "#queryUrl" + val FILE = "#queryFile" + + val defaultActiveQueryTab: SparqlQueryTab = TEXT +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQueryParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQueryParam.scala deleted file mode 100644 index 0c9d6464..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQueryParam.scala +++ /dev/null @@ -1,104 +0,0 @@ -package es.weso.rdfshape.server.api.routes.endpoint.logic - -import cats.effect.IO -import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultActiveQueryTab -import es.weso.rdfshape.server.api.routes.PartsMap - -import java.net.URL -import scala.io.Source -import scala.util.Try - -/** Data class representing a SPARQL query - * - * @param query Query string - */ -case class SparqlQuery(query: String) - -/** Data class representing the parameters needed for SPARQL querying - * - * @param queryRaw Query raw text (optional) - * @param queryURL Query containing URL (optional) - * @param queryFile Query containing file (optional) - * @param activeQueryTab Active tab, used to know which query source to use - */ -case class SparqlQueryParam( - queryRaw: Option[String], - queryURL: Option[String], - queryFile: Option[String], - activeQueryTab: Option[String] -) { - - def getSparqlQuery: (Option[String], 
Either[String, SparqlQuery]) = { - activeQueryTab.getOrElse(defaultActiveQueryTab) match { - case "#queryUrl" => - queryURL match { - case None => (None, Left(s"No value for queryURL")) - case Some(queryUrl) => - Try { - val url = new URL(queryUrl) - val src = Source.fromURL(url) - val str = src.mkString - src.close() - str - }.toEither match { - case Left(err) => - ( - None, - Left( - s"Error obtaining data from url $queryUrl: ${err.getMessage} " - ) - ) - case Right(str) => (Some(str), Right(SparqlQuery(str))) - } - } - case "#queryFile" => - queryFile match { - case None => (None, Left(s"No value for queryFile")) - case Some(queryStr) => - (Some(queryStr), Right(SparqlQuery(queryStr))) - } - case "#queryTextArea" => - queryRaw match { - case None => (None, Right(SparqlQuery(""))) - case Some(queryText) => - (Some(queryText), Right(SparqlQuery(queryText))) - } - case other => (None, Left(s"Unknown value for activeQueryTab: $other")) - } - } - -} - -object SparqlQueryParam { - - /** Given a request's parameters, try to extract a SPARQL query from them - * - * @param partsMap Request's parameter - * @return The SPARQL query or an error message - */ - private[api] def getSparqlQuery( - partsMap: PartsMap - ): IO[Either[String, (SparqlQuery, SparqlQueryParam)]] = for { - qp <- mkQueryParam(partsMap) - } yield { - val (maybeStr, maybeQuery) = qp.getSparqlQuery - maybeQuery match { - case Left(str) => Left(str) - case Right(query) => Right((query, qp.copy(queryRaw = Some(query.query)))) - } - } - - private[api] def mkQueryParam(partsMap: PartsMap): IO[SparqlQueryParam] = - for { - queryStr <- partsMap.optPartValue("query") - queryURL <- partsMap.optPartValue("queryURL") - queryFile <- partsMap.optPartValue("queryFile") - activeQueryTab <- partsMap.optPartValue("activeQueryTab") - } yield SparqlQueryParam( - queryStr, - queryURL, - queryFile, - activeQueryTab - ) - -} diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index dfe5ef50..05bb5fb5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -6,25 +6,25 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.{Endpoint => EndpointJena} import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{ LimitParam, OptEndpointParam, OptNodeParam } -import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.api.routes.endpoint.logic.Endpoint.{ getEndpointAsRDFReader, getEndpointInfo, getEndpointUrl } +import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuery.getSparqlQuery import es.weso.rdfshape.server.api.routes.endpoint.logic.{ Endpoint, Outgoing, - SparqlQuery, - SparqlQueryParam + SparqlQuery } +import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} +import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.rdfshape.server.utils.numeric.NumericUtils import es.weso.utils.IOUtils._ import io.circe.Json @@ -40,15 +40,16 @@ import org.http4s.multipart._ */ class EndpointService(client: Client[IO]) extends Http4sDsl[IO] + with ApiService with LazyLogging { - private val relativeBase = ApiDefaults.relativeBase + override val verb: String = "endpoint" /** Describe the API routes handled by this service and the actions performed on each of them */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - case req @ POST -> Root / `api` / "endpoint" / "query" => + case req @ POST -> Root / `api` / 
`verb` / "query" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) @@ -58,31 +59,31 @@ class EndpointService(client: Client[IO]) either <- EitherT .liftF[IO, String, Either[ String, - (SparqlQuery, SparqlQueryParam) + SparqlQuery ]]( - SparqlQueryParam.getSparqlQuery(partsMap) + getSparqlQuery(partsMap) ) - pair <- EitherT.fromEither[IO](either) - (_, qp) = pair - optQueryStr = qp.queryRaw + query <- EitherT.fromEither[IO](either) + queryString = query.query json <- { logger.debug( - s"Query to endpoint $endpoint: ${optQueryStr.getOrElse("")}" + s"Query to endpoint $endpoint: $queryString" ) - io2es(endpoint.queryAsJson(optQueryStr.getOrElse(""))) + io2es(endpoint.queryAsJson(queryString)) } } yield json for { either <- r.value resp <- either.fold( - e => errJson(s"Error querying endpoint: $e"), + e => + responseJson(s"Error querying endpoint: $e", InternalServerError), json => Ok(json) ) } yield resp } - case req @ POST -> Root / `api` / "endpoint" / "info" => + case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) @@ -95,30 +96,31 @@ class EndpointService(client: Client[IO]) for { either <- r.value resp <- either.fold( - e => errJson(s"Error obtaining info on Endpoint $e"), + e => + responseJson( + s"Error obtaining info on Endpoint $e", + InternalServerError + ), json => Ok(json) ) } yield resp } } - case GET -> Root / `api` / "endpoint" / "outgoing" :? + case GET -> Root / `api` / `verb` / "outgoing" :? 
OptEndpointParam(optEndpoint) +& OptNodeParam(optNode) +& LimitParam(optLimit) => for { eitherOutgoing <- getOutgoing(optEndpoint, optNode, optLimit).value resp <- eitherOutgoing.fold( - (s: String) => errJson(s"Error: $s"), + (s: String) => responseJson(s"Error: $s", InternalServerError), (outgoing: Outgoing) => Ok(outgoing.toJson) ) } yield resp } - private def errJson(msg: String): IO[Response[IO]] = - Ok(Json.fromFields(List(("error", Json.fromString(msg))))) - private def getOutgoing( optEndpoint: Option[String], optNode: Option[String], diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala index c48c2f9c..6a17ec51 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala @@ -1,21 +1,25 @@ package es.weso.rdfshape.server.api.routes.fetch.service import cats.effect._ +import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api +import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.UrlParam import org.http4s._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl import scalaj.http.Http -class FetchService() extends Http4sDsl[IO] { +class FetchService() extends Http4sDsl[IO] with ApiService with LazyLogging { + + override val verb: String = "fetch" /** Describe the API routes handled by this service and the actions performed on each of them */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { // Query URL and return the response - case GET -> Root / `api` / "fetch" :? + case GET -> Root / `api` / `verb` :? 
UrlParam(url) => try { val res = Http(url).asString diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala index 9240ef5b..5beacd15 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala @@ -3,6 +3,7 @@ package es.weso.rdfshape.server.api.routes.permalink.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api +import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{ UrlCodeParam, UrlParam @@ -29,13 +30,14 @@ import scala.util.Random */ class PermalinkService(client: Client[IO]) extends Http4sDsl[IO] + with ApiService with LazyLogging { lazy val mongoClient: MongoClient = MongoClient(mongoConnectionString) lazy val db: MongoDatabase = mongoClient.getDatabase(mongoDatabase) lazy val collection: MongoCollection[Document] = db.getCollection(collectionName) - + override val verb: String = "permalink" // Utils for url generation val random: Random.type = Random @@ -44,7 +46,7 @@ class PermalinkService(client: Client[IO]) val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { // Insert a reference to the permalink in DB - case GET -> Root / `api` / "permalink" / "generate" :? + case GET -> Root / `api` / `verb` / "generate" :? UrlParam(url) => // Store only query path and query params val urlObj = new URL(url) @@ -100,7 +102,7 @@ class PermalinkService(client: Client[IO]) } // Retrieve a URL given the link - case GET -> Root / `api` / "permalink" / "get" :? + case GET -> Root / `api` / `verb` / "get" :? 
UrlCodeParam(urlCode) => try { val code = urlCode.toLong diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index dccbfe3b..6f746a5b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -9,7 +9,7 @@ import es.weso.rdf.{RDFBuilder, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.api.routes.data.service.DataParam +import es.weso.rdfshape.server.api.routes.data.logic.DataParam import es.weso.rdfshape.server.api.routes.schema.service.{ SchemaParam, TriggerModeParam diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 6db5d7ae..e16e926c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -14,15 +14,15 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json -import es.weso.rdfshape.server.api.routes.data.service.DataParam +import es.weso.rdfshape.server.api.routes.data.logic.DataParam import 
es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ import es.weso.rdfshape.server.api.routes.schema.logic.{ SchemaConversionResult, SchemaInfo, SchemaInfoResult } +import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.schema._ @@ -43,18 +43,23 @@ import org.http4s.multipart.Multipart * * @param client HTTP4S client object */ -class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { +class SchemaService(client: Client[IO]) + extends Http4sDsl[IO] + with ApiService + with LazyLogging { + + override val verb: String = "schema" /** Describe the API routes handled by this service and the actions performed on each of them */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - case GET -> Root / `api` / "schema" / "engines" => + case GET -> Root / `api` / `verb` / "engines" => val engines = Schemas.availableSchemaNames val json = Json.fromValues(engines.map(str => Json.fromString(str))) Ok(json) - case GET -> Root / `api` / "schema" / "engines" / "shacl" => + case GET -> Root / `api` / `verb` / "engines" / "shacl" => val shaclSchemas = List(Schemas.shaclex, Schemas.jenaShacl, Schemas.shaclTQ) val json = Json.fromValues( @@ -62,12 +67,12 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { ) Ok(json) - case GET -> Root / `api` / "schema" / "engines" / "default" => + case GET -> Root / `api` / `verb` / "engines" / "default" => val schemaEngine = Schemas.defaultSchemaName val json = Json.fromString(schemaEngine) Ok(json) - case GET -> Root / `api` / "schema" / "formats" :? + case GET -> Root / `api` / `verb` / "formats" :? 
SchemaEngineParam(optSchemaEngine) => val schemaEngine = optSchemaEngine.getOrElse(Schemas.defaultSchemaName) val r: IO[Json] = Schemas @@ -93,12 +98,12 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { ) io2f(r).flatMap(json => Ok(json)) - case GET -> Root / `api` / "schema" / "triggerModes" => + case GET -> Root / `api` / `verb` / "triggerModes" => val triggerModes = ValidationTrigger.triggerValues.map(_._1) val json = Json.fromValues(triggerModes.map(Json.fromString)) Ok(json) - case GET -> Root / `api` / "schema" / "info" :? + case GET -> Root / `api` / `verb` / "info" :? OptSchemaParam(optSchema) +& SchemaFormatParam(optSchemaFormat) +& SchemaEngineParam(optSchemaEngine) => @@ -130,7 +135,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { ) } yield r - case req @ POST -> Root / `api` / "schema" / "info" => + case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) @@ -153,7 +158,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } - case GET -> Root / `api` / "schema" / "convert" :? + case GET -> Root / `api` / `verb` / "convert" :? 
OptSchemaParam(optSchema) +& SchemaFormatParam(optSchemaFormat) +& SchemaEngineParam(optSchemaEngine) +& @@ -199,7 +204,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { ) } yield r - case req @ POST -> Root / `api` / "schema" / "convert" => + case req @ POST -> Root / `api` / `verb` / "convert" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) @@ -234,7 +239,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } - case req @ POST -> Root / `api` / "schema" / "visualize" => + case req @ POST -> Root / `api` / `verb` / "visualize" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) @@ -253,7 +258,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } - case req @ POST -> Root / `api` / "schema" / "cytoscape" => + case req @ POST -> Root / `api` / `verb` / "cytoscape" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) @@ -271,7 +276,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { } } - case req @ GET -> Root / `api` / "schema" / "visualize" :? + case req @ GET -> Root / `api` / `verb` / "visualize" :? SchemaURLParam(optSchemaURL) +& OptSchemaParam(optSchema) +& SchemaFormatParam(optSchemaFormatStr) +& @@ -321,7 +326,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { ) } yield v - case req @ GET -> Root / `api` / "schema" / "validate" :? + case req @ GET -> Root / `api` / `verb` / "validate" :? 
OptDataParam(optData) +& OptDataURLParam(optDataURL) +& DataFormatParam(maybeDataFormatStr) +& @@ -445,7 +450,7 @@ class SchemaService(client: Client[IO]) extends Http4sDsl[IO] with LazyLogging { eitherResult } - case req @ POST -> Root / `api` / "schema" / "validate" => + case req @ POST -> Root / `api` / `verb` / "validate" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index 0dd51d68..e5159284 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -3,8 +3,8 @@ package es.weso.rdfshape.server.api.routes.shapemap.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapInfoResult +import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson import es.weso.shapemaps.ShapeMap import io.circe._ @@ -20,18 +20,21 @@ import org.http4s.multipart._ */ class ShapeMapService(client: Client[IO]) extends Http4sDsl[IO] + with ApiService with LazyLogging { + override val verb: String = "shapeMap" + /** Describe the API routes handled by this service and the actions performed on each of them */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - case GET -> Root / `api` / "shapeMap" / "formats" => + case GET -> Root / `api` / `verb` / "formats" => val formats = ShapeMap.availableFormats val json = Json.fromValues(formats.map(str => Json.fromString(str))) Ok(json) - case req @ POST -> Root / `api` / 
"shapeMap" / "info" => + case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) val t: IO[(ShapeMap, ShapeMapParam)] = diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala index 3a6d0212..75c04fb4 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala @@ -1,7 +1,9 @@ package es.weso.rdfshape.server.api.routes.shex.service import cats.effect._ +import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api +import es.weso.rdfshape.server.api.routes.ApiService import es.weso.schema._ import io.circe._ import org.http4s._ @@ -9,13 +11,18 @@ import org.http4s.circe._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl -class ShExService(client: Client[IO]) extends Http4sDsl[IO] { +class ShExService(client: Client[IO]) + extends Http4sDsl[IO] + with ApiService + with LazyLogging { + + override val verb: String = "shEx" /** Describe the API routes handled by this service and the actions performed on each of them */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - case GET -> Root / `api` / "shEx" / "formats" => + case GET -> Root / `api` / `verb` / "formats" => val formats = Schemas.availableFormats val json = Json.fromValues(formats.map(str => Json.fromString(str))) Ok(json) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala index 6cb991d0..2fff7aa7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala @@ -14,7 +14,7 @@ import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.schemaRe import es.weso.rdfshape.server.api.definitions._ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} import es.weso.rdfshape.server.api.utils.Http4sUtils._ import es.weso.schema.{Schema, ShapeMapTrigger} import es.weso.schemaInfer.{InferOptions, SchemaInfer} @@ -42,8 +42,11 @@ import scala.util.matching.Regex */ class WikidataService(client: Client[IO]) extends Http4sDsl[IO] + with ApiService with LazyLogging { + override val verb: String = "wikidata" + val wikidataEntityUrl = uri"http://www.wikidata.org/entity" val apiUri = uri"/api/wikidata/entity" val wikidataUri: Uri = uri"https://query.wikidata.org/sparql" @@ -55,11 +58,11 @@ class WikidataService(client: Client[IO]) */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - case GET -> Root / `api` / "wikidata" / "test" => { + case GET -> Root / `api` / `verb` / "test" => { Ok("Wikidata Test") } - case GET -> Root / `api` / "wikidata" / "entityLabel" :? + case GET -> Root / `api` / `verb` / "entityLabel" :? WdEntityParam(entity) +& LanguageParam(language) => val uri = Uri.unsafeFromString( @@ -80,7 +83,7 @@ class WikidataService(client: Client[IO]) resp <- Ok(either.fold(Json.fromString, identity)) } yield resp - case GET -> Root / `api` / "wikidata" / "schemaContent" :? + case GET -> Root / `api` / `verb` / "schemaContent" :? WdSchemaParam(wdSchema) => { val uri = uri"https://www.wikidata.org".withPath( Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") @@ -107,7 +110,7 @@ class WikidataService(client: Client[IO]) } - case GET -> Root / `api` / "wikidata" / "searchEntity" :? 
+ case GET -> Root / `api` / `verb` / "searchEntity" :? OptEndpointParam(endpoint) +& LabelParam(label) +& LanguageParam(language) +& @@ -151,7 +154,7 @@ class WikidataService(client: Client[IO]) } yield resp } - case GET -> Root / `api` / "wikidata" / "searchProperty" :? + case GET -> Root / `api` / `verb` / "searchProperty" :? OptEndpointParam(endpoint) +& LabelParam(label) +& LanguageParam(language) +& @@ -193,7 +196,7 @@ class WikidataService(client: Client[IO]) } yield resp } - case GET -> Root / `api` / "wikidata" / "searchLexeme" :? + case GET -> Root / `api` / `verb` / "searchLexeme" :? LabelParam(label) +& LanguageParam(language) +& LimitParam(maybelimit) +& @@ -231,7 +234,7 @@ class WikidataService(client: Client[IO]) } yield resp } - case GET -> Root / `api` / "wikidata" / "languages" => { + case GET -> Root / `api` / `verb` / "languages" => { val uri = uri"https://www.wikidata.org" .withPath(Uri.Path.unsafeFromString("/w/api.php")) @@ -265,7 +268,7 @@ class WikidataService(client: Client[IO]) } yield resp } - case req @ POST -> Root / `api` / "wikidata" / "query" => + case req @ POST -> Root / `api` / `verb` / "query" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) @@ -300,7 +303,7 @@ class WikidataService(client: Client[IO]) } } - case req @ POST -> Root / `api` / "wikidata" / "extract" => { + case req @ POST -> Root / `api` / `verb` / "extract" => { req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) val r: EitherT[IO, String, Response[IO]] = for { @@ -349,7 +352,7 @@ class WikidataService(client: Client[IO]) } // TODO: This one doesn't work. 
It gives a timeout response - case req @ POST -> Root / `api` / "wikidata" / "shexer" => { + case req @ POST -> Root / `api` / `verb` / "shexer" => { req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) val r: EitherT[IO, String, Response[IO]] = for { @@ -378,7 +381,7 @@ class WikidataService(client: Client[IO]) } } - case req @ POST -> Root / `api` / "wikidata" / "validate" => { + case req @ POST -> Root / `api` / `verb` / "validate" => { logger.debug(s"Wikidata validate request: $req") req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) From 55b5f7a02a5e21243507d9ac78a4d4842b796985 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Tue, 31 Aug 2021 18:17:24 +0200 Subject: [PATCH 16/32] Further refactoring. --- docs/api-usage/usage_tutorial.md | 4 +- .../routes/IncomingRequestParameters.scala | 148 ----- .../rdfshape/server/api/routes/PartsMap.scala | 53 -- .../api/routes/api/service/APIService.scala | 2 +- .../routes/data/logic/DataConversion.scala | 34 +- .../api/routes/data/logic/DataExtract.scala | 2 +- .../api/routes/data/logic/DataInfo.scala | 118 ++-- .../api/routes/data/logic/DataParam.scala | 137 +++-- .../api/routes/data/service/DataService.scala | 284 ++++----- .../api/routes/endpoint/logic/Endpoint.scala | 39 +- .../api/routes/endpoint/logic/Outgoing.scala | 38 ++ .../routes/endpoint/logic/SparqlQuery.scala | 50 +- .../endpoint/service/EndpointService.scala | 121 ++-- .../routes/fetch/service/FetchService.scala | 37 +- .../routes/permalink/logic/Permalink.scala | 26 + .../permalink/service/PermalinkService.scala | 47 +- .../schema/logic/SchemaConversionResult.scala | 2 +- .../api/routes/schema/logic/SchemaInfo.scala | 8 +- .../schema/logic/SchemaOperations.scala | 153 ++++- .../api/routes/schema/logic/SchemaParam.scala | 289 ++++++++++ .../routes/schema/service/SchemaParam.scala | 321 ----------- .../routes/schema/service/SchemaService.scala | 542 +++++------------- .../schema/service/TriggerModeParam.scala | 25 +- 
.../api/routes/shapemap/logic/ShapeMap.scala | 192 +++++++ .../shapemap/service/ShapeMapParam.scala | 142 ----- .../shapemap/service/ShapeMapService.scala | 51 +- .../routes/wikibase/WikibaseSchemaParam.scala | 7 +- .../api/routes/wikibase/WikidataService.scala | 43 +- .../IncomingRequestParameters.scala | 327 +++++++++++ .../api/utils/parameters/PartsMap.scala | 68 +++ .../server/utils/json/JsonUtils.scala | 2 +- .../utils/networking/NetworkingUtils.scala | 33 ++ 32 files changed, 1779 insertions(+), 1566 deletions(-) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/IncomingRequestParameters.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/PartsMap.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapParam.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala diff --git a/docs/api-usage/usage_tutorial.md b/docs/api-usage/usage_tutorial.md index 5a9e2b47..bae9d3a8 100644 --- a/docs/api-usage/usage_tutorial.md +++ b/docs/api-usage/usage_tutorial.md @@ -11,13 +11,13 @@ title: API Tutorial Validate RDF data with ShEx. 
Example from the [Validating RDF book](https://book.validatingrdf.com/): -- [Example](https://rdfshape.weso.es/shExValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0APREFIX%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%0A%0A%3Aalice%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22unspecified%22%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.%0A%0A%3Adave%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Dave%22%3B%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22XYY%22%3B%20%20%20%20%20%20%20%20%20%20%23%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%201980%20.%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%201980%20is%20not%20an%20xsd%3Adate%20%2A%29%0A%0A%3Aemily%20schema%3Aname%20%22Emily%22%2C%20%22Emilee%22%20%3B%20%20%20%
20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20.%20%23%20%25%2A%20too%20many%20schema%3Anames%20%2A%29%0A%0A%3Afrank%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Frank%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%3A%20%20%20%20%20%20%20%20schema%3AMale%20.%20%20%20%23%20%25%2A%20missing%20schema%3Aname%20%2A%29%0A%0A%3Agrace%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Grace%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%23%20%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20_%3Ax%20.%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5C_%3Ax%20is%20not%20an%20IRI%20%2A%29%0A%0A%3Aharold%20schema%3Aname%20%20%20%20%20%20%20%20%20%22Harold%22%20%3B%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20schema%3AMale%20%3B%20%0A%20%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%3Agrace%20.%20%20%20%20%20%20%23%20%25%2A%20%3Agrace%20does%20not%20conform%20to%20%3AUser%20%2A%29&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0A%0A%3AUser%20%7B%0A%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20xsd%3Astring%20%20%3B%0A%20%20schema%3AbirthDate%20%20%20%20%20xsd%3Adate%3F%20%20%3B%0A%20%20schema%3Agender%20%20%20%20%20%20%20%20%5B%20schema%3AMale%20schema%3AFemale%20%5D%20OR%20xsd%3Astring%20%3B%0A%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20IRI%20%40%3AUser%2A%0A%7D&schemaEmbedded=false&schemaEngine=ShEx&schemaFormat=ShExC&schemaFormatTextArea=ShExC&shapeMap=%3Aali
ce%40%3AUser%2C%3Abob%40%3AUser%2C%3Acarol%40%3AUser%2C%3Aemily%40%3AUser%2C%3Afrank%40%3AUser%2C%3Agrace%40%3AUser%2C%3Aharold%40%3AUser&shapeMapActiveTab=%23shapeMapTextArea&shapeMapFormat=Compact&shapeMapFormatTextArea=Compact&triggerMode=shapeMap) +- [Example](https://rdfshape.weso.es/shExValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0APREFIX%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%0A%0A%3Aalice%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22unspecified%22%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.%0A%0A%3Adave%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Dave%22%3B%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22XYY%22%3B%20%20%20%20%20%20%20%20%20%20%23%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20
%20%201980%20.%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%201980%20is%20not%20an%20xsd%3Adate%20%2A%29%0A%0A%3Aemily%20schema%3Aname%20%22Emily%22%2C%20%22Emilee%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20.%20%23%20%25%2A%20too%20many%20schema%3Anames%20%2A%29%0A%0A%3Afrank%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Frank%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%3A%20%20%20%20%20%20%20%20schema%3AMale%20.%20%20%20%23%20%25%2A%20missing%20schema%3Aname%20%2A%29%0A%0A%3Agrace%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Grace%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%23%20%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20_%3Ax%20.%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5C_%3Ax%20is%20not%20an%20IRI%20%2A%29%0A%0A%3Aharold%20schema%3Aname%20%20%20%20%20%20%20%20%20%22Harold%22%20%3B%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20schema%3AMale%20%3B%20%0A%20%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%3Agrace%20.%20%20%20%20%20%20%23%20%25%2A%20%3Agrace%20does%20not%20conform%20to%20%3AUser%20%2A%29&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0A%0A%3AUser%20%7B%0A%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20xsd%3Astring%20%20%3B%0A%20%20schema%3AbirthDate%20%20%20%20%20xsd%3Adate%3F%20%20%3B%0A%20%20schema%3Agender%20%20%20%20%20%20%20%20%5B%20schema%3AMale%20schema%3AFemale%20%5D%20OR%20xsd%3Astring%20
%3B%0A%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20IRI%20%40%3AUser%2A%0A%7D&schemaEngine=ShEx&schemaFormat=ShExC&schemaFormatTextArea=ShExC&shapeMap=%3Aalice%40%3AUser%2C%3Abob%40%3AUser%2C%3Acarol%40%3AUser%2C%3Aemily%40%3AUser%2C%3Afrank%40%3AUser%2C%3Agrace%40%3AUser%2C%3Aharold%40%3AUser&shapeMapActiveTab=%23shapeMapTextArea&shapeMapFormat=Compact&shapeMapFormatTextArea=Compact&triggerMode=shapeMap) ## SHACL Validate RDF data with SHACL. Example from the Validating RDF book: -- [Example](https://rdfshape.weso.es/shaclValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%0A%3Aalice%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20
%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%3AUserShape%20a%20sh%3ANodeShape%3B%0A%20%20%20sh%3AtargetClass%20%3AUser%20%3B%0A%20%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%201%0A%20%20%20%20sh%3Apath%20%20%20%20%20schema%3Aname%20%3B%20%0A%20%20%20%20sh%3AminCount%201%3B%20%0A%20%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20%20sh%3Adatatype%20xsd%3Astring%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%202%0A%20%20%20sh%3Apath%20schema%3Agender%20%3B%0A%20%20%20sh%3AminCount%201%3B%0A%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20sh%3Aor%20%28%0A%20%20%20%20%5B%20sh%3Ain%20%28schema%3AMale%20schema%3AFemale%29%20%5D%0A%20%20%20%20%5B%20sh%3Adatatype%20xsd%3Astring%5D%0A%20%20%20%29%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blan
k%20node%203%20%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3AbirthDate%20%3B%20%0A%20%20%20sh%3AmaxCount%201%3B%20%0A%20%20%20sh%3Adatatype%20xsd%3Adate%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%204%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3Aknows%20%3B%20%0A%20%20%20sh%3AnodeKind%20sh%3AIRI%20%3B%0A%20%20%20sh%3Aclass%20%20%20%20%3AUser%20%3B%0A%20%20%5D%20.&schemaEmbedded=false&schemaEngine=JenaSHACL&schemaFormat=TURTLE&schemaFormatTextArea=TURTLE&schemaInference=none&triggerMode=targetDecls) +- [Example](https://rdfshape.weso.es/shaclValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%0A%3Aalice%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schem
a%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%3AUserShape%20a%20sh%3ANodeShape%3B%0A%20%20%20sh%3AtargetClass%20%3AUser%20%3B%0A%20%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%201%0A%20%20%20%20sh%3Apath%20%20%20%20%20schema%3Aname%20%3B%20%0A%20%20%20%20sh%3AminCount%201%3B%20%0A%20%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20%20sh%3Adatatype%20xsd%3Astring%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%202%0A%20%20%20sh%3Apath%20schema%3Agender%20%3B%0A%20%20%20sh%3AminCount%201%3B%0A%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20sh%3Aor%20%28%0A%20%20%20%20%5B%20sh%3Ain%20%28schema%3AMale%20schema%3AFemale%29%20%5D%0A%20%20%20%20%5B%20sh%3Adatatype%20xsd%3Astring%5D%0A%20%20%20%29%0A%20%20
%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%203%20%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3AbirthDate%20%3B%20%0A%20%20%20sh%3AmaxCount%201%3B%20%0A%20%20%20sh%3Adatatype%20xsd%3Adate%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%204%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3Aknows%20%3B%20%0A%20%20%20sh%3AnodeKind%20sh%3AIRI%20%3B%0A%20%20%20sh%3Aclass%20%20%20%20%3AUser%20%3B%0A%20%20%5D%20.&schemaEngine=JenaSHACL&schemaFormat=TURTLE&schemaFormatTextArea=TURTLE&schemaInference=none&triggerMode=targetDecls) ## Data + Schema diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/IncomingRequestParameters.scala deleted file mode 100644 index bfc99e89..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/IncomingRequestParameters.scala +++ /dev/null @@ -1,148 +0,0 @@ -package es.weso.rdfshape.server.api.routes - -import org.http4s.dsl.io.{ - OptionalQueryParamDecoderMatcher, - QueryParamDecoderMatcher -} - -/** Definitions for all the possible parameters that may come from client requests - */ -object IncomingRequestParameters { - lazy val data = "data" - lazy val compoundData = "compoundData" - lazy val dataURL = "dataURL" - lazy val endpoint = "endpoint" - lazy val endpoints = "endpoints" - lazy val dataFormat = "dataFormat" - lazy val targetDataFormat = "targetDataFormat" - lazy val schema = "schema" - lazy val entity = "entity" - lazy val node = "node" - lazy val withDot = "withDot" - lazy val schemaURL = "schemaURL" - lazy val schemaFormat = "schemaFormat" - lazy val shape = "shape" - lazy val url = "url" - lazy val urlCode = "urlCode" - lazy val hostname = "hostname" - - object DataParameter extends OptionalQueryParamDecoderMatcher[String](data) - - object 
OptDataParam extends OptionalQueryParamDecoderMatcher[String](data) - - object OptEndpointParam - extends OptionalQueryParamDecoderMatcher[String](endpoint) - - object OptDataURLParam - extends OptionalQueryParamDecoderMatcher[String](dataURL) - - object DataFormatParam - extends OptionalQueryParamDecoderMatcher[String](dataFormat) - - object CompoundDataParam - extends OptionalQueryParamDecoderMatcher[String](compoundData) - - object TargetDataFormatParam - extends OptionalQueryParamDecoderMatcher[String](targetDataFormat) - - object OptSchemaParam extends OptionalQueryParamDecoderMatcher[String](schema) - - object OptEntityParam extends OptionalQueryParamDecoderMatcher[String](entity) - - object OptNodeParam extends OptionalQueryParamDecoderMatcher[String](node) - - object OptWithDotParam - extends OptionalQueryParamDecoderMatcher[Boolean](withDot) - - object SchemaURLParam - extends OptionalQueryParamDecoderMatcher[String](schemaURL) - - object SchemaFormatParam - extends OptionalQueryParamDecoderMatcher[String](schemaFormat) - - object OptNodeSelectorParam - extends OptionalQueryParamDecoderMatcher[String]("nodeSelector") - - object SchemaEngineParam - extends OptionalQueryParamDecoderMatcher[String]("schemaEngine") - - object OptView extends OptionalQueryParamDecoderMatcher[String]("view") - - object TargetSchemaFormatParam - extends OptionalQueryParamDecoderMatcher[String]("targetSchemaFormat") - - object TargetSchemaEngineParam - extends OptionalQueryParamDecoderMatcher[String]("targetSchemaEngine") - - object OptTriggerModeParam - extends OptionalQueryParamDecoderMatcher[String]("triggerMode") - - object NodeParam extends OptionalQueryParamDecoderMatcher[String](node) - - object ShapeParam extends OptionalQueryParamDecoderMatcher[String](shape) - - // object NameParam extends OptionalQueryParamDecoderMatcher[String]("name") - object ShapeMapParameter - extends OptionalQueryParamDecoderMatcher[String]("shapeMap") - - object ShapeMapParameterAlt - extends 
OptionalQueryParamDecoderMatcher[String]("shape-map") - - object ShapeMapURLParameter - extends OptionalQueryParamDecoderMatcher[String]("shapeMapURL") - - object ShapeMapFileParameter - extends OptionalQueryParamDecoderMatcher[String]("shapeMapFile") - - object ShapeMapFormatParam - extends OptionalQueryParamDecoderMatcher[String]("shapeMapFormat") - - object SchemaEmbedded - extends OptionalQueryParamDecoderMatcher[Boolean]("schemaEmbedded") - - object InferenceParam - extends OptionalQueryParamDecoderMatcher[String]("inference") - - object ExamplesParam - extends OptionalQueryParamDecoderMatcher[String]("examples") - - object ManifestURLParam - extends OptionalQueryParamDecoderMatcher[String]("manifestURL") - - object OptExamplesParam - extends OptionalQueryParamDecoderMatcher[String]("examples") - - object OptQueryParam extends OptionalQueryParamDecoderMatcher[String]("query") - - object OptActiveDataTabParam - extends OptionalQueryParamDecoderMatcher[String]("activeDataTab") - - object OptActiveSchemaTabParam - extends OptionalQueryParamDecoderMatcher[String]("activeSchemaTab") - - object OptActiveShapeMapTabParam - extends OptionalQueryParamDecoderMatcher[String]("activeShapeMapTab") - - object OptActiveQueryTabParam - extends OptionalQueryParamDecoderMatcher[String]("activeQueryTab") - - object WdEntityParam extends QueryParamDecoderMatcher[String]("wdEntity") - - object WdSchemaParam extends QueryParamDecoderMatcher[String]("wdSchema") - - object LanguageParam extends QueryParamDecoderMatcher[String]("language") - - object LabelParam extends QueryParamDecoderMatcher[String]("label") - - object UrlParam extends QueryParamDecoderMatcher[String](url) - - object UrlCodeParam extends QueryParamDecoderMatcher[String](urlCode) - - object HostNameParam extends QueryParamDecoderMatcher[String](hostname) - - object LimitParam extends OptionalQueryParamDecoderMatcher[String]("limit") - - object ContinueParam - extends 
OptionalQueryParamDecoderMatcher[String]("continue") - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/PartsMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/PartsMap.scala deleted file mode 100644 index 76441751..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/PartsMap.scala +++ /dev/null @@ -1,53 +0,0 @@ -package es.weso.rdfshape.server.api.routes - -import cats.effect.IO -import cats.implicits._ -import fs2.text.utf8Decode -import org.http4s.multipart.Part - -case class PartsMap private (map: Map[String, Part[IO]]) { - - def optPartValueBoolean(key: String): IO[Option[Boolean]] = - map.get(key) match { - case Some(part) => - part.body.through(utf8Decode).compile.foldMonoid.map { - case "true" => Some(true) - case "false" => Some(false) - case _ => None - } - case None => IO.pure(None) - } - - def partValue(key: String): IO[String] = for { - eitherValue <- eitherPartValue(key) - value <- eitherValue.fold( - s => IO.raiseError(new RuntimeException(s)), - IO.pure - ) - } yield value - - def eitherPartValue(key: String): IO[Either[String, String]] = for { - maybeValue <- optPartValue(key) - } yield maybeValue match { - case None => - Left( - s"Not found value for key $key\nKeys available: ${map.keySet.mkString(",")}" - ) - case Some(s) => Right(s) - } - - def optPartValue(key: String): IO[Option[String]] = - map.get(key) match { - case Some(part) => - part.body.through(utf8Decode).compile.foldMonoid.map(Some.apply) - case None => IO.pure(None) - } -} - -object PartsMap { - - def apply(ps: Vector[Part[IO]]): PartsMap = { - PartsMap(ps.filter(_.name.isDefined).map(p => (p.name.get, p)).toMap) - } - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala index 2ca08c5d..6b97bffe 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/api/service/APIService.scala @@ -20,7 +20,7 @@ class APIService(client: Client[IO]) extends Http4sDsl[IO] with ApiService { val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / `api` / "health" => - Ok("OK") + Ok("Healthy") } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala index 918caea9..7d4b5652 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala @@ -7,7 +7,6 @@ import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} import es.weso.rdfshape.server.api.format.DataFormat import es.weso.rdfshape.server.api.merged.CompoundData -import es.weso.rdfshape.server.api.routes.data.logic import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.utils.IOUtils.{either2io, err} import guru.nidi.graphviz.engine.{Format, Graphviz} @@ -43,7 +42,7 @@ final case class DataConversion( */ def toJson: Json = Json.fromFields( List( - ("msg", Json.fromString(msg)), + ("message", Json.fromString(msg)), ("result", Json.fromString(result)), ("dataFormat", Json.fromString(dataFormat.name)), ("targetDataFormat", Json.fromString(targetFormat)) @@ -65,13 +64,14 @@ private[api] object DataConversion extends LazyLogging { GraphFormat("PNG", "application/png", Format.PNG), GraphFormat("PS", "application/ps", Format.PS) ) + val successMessage = "Conversion successful!" 
private[api] def dataConvert( maybeData: Option[String], dataFormat: DataFormat, maybeCompoundData: Option[String], targetFormat: String - ): IO[DataConversion] = { + ): IO[Either[String, DataConversion]] = { logger.debug( s"Converting $maybeData with format $dataFormat to $targetFormat. OptTargetFormat: $targetFormat" ) @@ -86,17 +86,31 @@ private[api] object DataConversion extends LazyLogging { ecd <- either2io(CompoundData.fromString(compoundDataStr)) cd <- cnvEither(ecd, str => s"dataConvert: Error: $str") result <- cd.toRDF.flatMap( - _.use(rdf => rdfConvert(rdf, None, dataFormat, targetFormat)) + _.use(rdf => + rdfConvert(rdf, None, dataFormat, targetFormat).attempt.map( + _.fold(exc => Left(exc.getMessage), dc => Right(dc)) + ) + ) ) + } yield result } case Some(data) => - RDFAsJenaModel - .fromChars(data, dataFormat.name, None) - .flatMap( - _.use(rdf => rdfConvert(rdf, Some(data), dataFormat, targetFormat)) - ) + val maybeConversion = + RDFAsJenaModel + .fromChars(data, dataFormat.name, None) + .flatMap( + _.use(rdf => + rdfConvert(rdf, Some(data), dataFormat, targetFormat) + ) + ) + + maybeConversion.attempt.map( + _.fold(exc => Left(exc.getMessage), dc => Right(dc)) + ) + } + } private def cnvEither[A](e: Either[String, A], cnv: String => String): IO[A] = @@ -142,7 +156,7 @@ private[api] object DataConversion extends LazyLogging { for { converted <- doConversion - } yield logic.DataConversion( + } yield DataConversion( "Conversion successful!", data, dataFormat, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala index 97bbd691..36811308 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala @@ -50,7 +50,7 @@ final case class DataExtract private ( schemaStr 
<- schema.serialize(schemaFormat) } yield Json.fromFields( List( - ("msg", Json.fromString(msg)), + ("message", Json.fromString(msg)), ("inferredShape", Json.fromString(schemaStr)), ("schemaFormat", Json.fromString(schemaFormat)), ("schemaEngine", Json.fromString(engine)) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala index c1c510fb..f7d47a9d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala @@ -1,5 +1,6 @@ package es.weso.rdfshape.server.api.routes.data.logic +import cats.data.EitherT import cats.effect.IO import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI @@ -7,24 +8,24 @@ import es.weso.rdf.{PrefixMap, RDFReasoner} import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json import es.weso.rdfshape.server.utils.json.JsonUtils._ -import es.weso.utils.IOUtils.{ESIO, either2es, io2es} +import es.weso.utils.IOUtils.{either2es, io2es} import io.circe.Json /** Data class representing the output of an "information" operation * - * @param msg Output informational message after processing. Used in case of error. - * @param data RDF input data - * @param dataFormat RDF input data format - * @param predicates List of predicates of the RDF input - * @param numberStatements Number of statements in the RDF input - * @param prefixMap Prefix map of the RDF input + * @param message Output informational message after processing. Used in case of error. 
+ * @param data RDF input data + * @param dataFormat RDF input data format + * @param predicates List of predicates of the RDF input + * @param numberOfStatements Number of statements in the RDF input + * @param prefixMap Prefix map of the RDF input */ final case class DataInfo private ( - msg: String, + message: String, data: Option[String], dataFormat: Option[DataFormat], predicates: Option[Set[IRI]], - numberStatements: Option[Int], + numberOfStatements: Option[Int], prefixMap: Option[PrefixMap] ) { @@ -38,14 +39,14 @@ final case class DataInfo private ( */ def toJson: Json = { Json.fromFields( - List(("msg", Json.fromString(msg))) ++ + List(("message", Json.fromString(message))) ++ maybeField(data, "data", Json.fromString) ++ maybeField( dataFormat, "dataFormat", (df: DataFormat) => Json.fromString(df.name) ) ++ - maybeField(numberStatements, "numberStatements", Json.fromInt) ++ + maybeField(numberOfStatements, "numberOfStatements", Json.fromInt) ++ maybeField(prefixMap, "prefixMap", prefixMap2Json) ++ maybeField( predicates, @@ -72,23 +73,30 @@ object DataInfo { */ val successMessage = "Well formed RDF" - /** @return A DataInfoResult, given all the parameters needed to build it (input, predicates, etc.) + /** For a given RDF input (plain text), return information about it + * + * @param data Input data string + * @param dataFormatStr Input data format + * @return Information about the input RDF: statements, well-formed, etc. 
*/ - def fromData( - data: Option[String], - dataFormat: Option[DataFormat], - predicates: Set[IRI], - numberStatements: Int, - prefixMap: PrefixMap - ): DataInfo = - DataInfo( - successMessage, - data, - dataFormat, - Some(predicates), - Some(numberStatements), - Some(prefixMap) - ) + def dataInfoFromString( + data: String, + dataFormatStr: String + ): IO[Either[String, DataInfo]] = { + val either: EitherT[IO, String, DataInfo] = for { + dataFormat <- either2es(DataFormat.fromString(dataFormatStr)) + json <- io2es( + RDFAsJenaModel + .fromChars(data, dataFormat.name) + .flatMap( + _.use(rdf => dataInfoFromRdf(rdf, Some(data), Some(dataFormat))) + ) + ) + ret <- EitherT.fromEither[IO](json) + } yield ret + + either.fold(e => Left(e), d => Right(d)) + } /** For a given RDF input, return information about it * @@ -101,50 +109,38 @@ object DataInfo { rdf: RDFReasoner, data: Option[String], dataFormat: Option[DataFormat] - ): IO[DataInfo] = { + ): IO[Either[String, DataInfo]] = { val either: IO[Either[Throwable, DataInfo]] = (for { - numberStatements <- rdf.getNumberOfStatements() - predicates <- rdf.predicates().compile.toList - pm <- rdf.getPrefixMap + numberOfStatements <- rdf.getNumberOfStatements() + predicates <- rdf.predicates().compile.toList + pm <- rdf.getPrefixMap } yield DataInfo.fromData( data, dataFormat, predicates.toSet, - numberStatements, + numberOfStatements, pm )).attempt either.map( - _.fold(e => DataInfo.fromMsg(e.getMessage), r => r) + _.fold(e => Left(e.getMessage), r => Right(r)) ) } - /** For a given RDF input (plain text), return information about it - * - * @param data Input data string - * @param dataFormatStr Input data format - * @return Information about the input RDF: statements, well-formed, etc. 
- */ - def dataInfoFromString( - data: String, - dataFormatStr: String - ): IO[DataInfo] = { - val either: ESIO[DataInfo] = for { - dataFormat <- either2es(DataFormat.fromString(dataFormatStr)) - json <- io2es( - RDFAsJenaModel - .fromChars(data, dataFormat.name) - .flatMap( - _.use(rdf => dataInfoFromRdf(rdf, Some(data), Some(dataFormat))) - ) - ) - } yield json - - either.fold(e => DataInfo.fromMsg(e), identity) - } - - /** @param msg Error message contained in the result - * @return A DataInfoResult consisting of a single error message and no data + /** @return A DataInfoResult, given all the parameters needed to build it (input, predicates, etc.) */ - def fromMsg(msg: String): DataInfo = - DataInfo(msg, None, None, None, None, None) + def fromData( + data: Option[String], + dataFormat: Option[DataFormat], + predicates: Set[IRI], + numberOfStatements: Int, + prefixMap: PrefixMap + ): DataInfo = + DataInfo( + successMessage, + data, + dataFormat, + Some(predicates), + Some(numberOfStatements), + Some(prefixMap) + ) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala index 580f1b23..93f4236e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala @@ -6,11 +6,12 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena._ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, RDFReasoner} -import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultActiveDataTab import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.merged.CompoundData -import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import 
es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.html2rdf.HTML2RDF +import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import es.weso.utils.IOUtils.err import java.net.URI @@ -20,27 +21,15 @@ sealed case class DataParam( dataURL: Option[String], dataFile: Option[String], maybeEndpoint: Option[String], - dataFormatValue: Option[DataFormat], - dataFormatTextarea: Option[DataFormat], - dataFormatUrl: Option[DataFormat], - dataFormatFile: Option[DataFormat], + optDataFormat: Option[DataFormat], inference: Option[String], targetDataFormat: Option[DataFormat], activeDataTab: Option[String], compoundData: Option[String] ) extends LazyLogging { - - val dataFormat: Option[DataFormat] = { - val dataTab = parseDataTab(activeDataTab.getOrElse(defaultActiveDataTab)) - logger.debug(s"Data tab received: $dataTab") - dataTab match { - case Right(`dataUrlType`) => dataFormatUrl orElse dataFormatValue - case Right(`dataFileType`) => dataFormatFile orElse dataFormatValue - case Right(`dataTextAreaType`) => - dataFormatTextarea orElse dataFormatValue - case _ => dataFormatValue - } - } + val dataFormat: DataFormat = optDataFormat.getOrElse( + DataFormat.defaultFormat + ) /** get RDF data from data parameters * @@ -81,9 +70,12 @@ sealed case class DataParam( dataURL match { case None => err(s"Non value for dataURL") case Some(dataUrl) => - val dataFormat = dataFormatUrl.getOrElse(DataFormat.defaultFormat) for { - rdf <- rdfFromUri(new URI(dataUrl), dataFormat, base) + rdf <- rdfFromUri( + new URI(dataUrl), + dataFormat, + base + ) } yield (None, rdf) } case Right(`dataFileType`) => @@ -91,9 +83,6 @@ sealed case class DataParam( dataFile match { case None => err(s"No value for dataFile") case Some(dataStr) => - val dataFormat: Format = - dataFormatFile.getOrElse(DataFormat.defaultFormat) - for { iriBase <- mkBase(base) res <- RDFAsJenaModel.fromString( @@ -120,9 +109,6 @@ sealed case class DataParam( data match { case None 
=> RDFAsJenaModel.empty.flatMap(e => IO((None, e))) case d @ Some(data) => - val dataFormat = dataFormatTextarea.getOrElse( - dataFormatValue.getOrElse(DataFormat.defaultFormat) - ) val x: IO[(Option[String], Resource[IO, RDFReasoner])] = for { res <- rdfFromString(data, dataFormat, base) res2 = extendWithInference( @@ -199,20 +185,28 @@ sealed case class DataParam( format: Format, base: Option[String] ): IO[Resource[IO, RDFReasoner]] = { - format.name.toLowerCase match { - case formatName if HTML2RDF.availableExtractorNames contains formatName => - IO( - HTML2RDF.extractFromUrl( - uri.toString, - formatName - ) - ) + + getUrlContents(uri.toString) match { + case Left(errMsg) => IO.raiseError(new RuntimeException(errMsg)) case _ => - for { - baseIri <- mkBase(base) - res <- RDFAsJenaModel.fromURI(uri.toString, format.name, baseIri) - } yield res + format.name.toLowerCase match { + case formatName + if HTML2RDF.availableExtractorNames contains formatName => + IO( + HTML2RDF.extractFromUrl( + uri.toString, + formatName + ) + ) + case _ => + for { + baseIri <- mkBase(base) + res <- RDFAsJenaModel.fromURI(uri.toString, format.name, baseIri) + } yield res + } + } + } private def extendWithInference( @@ -294,26 +288,20 @@ object DataParam extends LazyLogging { } private[api] def mkDataParam(partsMap: PartsMap): IO[DataParam] = for { - data <- partsMap.optPartValue("data") - compoundData <- partsMap.optPartValue("compoundData") - dataURL <- partsMap.optPartValue("dataURL") - dataFile <- partsMap.optPartValue("dataFile") - endpoint <- partsMap.optPartValue("endpoint") - dataFormatTextArea <- getDataFormat("dataFormatTextArea", partsMap) - dataFormatUrl <- getDataFormat("dataFormatUrl", partsMap) - dataFormatFile <- getDataFormat("dataFormatFile", partsMap) - dataFormatValue <- getDataFormat("dataFormat", partsMap) - inference <- partsMap.optPartValue("inference") - targetDataFormat <- getDataFormat("targetDataFormat", partsMap) - activeDataTab <- 
partsMap.optPartValue("activeTab") + data <- partsMap.optPartValue(DataParameter.name) + compoundData <- partsMap.optPartValue(CompoundDataParameter.name) + dataURL <- partsMap.optPartValue(DataURLParameter.name) + dataFile <- partsMap.optPartValue(DataFileParameter.name) + endpoint <- partsMap.optPartValue(EndpointParameter.name) + dataFormat <- getDataFormat(DataFormatParameter.name, partsMap) + inference <- partsMap.optPartValue(InferenceParameter.name) + targetDataFormat <- getDataFormat(TargetDataFormatParameter.name, partsMap) + activeDataTab <- partsMap.optPartValue(ActiveDataTabParameter.name) } yield { logger.debug(s"data: $data") logger.debug(s"compoundData: $compoundData") - logger.debug(s"dataFormatValue: $dataFormatValue") - logger.debug(s"dataFormatTextArea: $dataFormatTextArea") - logger.debug(s"dataFormatUrl: $dataFormatUrl") - logger.debug(s"dataFormatFile: $dataFormatFile") + logger.debug(s"dataFormat: $dataFormat") logger.debug(s"dataURL: $dataURL") logger.debug(s"endpoint: $endpoint") logger.debug(s"activeDataTab: $activeDataTab") @@ -337,10 +325,7 @@ object DataParam extends LazyLogging { dataURL, dataFile, finalEndpoint, - dataFormatValue, - dataFormatTextArea, - dataFormatUrl, - dataFormatFile, + dataFormat, inference, targetDataFormat, finalActiveDataTab, @@ -370,17 +355,29 @@ object DataParam extends LazyLogging { private[api] def empty: DataParam = DataParam( - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None + data = None, + dataURL = None, + dataFile = None, + maybeEndpoint = None, + optDataFormat = None, + inference = None, + targetDataFormat = None, + activeDataTab = None, + compoundData = None ) } + +/** Enumeration of the different possible Schema tabs sent by the client. + * The tab sent indicates the API if the schema was sent in raw text, as a URL + * to be fetched or as a text file containing the schema. 
+ * In case the client submits the schema in several formats, the selected tab will indicate the preferred one. + */ +private[logic] object DataTab extends Enumeration { + type DataTab = String + + val TEXT = "#dataTextArea" + val URL = "#dataUrl" + val FILE = "#dataFile" + + val defaultActiveShapeMapTab: DataTab = TEXT +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index 20515be0..04fd2e41 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -1,7 +1,6 @@ package es.weso.rdfshape.server.api.routes.data.service import cats.effect._ -import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ @@ -11,22 +10,18 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ } import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.data.logic.DataExtract.dataExtract import es.weso.rdfshape.server.api.routes.data.logic.DataInfo.{ dataInfoFromRdf, dataInfoFromString } import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.dataFormatOrDefault -import es.weso.rdfshape.server.api.routes.data.logic.{ - DataConversion, - DataExtract, - DataParam -} +import es.weso.rdfshape.server.api.routes.data.logic.{DataConversion, DataParam} import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuery -import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} import 
es.weso.rdfshape.server.api.utils.OptEitherF._ -import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.schema._ import es.weso.utils.IOUtils._ import io.circe._ @@ -34,7 +29,6 @@ import org.http4s._ import org.http4s.circe._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl -import org.http4s.headers._ import org.http4s.multipart.Multipart /** API Service to handle RDF data @@ -52,31 +46,41 @@ class DataService(client: Client[IO]) */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - // Input RDF data formats include html-microdata, turtle, json-ld... + /** Returns a JSON array with the accepted input RDF data formats + */ case GET -> Root / `api` / `verb` / "formats" / "input" => val formats = DataFormat.availableFormats.map(_.name) val json = Json.fromValues(formats.map(Json.fromString)) Ok(json) - // Output RDF data conversion formats + /** Returns a JSON array with the available output RDF data formats + */ case GET -> Root / `api` / `verb` / "formats" / "output" => val formats = DataFormats.availableFormats.map(_.name) val json = Json.fromValues(formats.map(Json.fromString)) Ok(json) + /** Returns the default RDF format as a raw string + */ case GET -> Root / `api` / `verb` / "formats" / "default" => val dataFormat = DataFormat.defaultFormat.name Ok(Json.fromString(dataFormat)) + /** Returns a JSON array with the available inference engines + */ case GET -> Root / `api` / `verb` / "inferenceEngines" => val inferenceEngines = availableInferenceEngines val json = Json.fromValues(inferenceEngines.map(Json.fromString)) Ok(json) + /** Returns the default inference engine used as a raw string + */ case GET -> Root / `api` / `verb` / "inferenceEngines" / "default" => val defaultInferenceEngine = defaultInference Ok(Json.fromString(defaultInferenceEngine)) + /** Returns a JSON array with the 
available visualization formats + */ case GET -> Root / `api` / `verb` / "visualize" / "formats" => val formats = DataConversion.availableGraphFormatNames ++ List( @@ -86,93 +90,72 @@ class DataService(client: Client[IO]) val json = Json.fromValues(formats.map(Json.fromString)) Ok(json) - case req @ GET -> Root / `api` / "dataUrl" / "info" :? - OptDataURLParam(optDataUrl) +& - DataFormatParam(optDataFormat) => - val dataFormat = dataFormatOrDefault(optDataFormat) - optDataUrl match { - case None => responseJson("Must provide a dataUrl", BadRequest) - case Some(dataUrl) => - for { - data <- client.expect[String](dataUrl) - result <- io2f(dataInfoFromString(data, dataFormat)) - json = result.toJson - r <- Ok(json).map( - _.withContentType(`Content-Type`(MediaType.application.json)) - ) - } yield r - } - + /** Obtain information about an RDF source. + * Receives a JSON object with the input RDF information: + * - data [String]: RDF data + * - dataUrl [String]: Url containing the RDF data + * - dataFile [File Object]: File containing RDF data + * - dataFormat [String]: Format of the RDF data + * - inference [String]: Inference to be applied + * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) 
+ * Returns a JSON object with the RDF data information: + * - message [String]: Informational message + * - data [String]: RDF data sent back (originally sent by the client) + * - dataFormat [String]: Data format of the data + * - numberOfStatements [String]: Data format of the data + * - prefixMap [Object]: Dictionary with the prefix map of the data + * - predicates [Array]: Array of the predicates present in the data + */ case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { dataParam <- DataParam.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataParam - dataFormat = dataFormatOrDefault(dp.dataFormat.map(_.name)) + dataFormat = dataFormatOrDefault(dp.optDataFormat.map(_.name)) response <- dp.data match { case Some(data) => for { result <- dataInfoFromString(data, dataFormat) - json: Json = result.toJson - ok <- Ok(json) - } yield ok + response <- result match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right(res) => Ok(res.toJson) + } + } yield response case None => for { - d <- + maybeData <- resourceRdf.use(rdf => - dataInfoFromRdf(rdf, None, dp.dataFormat) + dataInfoFromRdf(rdf, None, dp.optDataFormat) ) - json <- IO(d.toJson) - ok <- Ok(json) - } yield ok + response <- maybeData match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right(res) => Ok(res.toJson) + } + } yield response } } yield response } - case GET -> Root / `api` / `verb` / "info" :? 
- OptDataParam(optData) +& - OptDataURLParam(optDataURL) +& - CompoundDataParam(optCompoundData) +& - DataFormatParam(maybeDataFormat) +& - InferenceParam(optInference) +& - OptEndpointParam(optEndpoint) +& - OptActiveDataTabParam(optActiveDataTab) => - val either: Either[String, Option[DataFormat]] = for { - df <- maybeDataFormat.map(DataFormat.fromString).sequence - } yield df - - val r: IO[Response[IO]] = either.fold( - str => responseJson(str, BadRequest), - optDataFormat => { - val dp = - DataParam( - optData, - optDataURL, - None, - optEndpoint, - optDataFormat, - optDataFormat, - optDataFormat, - None, //no dataFormatFile - optInference, - None, - optActiveDataTab, - optCompoundData - ) - for { - dataParam <- io2f(dp.getData(relativeBase)) - (maybeStr, resourceRdf) = dataParam - d <- resourceRdf.use(rdf => - dataInfoFromRdf(rdf, maybeStr, optDataFormat) - ) - json <- IO(d.toJson) - ok <- Ok(json) - } yield ok - } - ) - r - + /** Convert an RDF source into another format/syntax. + * Receives a JSON object with the input RDF information: + * - data [String]: RDF data + * - dataUrl [String]: Url containing the RDF data + * - dataFile [File Object]: File containing RDF data + * - dataFormat [String]: Format of the RDF data + * - targetDataFormat [String]: Format of the RDF data + * - inference [String]: Inference to be applied + * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) 
+ * Returns a JSON object with the RDF data information: + * - message [String]: Informational message + * - data [String]: RDF data sent back (originally sent by the client) + * - result [String]: RDF resulting from the conversion + * - dataFormat [String]: Data format of the input data + * - targetDataFormat [String]: Data format of the output data + * - numberOfStatements [String]: Data format of the data + * - prefixMap [Object]: Dictionary with the prefix map of the data + * - predicates [Array]: Array of the predicates present in the data + */ case req @ POST -> Root / `api` / `verb` / "convert" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) @@ -180,67 +163,100 @@ class DataService(client: Client[IO]) dataParam <- DataParam.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataParam targetFormat = dp.targetDataFormat.getOrElse(defaultDataFormat).name - dataFormat = dp.dataFormat.getOrElse(defaultDataFormat) + dataFormat = dp.optDataFormat.getOrElse(defaultDataFormat) + result <- io2f( resourceRdf.use(rdf => { - logger.debug(s"Data convert dataParam: $dp") - DataConversion.rdfConvert(rdf, dp.data, dataFormat, targetFormat) + logger.debug(s"Attempting data conversion") + DataConversion + .rdfConvert(rdf, dp.data, dataFormat, targetFormat) + }) - ) - ok <- Ok(result.toJson) - } yield ok - } + ).attempt + .map( + _.fold(exc => Left(exc.getMessage), dc => Right(dc)) + ) - case req @ GET -> Root / `api` / `verb` / "convert" :? 
- DataParameter(data) +& - DataFormatParam(optDataFormat) +& - CompoundDataParam(optCompoundData) +& - TargetDataFormatParam(optResultDataFormat) => - for { - eitherDataFormat <- either2ef[DataFormat, IO]( - DataFormat.fromString(optDataFormat.getOrElse(defaultDataFormat.name)) - ).value - result <- eitherDataFormat.fold( - e => BadRequest(e), - dataFormat => - for { - r <- io2f( - DataConversion.dataConvert( - data, - dataFormat, - optCompoundData, - optResultDataFormat.getOrElse(defaultDataFormat.name) - ) - ) - ok <- Ok(r.toJson) - } yield ok - ) - } yield result + response <- result match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right(result) => Ok(result.toJson) + } + } yield response + } + + /** Perform a SPARQL query on RDF data. + * Receives a JSON object with the input RDF and query information: + * - data [String]: Raw RDF data + * - dataUrl [String]: Url containing the RDF data + * - dataFile [File Object]: File containing RDF data + * - dataFormat [String]: Format of the RDF data + * - inference [String]: Inference to be applied + * - query [String]: Raw SPARQL query + * - queryUrl [String]: Url containing the SPARQL query + * - queryFile [String]: File containing the SPARQL query + * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) + * - activeQueryTab [String]: Identifies the source of the query (raw, URL, file...) 
+ * Returns a JSON object with the RDF data information: + * - message [String]: Informational message + * - data [String]: RDF data sent back (originally sent by the client) + * - result [String]: RDF resulting from the conversion + * - dataFormat [String]: Data format of the input data + * - targetDataFormat [String]: Data format of the output data + * - numberOfStatements [String]: Data format of the data + * - prefixMap [Object]: Dictionary with the prefix map of the data + * - predicates [Array]: Array of the predicates present in the data + */ case req @ POST -> Root / `api` / `verb` / "query" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) logger.debug(s"Data query params map: $partsMap") for { + /* TODO: an error is thrown on bad query URLs (IO.raise...), but it is + * not controlled */ dataParam <- DataParam.mkData(partsMap, relativeBase) + (resourceRdf, dp) = dataParam maybeQuery <- SparqlQuery.getSparqlQuery(partsMap) resp <- maybeQuery match { case Left(err) => - responseJson(s"Error obtaining query data: $err", BadRequest) + // Query could not be even parsed from user data + errorResponseJson(s"Error obtaining query data: $err", BadRequest) case Right(query) => + // Query was parsed, but may be invalid still val optQueryStr = query.query logger.debug(s"Data query with querystring: $optQueryStr") for { - json <- io2f( + result <- io2f( resourceRdf.use(rdf => rdf.queryAsJson(optQueryStr)) - ) - v <- Ok(json) - } yield v + ).attempt + .map(_.fold(exc => Left(exc.getMessage), dc => Right(dc))) + response <- result match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right(json) => Ok(json) + } + } yield response } } yield resp } + /** Attempt to extract a schema from an RDF source. 
+ * Receives a JSON object with the input RDF information: + * - data [String]: Raw RDF data + * - dataUrl [String]: Url containing the RDF data + * - dataFile [File Object]: File containing RDF data + * - dataFormat [String]: Format of the RDF data + * - inference [String]: Inference to be applied + * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) + * Returns a JSON object with the extraction information: + * - message [String]: Informational message + * - data [String]: Input RDF data + * - dataFormat [String]: Format of the input RDF data + * - inferredShape [String]: Raw extracted shape + * - schemaFormat [String]: Format of the extracted schema + * - schemaEngine [String]: Engine of the extracted schema + * - resultShapeMap [String]: Shapemap of the extracted schema + */ case req @ POST -> Root / `api` / `verb` / "extract" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) @@ -257,25 +273,18 @@ class DataService(client: Client[IO]) SchemaFormat.fromString ) response <- maybeData match { + // No data received case Left(err) => - for { - res <- io2f( - DataExtract - .fromMsg(s"Error obtaining data: ${err.getMessage}") - .toJson - ) - ok <- Ok(res) - } yield ok - /* Ok(DataExtractResult.fromMsg(s"Error obtaining data: - * $err").toJson) */ + errorResponseJson(err.getMessage, BadRequest) + // Data received, try to extract case Right((resourceRdf, dp)) => for { - d <- io2f( + result <- io2f( resourceRdf.use(rdf => dataExtract( rdf, dp.data, - dp.dataFormatValue, + dp.optDataFormat, nodeSelector, inference, schemaEngine, @@ -284,10 +293,14 @@ class DataService(client: Client[IO]) None ) ) - ) - json <- io2f(d.toJson) - ok <- Ok(json) - } yield ok + ).attempt + .map(_.fold(exc => Left(exc.getMessage), res => Right(res))) + response <- result match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right(result) => Ok(result.toJson) + } + + } yield response } } yield response } @@ -295,9 +308,6 
@@ class DataService(client: Client[IO]) } private val relativeBase = ApiDefaults.relativeBase - private def errJson(msg: String): IO[Response[IO]] = - Ok(Json.fromFields(List(("error", Json.fromString(msg))))) - } object DataService { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala index 78c892e9..c289e415 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala @@ -5,10 +5,15 @@ import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReader import es.weso.rdf.jena.{Endpoint => EndpointJena} -import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.routes.endpoint.logic.EndpointStatus.{ + EndpointStatus, + OFFLINE, + ONLINE +} +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import es.weso.utils.IOUtils.{ESIO, io2es} import io.circe.Json -import org.http4s.client.Client import java.net.URL import scala.util.{Failure, Success, Try} @@ -18,11 +23,11 @@ import scala.util.{Failure, Success, Try} * @param msg Message attached to the information/returned by the endpoint * @param status Status of the endpoint */ -case class Endpoint(msg: String, status: Option[String] = None) { +sealed case class Endpoint(msg: String, status: EndpointStatus) { def asJson: Json = Json.fromFields( List( - ("msg", Json.fromString(msg)), - ("status", Json.fromString(status.getOrElse(""))) + ("message", Json.fromString(msg)), + ("status", Json.fromString(status)) ) ) } @@ -43,12 +48,14 @@ private[api] object Endpoint extends LazyLogging { /** Given an endpoint URL, fetch and return its data * * @param url Endpoint URL - * @param client 
Client used to fetch the URL - * @return An instance of EndpointInfo with the information contained in the endpoint + * @return An instance of Endpoint with the information contained in the endpoint */ - def getEndpointInfo(url: URL, client: Client[IO]): IO[Endpoint] = { - IO.println(s"Obtaining info of endpoint $url") *> - client.expect[String](url.toString).map(Endpoint(_)) + def getEndpointInfo(url: URL): Endpoint = { + logger.debug(s"Obtaining info of endpoint $url") + getUrlContents(url.toString) match { + case Left(errMsg) => Endpoint(errMsg, OFFLINE) + case Right(response) => Endpoint(response, ONLINE) + } } /** Given a request's parameters, try to extract an endpoint URL from them @@ -64,7 +71,7 @@ private[api] object Endpoint extends LazyLogging { ) ep <- maybeStr match { case None => - EitherT.leftT[IO, URL](s"No value for param endpoint") + EitherT.leftT[IO, URL](s"No value provided for parameter endpoint") case Some(str) => Try(new URL(str)) match { case Success(url) => EitherT.rightT[IO, String](url) @@ -73,3 +80,13 @@ private[api] object Endpoint extends LazyLogging { } } yield ep } + +/** Enumeration of the different possible Endpoint states. 
+ */ +private[endpoint] object EndpointStatus extends Enumeration { + type EndpointStatus = String + + val ONLINE = "online" + val OFFLINE = "offline" + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala index a1311a9a..805ab604 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala @@ -1,7 +1,13 @@ package es.weso.rdfshape.server.api.routes.endpoint.logic +import cats.data.EitherT +import cats.effect.IO +import cats.implicits._ +import es.weso.rdf.jena.{Endpoint => EndpointJena} import es.weso.rdf.nodes.{IRI, RDFNode} import es.weso.rdf.triples.RDFTriple +import es.weso.rdfshape.server.utils.numeric.NumericUtils +import es.weso.utils.IOUtils.{ESIO, stream2es} import io.circe.Json case class Outgoing(node: IRI, endpoint: IRI, children: Children) { @@ -34,18 +40,49 @@ case class Outgoing(node: IRI, endpoint: IRI, children: Children) { } case class Children(m: Map[IRI, Vector[Value]]) + case class Value(node: RDFNode, children: Children) object Outgoing { val noChildren: Children = Children(Map()) + def getOutgoing( + optEndpoint: Option[String], + optNode: Option[String], + optLimit: Option[String] + ): EitherT[IO, String, Outgoing] = { + for { + endpointIRI <- EitherT.fromEither[IO]( + Either + .fromOption(optEndpoint, "No endpoint provided") + .flatMap(IRI.fromString(_)) + ) + node <- EitherT.fromEither[IO]( + Either + .fromOption(optNode, "No node provided") + .flatMap(IRI.fromString(_)) + ) + limit <- EitherT.fromEither[IO]( + NumericUtils.parseInt(optLimit.getOrElse("1")) + ) + o <- outgoing(endpointIRI, node, limit) + } yield o + } + + def outgoing(endpoint: IRI, node: IRI, limit: Int): ESIO[Outgoing] = + for { + triples <- 
stream2es(EndpointJena(endpoint).triplesWithSubject(node)) + } yield Outgoing.fromTriples(node, endpoint, triples.toSet) + /** Creates an outgoing value from a set of triples. * It assumes all those triples have the same subject which is ignored + * * @param ts Triple set * @return */ def fromTriples(node: IRI, endpoint: IRI, ts: Set[RDFTriple]): Outgoing = { val zero: Map[IRI, Vector[Value]] = Map() + def cmb( m: Map[IRI, Vector[Value]], current: RDFTriple @@ -56,6 +93,7 @@ object Outgoing { )((vs: Vector[Value]) => m.updated(current.pred, vs :+ Value(current.obj, noChildren)) ) + Outgoing(node, endpoint, Children(ts.foldLeft(zero)(cmb))) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala index 9d969c63..9c1b8687 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala @@ -2,22 +2,25 @@ package es.weso.rdfshape.server.api.routes.endpoint.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.routes.PartsMap import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQueryTab.{ SparqlQueryTab, defaultActiveQueryTab } - -import java.net.URL -import scala.io.Source -import scala.util.{Failure, Success, Try} +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ + ActiveQueryTabParameter, + QueryFileParameter, + QueryParameter, + QueryURLParameter +} +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents /** Data class representing a SPARQL query and its current source * * @param query Query raw text * @param activeQueryTab Active tab, used to know which source the query comes from */ 
-sealed case class SparqlQuery( +sealed case class SparqlQuery private ( query: String, activeQueryTab: SparqlQueryTab ) @@ -30,17 +33,17 @@ private[api] object SparqlQuery extends LazyLogging { /** Given a request's parameters, try to extract a SPARQL query from them * - * @param partsMap Request's parameter + * @param partsMap Request's parameters * @return Either the SPARQL query or an error message */ def getSparqlQuery( partsMap: PartsMap ): IO[Either[String, SparqlQuery]] = for { - queryStr <- partsMap.optPartValue("query") - queryURL <- partsMap.optPartValue("queryURL") - queryFile <- partsMap.optPartValue("queryFile") - activeQueryTab <- partsMap.optPartValue("activeQueryTab") + queryStr <- partsMap.optPartValue(QueryParameter.name) + queryUrl <- partsMap.optPartValue(QueryURLParameter.name) + queryFile <- partsMap.optPartValue(QueryFileParameter.name) + activeQueryTab <- partsMap.optPartValue(ActiveQueryTabParameter.name) _ = logger.debug( s"Getting SPARQL from params. Query tab: $activeQueryTab" @@ -56,7 +59,7 @@ private[api] object SparqlQuery extends LazyLogging { Right(SparqlQuery(queryRaw, SparqlQueryTab.TEXT)) } case SparqlQueryTab.URL => - queryURL match { + queryUrl match { case None => Left(s"No value for the query URL") case Some(queryUrl) => getUrlContents(queryUrl) match { @@ -82,33 +85,12 @@ private[api] object SparqlQuery extends LazyLogging { } yield maybeQuery - /** Error-safe way of obtaining the raw contents in a given URL - * - * @param urlString URL to be fetched (String representation) - * @return Either the contents if the URL or an error message - */ - private def getUrlContents(urlString: String): Either[String, String] = { - Try { - val url = new URL(urlString) - val src = Source.fromURL(url) - val str = src.mkString - src.close() - str - } match { - case Success(urlContent) => Right(urlContent) - case Failure(exception) => - val msg = - s"Error obtaining data from url $urlString: ${exception.getMessage}" - logger.warn(msg) - 
Left(msg) - } - } } /** Enumeration of the different possible QueryTabs sent by the client. * The tab sent indicates the API if the Query was sent in raw text, as a URL * to be fetched or as a text file containing the query. - * In case the client submits the query in several formats, the selected tab will indicate the preferred format. + * In case the client submits the query in several formats, the selected tab will indicate the one format. */ private[logic] object SparqlQueryTab extends Enumeration { type SparqlQueryTab = String diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index 05bb5fb5..d9c90e02 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -2,30 +2,29 @@ package es.weso.rdfshape.server.api.routes.endpoint.service import cats.data.EitherT import cats.effect._ -import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.{Endpoint => EndpointJena} -import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{ - LimitParam, - OptEndpointParam, - OptNodeParam -} +import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.endpoint.logic.Endpoint.{ getEndpointAsRDFReader, getEndpointInfo, getEndpointUrl } +import es.weso.rdfshape.server.api.routes.endpoint.logic.EndpointStatus._ +import es.weso.rdfshape.server.api.routes.endpoint.logic.Outgoing.getOutgoing import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuery.getSparqlQuery import es.weso.rdfshape.server.api.routes.endpoint.logic.{ Endpoint, Outgoing, SparqlQuery } -import 
es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} -import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson -import es.weso.rdfshape.server.utils.numeric.NumericUtils +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ + EndpointParameter, + LimitParameter, + NodeParameter +} +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.utils.IOUtils._ import io.circe.Json import org.http4s._ @@ -49,6 +48,17 @@ class EndpointService(client: Client[IO]) */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + /** Perform a SPARQL query targeted to a specific endpoint. + * Receives a JSON object with the input endpoint query: + * - query [String]: Input query + * - endpoint [String]: Target endpoint + * - activeQueryTab [String]: Identifies the source of the query (raw, URL, file...) + * Returns a JSON object with the query results: + * - head [Object]: Query metadata + * - vars: [Array]: Query variables + * - results [Object]: Query results + * - bindings: [Array]: Query results, each item being an object mapping each variable to its value + */ case req @ POST -> Root / `api` / `verb` / "query" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) @@ -77,78 +87,61 @@ class EndpointService(client: Client[IO]) either <- r.value resp <- either.fold( e => - responseJson(s"Error querying endpoint: $e", InternalServerError), + errorResponseJson( + s"Query failed. $e", + InternalServerError + ), json => Ok(json) ) } yield resp } + /** Attempt to contact an endpoint and return metadata about it. 
+ * Receives a JSON object with the input endpoint: + * - endpoint [String]: Target endpoint + * Returns a JSON object with the endpoint response: + * - head [Object]: Query metadata + * - vars: [Array]: Query variables + * - results [Object]: Query results + * - bindings: [Array]: Query results, each item being an object mapping each variable to its value + */ case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - val r: EitherT[IO, String, Json] = for { - endpointUrl <- getEndpointUrl(partsMap) - ei <- EitherT.liftF[IO, String, Endpoint]( - getEndpointInfo(endpointUrl, client) - ) - } yield ei.asJson - for { - either <- r.value - resp <- either.fold( - e => - responseJson( - s"Error obtaining info on Endpoint $e", - InternalServerError - ), - json => Ok(json) - ) - } yield resp - } + val partsMap = PartsMap(m.parts) + for { + endpointUrl <- getEndpointUrl(partsMap).value + response <- endpointUrl match { + case Left(err) => errorResponseJson(err, BadRequest) + case Right(endpointUrl) => + val endpointInfo = getEndpointInfo(endpointUrl) + endpointInfo match { + case Endpoint(errMsg, OFFLINE) => + errorResponseJson( + errMsg, + InternalServerError + ) + case _ => Ok(endpointInfo.asJson) + } + } + } yield response + } + // TODO: document case GET -> Root / `api` / `verb` / "outgoing" :? 
- OptEndpointParam(optEndpoint) +& - OptNodeParam(optNode) +& - LimitParam(optLimit) => + EndpointParameter(optEndpoint) +& + NodeParameter(optNode) +& + LimitParameter(optLimit) => for { eitherOutgoing <- getOutgoing(optEndpoint, optNode, optLimit).value resp <- eitherOutgoing.fold( - (s: String) => responseJson(s"Error: $s", InternalServerError), + (s: String) => errorResponseJson(s"Error: $s", InternalServerError), (outgoing: Outgoing) => Ok(outgoing.toJson) ) } yield resp } - private def getOutgoing( - optEndpoint: Option[String], - optNode: Option[String], - optLimit: Option[String] - ): EitherT[IO, String, Outgoing] = { - for { - endpointIRI <- EitherT.fromEither[IO]( - Either - .fromOption(optEndpoint, "No endpoint provided") - .flatMap(IRI.fromString(_)) - ) - node <- EitherT.fromEither[IO]( - Either - .fromOption(optNode, "No node provided") - .flatMap(IRI.fromString(_)) - ) - limit <- EitherT.fromEither[IO]( - NumericUtils.parseInt(optLimit.getOrElse("1")) - ) - o <- outgoing(endpointIRI, node, limit) - } yield o - } - - private def outgoing(endpoint: IRI, node: IRI, limit: Int): ESIO[Outgoing] = - for { - triples <- stream2es(EndpointJena(endpoint).triplesWithSubject(node)) - } yield Outgoing.fromTriples(node, endpoint, triples.toSet) - } object EndpointService { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala index 6a17ec51..daddde51 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala @@ -4,12 +4,15 @@ import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.ApiService -import 
es.weso.rdfshape.server.api.routes.IncomingRequestParameters.UrlParam +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.UrlParameter +import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import org.http4s._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl import scalaj.http.Http +import scala.util.{Failure, Success, Try} + class FetchService() extends Http4sDsl[IO] with ApiService with LazyLogging { override val verb: String = "fetch" @@ -18,19 +21,27 @@ class FetchService() extends Http4sDsl[IO] with ApiService with LazyLogging { */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - // Query URL and return the response + /** Query a given URL and return the response. + * Receives the URL to be queried: + * - url [String]: URL to be queried + * Returns the URL contents (response body) + */ case GET -> Root / `api` / `verb` :? - UrlParam(url) => - try { - val res = Http(url).asString - if(res.isSuccess) { - Ok(res.body) - } else { - InternalServerError("Could not fetch URL") - } - } catch { - case _: Exception => - InternalServerError("Could not fetch URL") + UrlParameter(url) => + Try { + Http(url).asString + } match { + case Success(res) if res.isSuccess => Ok(res.body) + case Success(res) => + errorResponseJson( + s"Could not fetch URL: status ${res.code}", + InternalServerError + ) + case Failure(exc) => + errorResponseJson( + s"Could not fetch URL: ${exc.getMessage}", + InternalServerError + ) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala new file mode 100644 index 00000000..eab5298a --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala @@ -0,0 +1,26 @@ +package es.weso.rdfshape.server.api.routes.permalink.logic + +import com.typesafe.scalalogging.LazyLogging + +import 
java.net.URL
+import java.util.Date
+
+/** Data class representing a permalink
+ *
+ * @param longUrl Permalink target
+ * @param code Permalink identifying code
+ * @param creationDate Permalink creation date
+ */
+sealed case class Permalink(
+    longUrl: URL,
+    code: Long,
+    creationDate: Date = new Date(),
+    editionDate: Date
+)
+
+private[api] object Permalink extends LazyLogging {
+
+  /** Placeholder value used for the permalink query whenever an empty target is issued/needed.
+   */
+  private val emptyTargetValue = ""
+}
diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala
index 5beacd15..b1a93178 100644
--- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala
+++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/service/PermalinkService.scala
@@ -4,9 +4,9 @@ import cats.effect._
import com.typesafe.scalalogging.LazyLogging
import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api
import es.weso.rdfshape.server.api.routes.ApiService
-import es.weso.rdfshape.server.api.routes.IncomingRequestParameters.{
-  UrlCodeParam,
-  UrlParam
+import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{
+  UrlCodeParameter,
+  UrlParameter
}
import org.http4s._
import org.http4s.client.Client
@@ -45,9 +45,13 @@ class PermalinkService(client: Client[IO])
 */
val routes: HttpRoutes[IO] =
HttpRoutes.of[IO] {
-    // Insert a reference to the permalink in DB
+    /** Create a permalink to another resource.
+      * Receives the URL to be linked:
+      * - url [String]: URL to be linked
+      * Returns the permalink unique code in the response body
+      */
case GET -> Root / `api` / `verb` / "generate" :? 
- UrlParam(url) => + UrlParameter(url) => // Store only query path and query params val urlObj = new URL(url) val urlPath = s"${urlObj.getPath}?${urlObj.getQuery}" @@ -101,9 +105,13 @@ class PermalinkService(client: Client[IO]) } - // Retrieve a URL given the link + /** Retrieve a URL to a resource given its permalink code. + * Receives the permalink code to be checked: + * - urlCode [String]: code to be checked + * Returns the permalink target (if present in the database) in the response body + */ case GET -> Root / `api` / `verb` / "get" :? - UrlCodeParam(urlCode) => + UrlCodeParameter(urlCode) => try { val code = urlCode.toLong val promise = Promise[IO[Response[IO]]]() @@ -158,16 +166,37 @@ class PermalinkService(client: Client[IO]) } } // DB credentials. Access is limited to application needs. + /** Database user. Can be overridden by environment variables. + */ private val mongoUser = sys.env.getOrElse("MONGO_USER", "rdfshape-user") + + /** Database password. Can be overridden by environment variables. + */ private val mongoPassword = sys.env.getOrElse("MONGO_PASSWORD", "rdfshape-user") + + /** Database name. Can be overridden by environment variables. + */ private val mongoDatabase = sys.env.getOrElse("MONGO_DATABASE", "rdfshape") + + /** Database collection name. Can be overridden by environment variables. + */ private val collectionName = sys.env.getOrElse("MONGO_COLLECTION", "permalinks") + + /** Final connection String formed by interpolating database credentials. + */ private val mongoConnectionString = s"mongodb+srv://$mongoUser:$mongoPassword@cluster0.pnja6.mongodb.net/$mongoDatabase" + "?retryWrites=true&w=majority" + /** Given a URL, search for a permalink already targeting it. + * + * @param urlPath URL which we want to find a permalink for + * @return Optionally, the identifier of the permalink targeting the given URL + * @note This is internally used to prevent creating multiple permalinks for the same targets. 
+ * Instead, the access-date of the permalink is updated.
+ */
private def retrieveUrlCode(urlPath: String): Option[Long] = {
val promise = Promise[Option[Long]]()
@@ -207,6 +236,10 @@ class PermalinkService(client: Client[IO])
result
}

+  /** Update the access date of a given permalink (invoked when it is accessed)
+    *
+    * @param code Unique identifier of the permalink
+    */
private def updateUrl(code: Long): Unit = {
logger.debug(s"URL code to update: $code")
// Update date of document in database
diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala
index d953adc4..a4830839 100644
--- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala
+++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala
@@ -33,7 +33,7 @@ case class SchemaConversionResult(
 */
def toJson: Json = Json.fromFields(
List(
-      ("msg", Json.fromString(msg))
+      ("message", Json.fromString(msg))
) ++ maybeField(schema, "schema", Json.fromString) ++
maybeField(schemaFormat, "schemaFormat", Json.fromString) ++
diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala
index aa91a5e7..59d9fcf8 100644
--- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala
+++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala
@@ -5,7 +5,7 @@ import io.circe.Json

/** Data class representing the data contained in a schema
 *
- * @param schemaName Name of the schema
+ * @param schemaType Type of the schema
 * @param schemaEngine Engine of the schema
 * @param wellFormed Is the schema well formed
 * @param shapes List 
of shapes in the schema @@ -13,7 +13,7 @@ import io.circe.Json * @param errors Errors in the schema */ private[schema] case class SchemaInfo( - schemaName: Option[String], + schemaType: Option[String], schemaEngine: Option[String], wellFormed: Boolean, shapes: List[String], @@ -27,7 +27,7 @@ private[schema] case class SchemaInfo( */ def toJson: Json = Json.fromFields( List( - ("schemaName", schemaName.fold(Json.Null)(Json.fromString)), + ("schemaType", schemaType.fold(Json.Null)(Json.fromString)), ("schemaEngine", schemaEngine.fold(Json.Null)(Json.fromString)), ("wellFormed", Json.fromBoolean(wellFormed)), ("shapes", Json.fromValues(shapes.map(Json.fromString))), @@ -44,7 +44,7 @@ private[schema] case class SchemaInfo( ) ) ), - ("errors", Json.fromValues(errors.map(Json.fromString))) + ("error", Json.fromValues(errors.map(Json.fromString))) ) ) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index 6f746a5b..f58ced4b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -5,16 +5,15 @@ import cats.syntax.either._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI -import es.weso.rdf.{RDFBuilder, RDFReasoner} +import es.weso.rdf.{InferenceEngine, RDFBuilder, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} import es.weso.rdfshape.server.api.routes.data.logic.DataParam -import es.weso.rdfshape.server.api.routes.schema.service.{ - SchemaParam, - TriggerModeParam -} -import es.weso.schema.{Result, Schema, 
ValidationTrigger} +import es.weso.rdfshape.server.api.routes.schema.service.TriggerModeParam +import es.weso.schema.{Result, Schema, ShaclexSchema, ValidationTrigger} +import es.weso.shacl.converter.Shacl2ShEx +import es.weso.shapemaps.ShapeMap import es.weso.uml.Schema2UML import io.circe.Json @@ -77,13 +76,10 @@ private[api] object SchemaOperations extends LazyLogging { val info = schema.info val fields: List[(String, Json)] = List( - ("schemaName", Json.fromString(info.schemaName)), + ("schemaType", Json.fromString(info.schemaName)), ("schemaEngine", Json.fromString(info.schemaEngine)), - ("wellFormed", Json.fromBoolean(info.isWellFormed)), - ("errors", Json.fromValues(info.errors.map(Json.fromString))), - ("parsed", Json.fromString("Parsed OK")), ("svg", Json.fromString(svg)), - ("plantUML", Json.fromString(plantuml)) + ("plantUml", Json.fromString(plantuml)) ) Json.fromFields(fields) } @@ -91,7 +87,12 @@ private[api] object SchemaOperations extends LazyLogging { def schema2SVG(schema: Schema): IO[(String, String)] = { val eitherUML = Schema2UML.schema2UML(schema) eitherUML.fold( - e => IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")), + e => { + val errMsg = s"Error in SVG conversion: $e" + logger.error(errMsg) + IO.raiseError(new RuntimeException(errMsg)) + // IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")) + }, pair => { val (uml, _) = pair logger.debug(s"UML converted: $uml") @@ -100,11 +101,8 @@ private[api] object SchemaOperations extends LazyLogging { } yield { (str, uml.toPlantUML(umlOptions)) }).handleErrorWith(e => - IO.pure( - ( - s"SVG conversion error: ${e.getMessage}", - uml.toPlantUML(umlOptions) - ) + IO.raiseError( + new RuntimeException(s"SVG conversion error: ${e.getMessage}") ) ) } @@ -151,12 +149,12 @@ private[api] object SchemaOperations extends LazyLogging { ): IO[(Result, Option[ValidationTrigger], Long)] = { val dp = DataParam.empty.copy( data = Some(data), - dataFormatTextarea = optDataFormat, + 
optDataFormat = optDataFormat, inference = optInference ) val sp = SchemaParam.empty.copy( schema = optSchema, - schemaFormatTextArea = optSchemaFormat, + optSchemaFormat = optSchemaFormat, schemaEngine = optSchemaEngine ) @@ -246,4 +244,121 @@ private[api] object SchemaOperations extends LazyLogging { private def schemaErr(msg: String) = IO((Result.errStr(s"Error: $msg"), None, NoTime)) + /** Given an input schema, convert it to another output schema with the parameters specified. + * + * @param schema Input schema + * @param schemaStr Input schema contents + * @param schemaFormat Input schema format + * @param schemaEngine Input schema engine + * @param optTargetSchemaFormat Output schema desired format + * @param optTargetSchemaEngine Output schema desired engine + * @return Optionally, the raw output schema contents + */ + private[schema] def convertSchema( + schema: Schema, + schemaStr: Option[String], + schemaFormat: SchemaFormat, + schemaEngine: String, + optTargetSchemaFormat: Option[SchemaFormat], + optTargetSchemaEngine: Option[String] + ): IO[SchemaConversionResult] = { + val result: IO[SchemaConversionResult] = for { + pair <- doSchemaConversion( + schema, + optTargetSchemaFormat.map(_.name), + optTargetSchemaEngine + ) + sourceStr <- schemaStr match { + case None => schema.serialize(schemaFormat.name) + case Some(source) => IO(source) + } + (resultStr, resultShapeMap) = pair + } yield SchemaConversionResult.fromConversion( + sourceStr, + schemaFormat.name, + schemaEngine, + optTargetSchemaFormat.map(_.name), + optTargetSchemaEngine, + resultStr, + resultShapeMap + ) + + for { + either <- result.attempt + } yield either.fold( + err => SchemaConversionResult.fromMsg(s"Error converting schema: $err"), + identity + ) + } + + private def doSchemaConversion( + schema: Schema, + targetSchemaFormat: Option[String], + optTargetSchemaEngine: Option[String] + ): IO[(String, ShapeMap)] = { + logger.debug( + s"Schema conversion, name: ${schema.name}, targetSchema: 
$targetSchemaFormat" + ) + val default = for { + str <- schema.convert(targetSchemaFormat, optTargetSchemaEngine, None) + } yield (str, ShapeMap.empty) + schema match { + case shacl: ShaclexSchema => + optTargetSchemaEngine.map(_.toUpperCase()) match { + case Some("SHEX") => + logger.debug("Schema conversion: SHACLEX -> SHEX") + Shacl2ShEx + .shacl2ShEx(shacl.schema) + .fold( + e => + IO.raiseError( + new RuntimeException( + s"Error converting SHACL -> ShEx: $e" + ) + ), + pair => { + val (schema, shapeMap) = pair + logger.debug(s"shapeMap: $shapeMap") + for { + emptyBuilder <- RDFAsJenaModel.empty + str <- emptyBuilder.use(builder => + es.weso.shex.Schema.serialize( + schema, + targetSchemaFormat.getOrElse("SHEXC"), + None, + builder + ) + ) + } yield (str, shapeMap) + } + ) + case _ => default + } + case _ => default + } + } + + /** Apply inference + * + * @param rdf Data over which the inference should be applied + * @param inferenceName Name of the inference to be applied + * @return The RDF data after applying the inference + */ + private[schema] def applyInference( + rdf: RDFReasoner, + inferenceName: Option[String] + ): IO[RDFReasoner] = inferenceName match { + case None => IO.pure(rdf) + case Some(name) => + InferenceEngine.fromString(name) match { + case Left(str) => + IO.raiseError( + new RuntimeException( + s"Error parsing inference engine: $name: $str" + ) + ) + case Right(engine) => rdf.applyInference(engine) + } + } + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala new file mode 100644 index 00000000..96ed62e6 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala @@ -0,0 +1,289 @@ +package es.weso.rdfshape.server.api.routes.schema.logic + +import cats.effect._ +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging 
+import es.weso.rdf.RDFReasoner +import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ + defaultSchemaEngine, + defaultSchemaFormat +} +import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.schema.{Schema, Schemas} + +import scala.io.Source +import scala.util.Try + +sealed case class SchemaParam( + schema: Option[String], + schemaURL: Option[String], + schemaFile: Option[String], + optSchemaFormat: Option[SchemaFormat], + schemaEngine: Option[String], + targetSchemaEngine: Option[String], + targetSchemaFormat: Option[String], + activeSchemaTab: Option[String] +) extends LazyLogging { + + val schemaFormat: SchemaFormat = + optSchemaFormat.getOrElse(defaultSchemaFormat) + + def getSchema( + data: Option[RDFReasoner] + ): IO[(Option[String], Either[String, Schema])] = { + + logger.debug(s"activeSchemaTab: $activeSchemaTab") + logger.debug(s"schemaEngine: $schemaEngine") + val inputType = activeSchemaTab match { + case Some(a) => parseSchemaTab(a) + case None if schema.isDefined => Right(SchemaTextAreaType) + case None if schemaURL.isDefined => Right(SchemaUrlType) + case None if schemaFile.isDefined => Right(SchemaFileType) + case None => Right(SchemaTextAreaType) + } + logger.debug(s"inputType: $inputType") + val maybeSchema: IO[(Option[String], Either[String, Schema])] = + inputType match { + case Right(`SchemaUrlType`) => + logger.debug("Schema input type - SchemaUrlType") + schemaURL match { + case None => IO((None, Left(s"Non value for schemaURL"))) + case Some(schemaUrl) => + val e: IO[(String, Schema)] = for { + str <- IO.fromEither( + Try(Source.fromURL(schemaUrl).mkString).toEither + ) + schema <- Schemas.fromString( + str, + schemaFormat.name, + schemaEngine.getOrElse(defaultSchemaEngine), + getBase + ) + _ <- IO { + 
logger.debug("Schema parsed") + } + } yield (str, schema) + e.attempt.map( + _.fold( + s => (none[String], s.getMessage.asLeft[Schema]), + pair => { + val (str, schema) = pair + (Some(str), Right(schema)) + } + ) + ) + } + case Right(`SchemaFileType`) => + logger.debug("Schema input type - SchemaFileType") + schemaFile match { + case None => IO((None, Left(s"No value for schemaFile"))) + case Some(schemaStr) => + val schemaFormatStr = + schemaFormat.name + val schemaEngineStr = + schemaEngine.getOrElse(defaultSchemaEngine) + Schemas + .fromString( + schemaStr, + schemaFormatStr, + schemaEngineStr, + getBase + ) + .attempt + .map( + _.fold( + s => (Some(schemaStr), Left(s"Error parsing file: $s")), + schema => (Some(schemaStr), Right(schema)) + ) + ) + } + case Right(`SchemaTextAreaType`) => + logger.debug("Schema input type - SchemaTextAreaType") + val schemaStr = schema.getOrElse("") + for { + pair <- Schemas + .fromString( + schemaStr, + schemaFormat.name, + schemaEngine.getOrElse(defaultSchemaEngine), + getBase + ) + .attempt + .map( + _.fold( + err => { + /* TODO: some specific malformed schemas produce a + * NullPointerException with no further message */ + val msg = + if(err.getMessage == null) "Unknown error." 
+ else err.getMessage + (Some(schemaStr), Left(msg)) + }, + schema => (Some(schemaStr), Right(schema)) + ) + ) + (str, eitherSchema) = pair + nameSchema = eitherSchema.map(_.name).getOrElse(s"No schema") + _ <- IO { + logger.debug(s"nameSchema: $nameSchema") + } + foundSchema <- Schemas.lookupSchema( + schemaEngine.getOrElse(defaultSchemaEngine) + ) + _ <- IO { + logger.debug(s"foundSchema: ${foundSchema.name}") + } + } yield pair + case Right(other) => + logger.warn(s"Unknown value for activeSchemaTab: $other") + IO((None, Left(s"Unknown value for activeSchemaTab: $other"))) + case Left(msg) => + logger.warn(msg) + IO((None, Left(msg))) + } + + maybeSchema + } + + def parseSchemaTab(tab: String): Either[String, SchemaInputType] = { + val inputTypes = List(SchemaUrlType, SchemaFileType, SchemaTextAreaType) + inputTypes.find(_.id == tab) match { + case Some(x) => Right(x) + case None => + Left( + s"Wrong value of tab: $tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" + ) + } + } + + sealed abstract class SchemaInputType { + val id: String + } + + case object SchemaUrlType extends SchemaInputType { + override val id = "#schemaUrl" + } + + case object SchemaFileType extends SchemaInputType { + override val id = "#schemaFile" + } + + case object SchemaTextAreaType extends SchemaInputType { + override val id = "#schemaTextArea" + } + +} + +object SchemaParam extends LazyLogging { + + private[api] def mkSchema( + partsMap: PartsMap, + data: Option[RDFReasoner] + ): IO[(Schema, SchemaParam)] = { + val result: IO[Either[String, (Schema, SchemaParam)]] = for { + sp <- { + mkSchemaParam(partsMap) + } + eitherPair <- sp.getSchema(data).attempt + resp <- eitherPair.fold( + err => IO.pure(Left(err.getMessage)), + pair => { + val (maybeStr, maybeSchema) = pair + maybeSchema match { + // TODO: HERE ERROR IS NULL + case Left(str) => IO.pure(Left(str)) + case Right(schema) => + IO.pure(Right((schema, sp.copy(schema = maybeStr)))) + } + } + ) + } yield resp + 
result.flatMap( + _.fold( + errMsg => { + logger.error(errMsg) + IO.raiseError( + new RuntimeException(s"Could not obtain schema. $errMsg") + ) + }, + IO.pure + ) + ) + } + + private[api] def mkSchemaParam(partsMap: PartsMap): IO[SchemaParam] = for { + schema <- partsMap.optPartValue(SchemaParameter.name) + schemaURL <- partsMap.optPartValue(SchemaURLParameter.name) + schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) + schemaFormatValue <- getSchemaFormat(SchemaFormatParameter.name, partsMap) + schemaEngine <- partsMap.optPartValue(SchemaEngineParameter.name) + targetSchemaEngine <- partsMap.optPartValue( + TargetSchemaEngineParameter.name + ) + targetSchemaFormat <- partsMap.optPartValue( + TargetSchemaFormatParameter.name + ) + activeSchemaTab <- partsMap.optPartValue(ActiveSchemaTabParameter.name) + } yield { + SchemaParam( + schema, + schemaURL, + schemaFile, + schemaFormatValue, + schemaEngine, + targetSchemaEngine, + targetSchemaFormat, + activeSchemaTab + ) + } + + private def getSchemaFormat( + name: String, + partsMap: PartsMap + ): IO[Option[SchemaFormat]] = for { + maybeStr <- partsMap.optPartValue(name) + } yield maybeStr match { + case None => None + case Some(str) => + SchemaFormat + .fromString(str) + .fold( + err => { + logger.error(s"Unsupported schemaFormat for $name: $str") + None + }, + df => Some(df) + ) + } + + private[api] def empty: SchemaParam = + SchemaParam( + schema = None, + schemaURL = None, + schemaFile = None, + optSchemaFormat = None, + schemaEngine = None, + targetSchemaEngine = None, + targetSchemaFormat = None, + activeSchemaTab = None + ) + +} + +/** Enumeration of the different possible Schema tabs sent by the client. + * The tab sent indicates the API if the schema was sent in raw text, as a URL + * to be fetched or as a text file containing the schema. + * In case the client submits the schema in several formats, the selected tab will indicate the preferred one. 
+ */ +private[logic] object SchemaTab extends Enumeration { + type SchemaTab = String + + val TEXT = "#schemaTextArea" + val URL = "#schemaUrl" + val FILE = "#schemaFile" + + val defaultActiveShapeMapTab: SchemaTab = TEXT +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala deleted file mode 100644 index d676017c..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaParam.scala +++ /dev/null @@ -1,321 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.service - -import cats.effect._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - defaultActiveSchemaTab, - defaultSchemaEngine -} -import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.routes.PartsMap -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase -import es.weso.schema.{Schema, Schemas} - -import scala.io.Source -import scala.util.Try - -case class SchemaParam( - schema: Option[String], - schemaURL: Option[String], - schemaFile: Option[String], - schemaFormatTextArea: Option[SchemaFormat], - schemaFormatUrl: Option[SchemaFormat], - schemaFormatFile: Option[SchemaFormat], - schemaFormatValue: Option[SchemaFormat], - schemaEngine: Option[String], - schemaEmbedded: Option[Boolean], - targetSchemaEngine: Option[String], - targetSchemaFormat: Option[String], - activeSchemaTab: Option[String] -) extends LazyLogging { - - val schemaFormat: Option[SchemaFormat] = { - val schemaTab = parseSchemaTab( - activeSchemaTab.getOrElse(defaultActiveSchemaTab) - ) - logger.debug(s"schemaTab: $schemaTab") - schemaTab match { - case Right(`SchemaUrlType`) => schemaFormatUrl orElse schemaFormatValue - case Right(`SchemaFileType`) => schemaFormatFile 
orElse schemaFormatValue - case Right(`SchemaTextAreaType`) => - schemaFormatTextArea orElse schemaFormatValue - case _ => schemaFormatValue - } - } - - def getSchema( - data: Option[RDFReasoner] - ): IO[(Option[String], Either[String, Schema])] = { - logger.debug(s"schemaEmbedded: $schemaEmbedded") - val v: IO[(Option[String], Either[String, Schema])] = schemaEmbedded match { - case Some(true) => - data match { - case None => IO((None, Left(s"Schema embedded but no data found"))) - case Some(rdf) => - for { - eitherSchema <- { - Schemas - .fromRDF(rdf, schemaEngine.getOrElse(defaultSchemaEngine)) - .attempt - } - resp <- eitherSchema match { - case Left(str) => - IO((None, Left(s"Error obtaining schema from RDF $rdf"))) - case Right(schema) => - for { - str <- schema.serialize( - schemaFormat.getOrElse(SchemaFormat.defaultFormat).name - ) - } yield (Some(str), Right(schema)) - } - } yield resp - } - case _ => - logger.debug(s"activeSchemaTab: $activeSchemaTab") - logger.debug(s"schemaEngine: $schemaEngine") - val inputType = activeSchemaTab match { - case Some(a) => parseSchemaTab(a) - case None if schema.isDefined => Right(SchemaTextAreaType) - case None if schemaURL.isDefined => Right(SchemaUrlType) - case None if schemaFile.isDefined => Right(SchemaFileType) - case None => Right(SchemaTextAreaType) - } - logger.debug(s"inputType: $inputType") - inputType match { - case Right(`SchemaUrlType`) => - logger.debug("Schema input type - SchemaUrlType") - schemaURL match { - case None => IO((None, Left(s"Non value for schemaURL"))) - case Some(schemaUrl) => - val e: IO[(String, Schema)] = for { - str <- IO.fromEither( - Try(Source.fromURL(schemaUrl).mkString).toEither - ) - schema <- Schemas.fromString( - str, - schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, - schemaEngine.getOrElse(defaultSchemaEngine), - getBase - ) // .leftMap(s => s"Error parsing contents of $schemaUrl: $s\nContents:\n$str") - _ <- IO { logger.debug("Schema parsed") } - } yield (str, 
schema) - e.attempt.map( - _.fold( - s => ((none[String], s.getMessage.asLeft[Schema])), - pair => { - val (str, schema) = pair - ((Some(str), Right(schema))) - } - ) - ) - } - case Right(`SchemaFileType`) => - logger.debug("Schema input type - SchemaFileType") - schemaFile match { - case None => IO((None, Left(s"No value for schemaFile"))) - case Some(schemaStr) => - val schemaFormatStr = - schemaFormat.getOrElse(SchemaFormat.defaultFormat).name - val schemaEngineStr = - schemaEngine.getOrElse(defaultSchemaEngine) - Schemas - .fromString( - schemaStr, - schemaFormatStr, - schemaEngineStr, - getBase - ) - .attempt - .map( - _.fold( - s => (Some(schemaStr), Left(s"Error parsing file: $s")), - schema => (Some(schemaStr), Right(schema)) - ) - ) - } - case Right(`SchemaTextAreaType`) => - logger.debug("Schema input type - SchemaTextAreaType") - val schemaStr = schema.getOrElse("") - for { - pair <- Schemas - .fromString( - schemaStr, - schemaFormat.getOrElse(SchemaFormat.defaultFormat).name, - schemaEngine.getOrElse(defaultSchemaEngine), - getBase - ) - .attempt - .map( - _.fold( - s => (Some(schemaStr), Left(s.getMessage)), - schema => (Some(schemaStr), Right(schema)) - ) - ) - (str, eitherSchema) = pair - nameSchema = eitherSchema.map(_.name).getOrElse(s"No schema") - _ <- IO { logger.debug(s"nameSchema: $nameSchema") } - foundSchema <- Schemas.lookupSchema( - schemaEngine.getOrElse(defaultSchemaEngine) - ) - _ <- IO { logger.debug(s"foundSchema: ${foundSchema.name}") } - } yield pair - case Right(other) => - logger.warn(s"Unknown value for activeSchemaTab: $other") - IO((None, Left(s"Unknown value for activeSchemaTab: $other"))) - case Left(msg) => - logger.warn(msg) - IO((None, Left(msg))) - } - } - v - } - - def parseSchemaTab(tab: String): Either[String, SchemaInputType] = { - val inputTypes = List(SchemaUrlType, SchemaFileType, SchemaTextAreaType) - inputTypes.find(_.id == tab) match { - case Some(x) => Right(x) - case None => - Left( - s"Wrong value of tab: 
$tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" - ) - } - } - - private def chooseSchemaTab: String = { - (schema, schemaURL) match { - case (Some(_), None) => SchemaTextAreaType.id - case (None, Some(_)) => SchemaUrlType.id - case (None, None) => defaultActiveSchemaTab - case (Some(_), Some(_)) => defaultActiveSchemaTab - } - } - - sealed abstract class SchemaInputType { - val id: String - } - - case object SchemaUrlType extends SchemaInputType { - override val id = "#schemaUrl" - } - - case object SchemaFileType extends SchemaInputType { - override val id = "#schemaFile" - } - - case object SchemaTextAreaType extends SchemaInputType { - override val id = "#schemaTextArea" - } - -} - -object SchemaParam extends LazyLogging { - - private[api] def mkSchema( - partsMap: PartsMap, - data: Option[RDFReasoner] - ): IO[(Schema, SchemaParam)] = { - // val L = implicitly[LiftIO[F]] - // val E = implicitly[MonadError[F,Throwable]] - val r: IO[Either[String, (Schema, SchemaParam)]] = for { - sp <- { - mkSchemaParam(partsMap) - } - eitherPair <- sp.getSchema(data).attempt - resp <- eitherPair.fold( - s => IO.pure(Left(s"Error: $s")), - pair => { - val (maybeStr, maybeSchema) = pair - maybeSchema match { - case Left(str) => IO.pure(Left(str)) - case Right(schema) => - IO.pure(Right((schema, sp.copy(schema = maybeStr)))) - } - } - ) - } yield resp - r.flatMap( - _.fold( - str => - IO.raiseError(new RuntimeException(s"Error obtaining schema: $str")), - IO.pure - ) - ) - } - - /* private[server] def mkSchemaIO[F[_]:Effect](partsMap: PartsMap[F], data: - * Option[RDFReasoner] ): IO[Either[String,(Schema, SchemaParam)]] = { val L = - * implicitly[LiftIO[F]] val r: IO[Either[String,(Schema,SchemaParam)]] = for - * { sp <- mkSchemaParam(partsMap) eitherPair <- sp.getSchema(data).attempt - * resp <- eitherPair.fold( s => IO.pure(Left(s"Error: $s")), pair => { val - * (maybeStr, maybeSchema) = pair maybeSchema match { case Left(str) => - * IO.pure(Left(str)) case 
Right(schema) => IO.pure(Right((schema, - * sp.copy(schema = maybeStr)))) } }) } yield resp r } */ - - private[api] def mkSchemaParam(partsMap: PartsMap): IO[SchemaParam] = for { - schema <- partsMap.optPartValue("schema") - schemaURL <- partsMap.optPartValue("schemaURL") - schemaFile <- partsMap.optPartValue("schemaFile") - schemaFormatTextArea <- getSchemaFormat("schemaFormatTextArea", partsMap) - schemaFormatUrl <- getSchemaFormat("schemaFormatUrl", partsMap) - schemaFormatFile <- getSchemaFormat("schemaFormatFile", partsMap) - schemaFormatValue <- getSchemaFormat("schemaFormat", partsMap) - schemaEngine <- partsMap.optPartValue("schemaEngine") - targetSchemaEngine <- partsMap.optPartValue("targetSchemaEngine") - targetSchemaFormat <- partsMap.optPartValue("targetSchemaFormat") - activeSchemaTab <- partsMap.optPartValue("activeSchemaTab") - schemaEmbedded <- partsMap.optPartValueBoolean("schemaEmbedded") - } yield { - SchemaParam( - schema, - schemaURL, - schemaFile, - schemaFormatTextArea, - schemaFormatUrl, - schemaFormatFile, - schemaFormatValue, - schemaEngine, - schemaEmbedded, - targetSchemaEngine, - targetSchemaFormat, - activeSchemaTab - ) - } - - private def getSchemaFormat( - name: String, - partsMap: PartsMap - ): IO[Option[SchemaFormat]] = for { - maybeStr <- partsMap.optPartValue(name) - } yield maybeStr match { - case None => None - case Some(str) => - SchemaFormat - .fromString(str) - .fold( - err => { - logger.error(s"Unsupported schemaFormat for $name: $str") - None - }, - df => Some(df) - ) - } - - private[api] def empty: SchemaParam = - SchemaParam( - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None - ) - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index e16e926c..e05e82df 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -1,42 +1,28 @@ package es.weso.rdfshape.server.api.routes.schema.service -import cats.data._ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.{InferenceEngine, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - defaultSchemaEngine, - defaultSchemaFormat -} +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json +import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.data.logic.DataParam import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ -import es.weso.rdfshape.server.api.routes.schema.logic.{ - SchemaConversionResult, - SchemaInfo, - SchemaInfoResult -} -import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaParam import es.weso.rdfshape.server.api.utils.OptEitherF._ -import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.schema._ -import es.weso.shacl.converter.Shacl2ShEx -import es.weso.shapemaps.ShapeMap import es.weso.utils.IOUtils._ import io.circe._ -import io.circe.generic.auto._ -import io.circe.syntax._ import 
org.http4s._ import org.http4s.circe._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl -import org.http4s.headers._ import org.http4s.multipart.Multipart /** API service to handle schema-related operations @@ -54,11 +40,15 @@ class SchemaService(client: Client[IO]) */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + /** Returns a JSON array with the accepted schema engines for ShEx + */ case GET -> Root / `api` / `verb` / "engines" => val engines = Schemas.availableSchemaNames val json = Json.fromValues(engines.map(str => Json.fromString(str))) Ok(json) + /** Returns a JSON array with the accepted schema engines for SHACL + */ case GET -> Root / `api` / `verb` / "engines" / "shacl" => val shaclSchemas = List(Schemas.shaclex, Schemas.jenaShacl, Schemas.shaclTQ) @@ -67,74 +57,63 @@ class SchemaService(client: Client[IO]) ) Ok(json) + /** Returns the default schema format as a raw string + */ case GET -> Root / `api` / `verb` / "engines" / "default" => val schemaEngine = Schemas.defaultSchemaName val json = Json.fromString(schemaEngine) Ok(json) + /** Returns a JSON array with the accepted schema formats. + * Accepts an optional query parameter specifying the schema engine: + * - schemaEngine [String]: schema engine for which we are listing the formats + */ case GET -> Root / `api` / `verb` / "formats" :? - SchemaEngineParam(optSchemaEngine) => + SchemaEngineParameter(optSchemaEngine) => val schemaEngine = optSchemaEngine.getOrElse(Schemas.defaultSchemaName) - val r: IO[Json] = Schemas + val res = Schemas .lookupSchema(schemaEngine) .attempt .map( _.fold( - err => - Json.fromFields( - List( - ( - "error", - Json.fromString( - s"Schema engine: $schemaEngine not found. Available engines = ${Schemas.availableSchemaNames - .mkString(",")}" - ) - ) - ) + _ => + errorResponseJson( + s"Schema engine: $schemaEngine not found. 
Available engines = ${Schemas.availableSchemaNames + .mkString(",")}", + NotFound ), schema => - Json.fromValues(schema.formats.toList.map(Json.fromString)) + Ok(Json.fromValues(schema.formats.toList.map(Json.fromString))) ) ) - io2f(r).flatMap(json => Ok(json)) + res.flatten + /** Returns a JSON array with the accepted triggerModes + */ case GET -> Root / `api` / `verb` / "triggerModes" => val triggerModes = ValidationTrigger.triggerValues.map(_._1) val json = Json.fromValues(triggerModes.map(Json.fromString)) Ok(json) - case GET -> Root / `api` / `verb` / "info" :? - OptSchemaParam(optSchema) +& - SchemaFormatParam(optSchemaFormat) +& - SchemaEngineParam(optSchemaEngine) => - val schemaEngine = optSchemaEngine.getOrElse(Schemas.defaultSchemaName) - val schemaFormat = optSchemaFormat.getOrElse(Schemas.defaultSchemaFormat) - val schemaStr = optSchema match { - case None => "" - case Some(schema) => schema - } - for { - either <- Schemas - .fromString(schemaStr, schemaFormat, schemaEngine, None) - .attempt - r <- either.fold( - e => responseJson(s"Error reading schema: $e\nString: $schemaStr"), - schema => { - val shapes: List[String] = schema.shapes - val jsonShapes = Json.fromValues(shapes.map(Json.fromString)) - val pm: Json = prefixMap2Json(schema.pm) - val result = SchemaInfoResult( - schemaStr, - schemaFormat, - schemaEngine, - jsonShapes, - pm - ).asJson - Ok(result) - } - ) - } yield r - + /** Obtain information about an schema. + * Receives a JSON object with the input schema information: + * - schema [String]: Raw schema data + * - schemaUrl [String]: Url containing the schema + * - schemaFile [File Object]: File containing schema + * - schemaFormat [String]: Format of the schema + * - schemaEngine [String]: Engine used to process the schema + * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) 
+ * Returns a JSON object with the schema information: + * - schemaType [String]: Type of the schema + * - schemaEngine [String]: Engine of the schema + * - wellFormed [Boolean]: Whether if the schema is well formed or not + * - shapes [Array]: Array of the shapes in the schema + * - shapesPrefixMap [Array]: Array of the prefixes in the schema + * - prefix [String]: Prefix key + * - uri [String]: Prefix URI + * - errors [Array]: Array of errors in the schema + */ + // TODO: show errors in a friendlier way in the client's UI case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => { @@ -150,7 +129,10 @@ class SchemaService(client: Client[IO]) e <- r.attempt v <- e.fold( t => { - Ok(SchemaInfo.fromError(t.getMessage).toJson) + errorResponseJson( + t.getMessage, + BadRequest + ) }, Ok(_) ) @@ -158,52 +140,24 @@ class SchemaService(client: Client[IO]) } } - case GET -> Root / `api` / `verb` / "convert" :? - OptSchemaParam(optSchema) +& - SchemaFormatParam(optSchemaFormat) +& - SchemaEngineParam(optSchemaEngine) +& - TargetSchemaFormatParam(optResultSchemaFormat) +& - TargetSchemaEngineParam(optResultSchemaEngine) => - val schemaEngine = optSchemaEngine.getOrElse(Schemas.defaultSchemaName) - val schemaStr = optSchema match { - case None => "" - case Some(schema) => schema - } - for { - maybeSchemaFormat <- optEither2f( - optSchemaFormat, - SchemaFormat.fromString - ) - schemaFormat = maybeSchemaFormat.getOrElse(defaultSchemaFormat) - either <- Schemas - .fromString(schemaStr, schemaFormat.name, schemaEngine, None) - .attempt - r <- either.fold( - e => responseJson(s"Error reading schema: $e\nString: $schemaStr"), - schema => { - for { - optTargetSchemaFormat <- optEither2f( - optResultSchemaFormat, - SchemaFormat.fromString - ) - s <- io2f( - convertSchema( - schema, - optSchema, - schemaFormat, - schemaEngine, - optTargetSchemaFormat, - optResultSchemaEngine - ) - ) - r <- Ok(s.toJson) - } yield r - } - /* Ok(convertSchema(schema, 
optSchema, schemaFormat, schemaEngine, - * optResultSchemaFormat, optResultSchemaEngine).toJson) */ - ) - } yield r - + /** Convert a given schema to another accepted format. + * Receives a JSON object with the input schema information: + * - schema [String]: Raw schema data + * - schemaUrl [String]: Url containing the schema + * - schemaFile [File Object]: File containing schema + * - schemaFormat [String]: Format of the schema + * - targetSchemaFormat [String]: Desired format after conversion of the schema + * - schemaEngine [String]: Engine used to process the schema + * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) + * Returns a JSON object with the converted schema information: + * - message [String]: Informational message on success + * - schema [String]: Original input schema + * - schemaFormat [String]: Format of the original schema + * - schemaEngine [String]: Engine of the conversion + * - targetSchemaFormat [String]: Format of the output schema + * - result [String]: Output schema + * - shapeMap [String]: Output shapemap, if any + */ case req @ POST -> Root / `api` / `verb` / "convert" => req.decode[Multipart[IO]] { m => { @@ -212,8 +166,7 @@ class SchemaService(client: Client[IO]) val r: IO[Json] = for { schemaPair <- SchemaParam.mkSchema(partsMap, None) (schema, sp) = schemaPair - /* targetSchemaFormat <- optEither2f(sp.targetSchemaFormat, - * SchemaFormat.fromString) */ + targetSchemaFormat <- optEither2f( sp.targetSchemaFormat, SchemaFormat.fromString @@ -221,7 +174,7 @@ class SchemaService(client: Client[IO]) converted <- convertSchema( schema, sp.schema, - sp.schemaFormat.getOrElse(SchemaFormat.defaultFormat), + sp.schemaFormat, sp.schemaEngine.getOrElse(defaultSchemaEngine), targetSchemaFormat, sp.targetSchemaEngine @@ -232,18 +185,31 @@ class SchemaService(client: Client[IO]) for { e <- r.attempt v <- e.fold( - t => Ok(SchemaConversionResult.fromMsg(t.getMessage).toJson), + t => 
errorResponseJson(t.getMessage, InternalServerError), Ok(_) ) } yield v } } + /** Convert a given schema to a UML visualization using PlantUML. + * Receives a JSON object with the input schema information: + * - schema [String]: Raw schema data + * - schemaUrl [String]: Url containing the schema + * - schemaFile [File Object]: File containing schema + * - schemaFormat [String]: Format of the schema + * - schemaEngine [String]: Engine used to process the schema + * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) + * Returns a JSON object with the converted schema information: + * - schemaType [String]: Type of the schema + * - schemaEngine [String]: Engine of the schema + * - svg [String]: Array of the shapes in the schema + * - plantUml [String]: Array of the shapes in the schema + */ case req @ POST -> Root / `api` / `verb` / "visualize" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) - logger.info(s"POST info partsMap. $partsMap") val r: IO[Json] = for { schemaPair <- SchemaParam.mkSchema(partsMap, None) (schema, _) = schemaPair @@ -253,11 +219,12 @@ class SchemaService(client: Client[IO]) } for { e <- r.attempt - v <- e.fold(t => responseJson(t.getMessage), Ok(_)) + v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) } yield v } } + // TODO: test and include in the client case req @ POST -> Root / `api` / `verb` / "cytoscape" => req.decode[Multipart[IO]] { m => { @@ -271,185 +238,47 @@ class SchemaService(client: Client[IO]) } for { e <- r.attempt - v <- e.fold(t => responseJson(t.getMessage), Ok(_)) + v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) } yield v } } - case req @ GET -> Root / `api` / `verb` / "visualize" :? 
- SchemaURLParam(optSchemaURL) +& - OptSchemaParam(optSchema) +& - SchemaFormatParam(optSchemaFormatStr) +& - SchemaEngineParam(optSchemaEngine) +& - OptActiveSchemaTabParam(optActiveSchemaTab) => - val r: EitherT[IO, String, String] = for { - optSchemaFormat <- optEither2es( - optSchemaFormatStr, - SchemaFormat.fromString - ) - sp = SchemaParam( - optSchema, - optSchemaURL, - None, - optSchemaFormat, - optSchemaFormat, - optSchemaFormat, - optSchemaFormat, - optSchemaEngine, - None, - None, - None, - optActiveSchemaTab - ) - pair <- EitherT( - sp.getSchema(None) - .attempt - .map(_.leftMap(s => s"Error obtaining schema: ${s.getMessage}")) - ) - (_, either: Either[String, Schema]) = pair - svg <- either.fold( - s => fail_es(s"Error parsing schema: $s"), - schema => { - io2es(schema2SVG(schema).map(_._1)) - } - ) - } yield svg - for { - either <- run_es(r) - v <- either.fold( - s => responseJson(s"Error obtaining schema $s"), - svg => { - Ok(svg).map( - _.withContentType(`Content-Type`(MediaType.image.`svg+xml`)) - ) - } - ) - } yield v - - case req @ GET -> Root / `api` / `verb` / "validate" :? - OptDataParam(optData) +& - OptDataURLParam(optDataURL) +& - DataFormatParam(maybeDataFormatStr) +& - CompoundDataParam(optCompoundData) +& - OptSchemaParam(optSchema) +& - SchemaURLParam(optSchemaURL) +& - SchemaFormatParam(maybeSchemaFormatStr) +& - SchemaEngineParam(optSchemaEngine) +& - OptTriggerModeParam(optTriggerMode) +& - ShapeMapParameterAlt(optShapeMapAlt) +& - ShapeMapParameter(optShapeMap) +& - ShapeMapURLParameter(optShapeMapURL) +& - ShapeMapFileParameter( - optShapeMapFile - ) +& // This parameter seems unnecessary...maybe for keeping the state only? 
- ShapeMapFormatParam(optShapeMapFormat) +& - SchemaEmbedded(optSchemaEmbedded) +& - InferenceParam(optInference) +& - OptEndpointParam(optEndpoint) +& - // OptEndpointsParam(optEndpoints) +& - OptActiveDataTabParam(optActiveDataTab) +& - OptActiveSchemaTabParam(optActiveSchemaTab) +& - OptActiveShapeMapTabParam(optActiveShapeMapTab) => - val either: Either[String, (Option[DataFormat], Option[SchemaFormat])] = - for { - df <- maybeDataFormatStr.map(DataFormat.fromString).sequence - sf <- maybeSchemaFormatStr.map(SchemaFormat.fromString).sequence - } yield (df, sf) - - either match { - case Left(str) => responseJson(str, status = BadRequest) - case Right(pair) => - val (optDataFormat, optSchemaFormat) = pair - val baseUri = req.uri - logger.info(s"BaseURI: $baseUri") - logger.info(s"Endpoint: $optEndpoint") - val dp = DataParam( - optData, - optDataURL, - None, - optEndpoint, - optDataFormat, - optDataFormat, - optDataFormat, - None, - optInference, - None, - optActiveDataTab, - optCompoundData - ) - val sp = SchemaParam( - optSchema, - optSchemaURL, - None, - optSchemaFormat, - optSchemaFormat, - optSchemaFormat, - optSchemaFormat, - optSchemaEngine, - optSchemaEmbedded, - None, - None, - optActiveSchemaTab - ) - val collectShapeMap = (optShapeMap, optShapeMapAlt) match { - case (None, None) => None - case (None, Some(sm)) => Some(sm) - case (Some(sm), None) => Some(sm) - case (Some(sm1), Some(sm2)) => - if(sm1 == sm2) Some(sm1) - else { - val msg = - s"2 shape-map parameters with different values: $sm1 and $sm2. We use: $sm1" - logger.error(msg) - Some(sm1) - } - } - logger.debug(s"collectShapeMap: $collectShapeMap") - val tp = TriggerModeParam( - optTriggerMode, - collectShapeMap, - optShapeMapFormat, - optShapeMapURL, - optShapeMapFormat, // TODO: Maybe a more specific param for URL format? - optShapeMapFile, - optShapeMapFormat, // TODO: Maybe a more specific param for File format? 
- optActiveShapeMapTab - ) - - val eitherResult: IO[Response[IO]] = for { - pairData <- io2f(dp.getData(relativeBase)) - (dataStr, resourceRdf) = pairData - response <- io2f(for { - resBuilder <- RDFAsJenaModel.empty - vv <- (resourceRdf, resBuilder).tupled.use { - case (rdf, builder) => - for { - pair <- sp.getSchema(Some(rdf)) - (schemaStr, eitherSchema) = pair - schema <- IO.fromEither( - eitherSchema.leftMap(s => - new RuntimeException(s"Error obtaining schema: $s") - ) - ) - res <- schemaValidate( - rdf, - schema, - tp, - relativeBase, - builder - ) - (result, maybeTrigger, time) = res - json <- schemaResult2json(res._1) - } yield json - } - } yield vv) - v <- Ok(response) - } yield { - v - } - eitherResult - } - + // TODO: Enhance API response + /** Validates RDF data against a given schema-shapemap. + * Receives a JSON object with the input data, schema and shapemap information: + * - data [String]: RDF data + * - dataUrl [String]: Url containing the RDF data + * - dataFile [File Object]: File containing RDF data + * - dataFormat [String]: Format of the RDF data + * - inference [String]: Inference to be applied + * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) + * - endpoint [String]: Additional endpoint to serve as a source of data + * - schema [String]: Raw schema data + * - schemaUrl [String]: Url containing the schema + * - schemaFile [File Object]: File containing the schema + * - schemaFormat [String]: Format of the schema + * - schemaEngine [String]: Engine used to process the schema + * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) 
+ * - triggerMode [String]: Validation trigger mode + * - shapeMap [String]: Raw shapemap data + * - shapeMapUrl [String]: Url containing the shapemap + * - shapeMapFile [File Object]: File containing the shapemap + * - shapeMapFormat [String]: Format of the shapemap + * - activeShapeMapTab [String]: Identifies the source of the shapemap (raw, URL, file...) + * Returns a JSON object with the converted schema information: + * - valid [Boolean]: Whether the data is at least partially valid or not + * - message [String]: Informational message + * - validationReport [String]: Additional validation information + * - schema [String]: Original input schema + * - nodesPrefixMap [Object]: Key/value structure with the data prefixes + * - shapesPrefixMap [Object]: Key/value structure with the schema prefixes + * - shapeMap [Array]: Array containing the validation results for each node. Each result has: + * - node [String]: Full name of the affected node + * - shape [String]: Full name of the affected shape + * - status [String]: Whether this node conforms this shape + * - appInfo [Object]: Additional information on why the node conforms or not + * - errors [Array]: Array of errors in the validation + */ case req @ POST -> Root / `api` / `verb` / "validate" => req.decode[Multipart[IO]] { m => { @@ -476,131 +305,16 @@ class SchemaService(client: Client[IO]) for { e <- r.attempt - v <- e.fold(t => responseJson(t.getMessage), json => Ok(json)) + v <- e.fold( + t => errorResponseJson(t.getMessage, BadRequest), + json => Ok(json) + ) } yield v } } } private val relativeBase = ApiDefaults.relativeBase - /** Given an input schema, convert it to another output schema with the parameters specified. 
- * - * @param schema Input schema - * @param schemaStr Input schema contents - * @param schemaFormat Input schema format - * @param schemaEngine Input schema engine - * @param optTargetSchemaFormat Output schema desired format - * @param optTargetSchemaEngine Output schema desired engine - * @return Optionally, the raw output schema contents - */ - private[schema] def convertSchema( - schema: Schema, - schemaStr: Option[String], - schemaFormat: SchemaFormat, - schemaEngine: String, - optTargetSchemaFormat: Option[SchemaFormat], - optTargetSchemaEngine: Option[String] - ): IO[SchemaConversionResult] = { - val result: IO[SchemaConversionResult] = for { - pair <- doSchemaConversion( - schema, - optTargetSchemaFormat.map(_.name), - optTargetSchemaEngine - ) - sourceStr <- schemaStr match { - case None => schema.serialize(schemaFormat.name) - case Some(source) => IO(source) - } - (resultStr, resultShapeMap) = pair - } yield SchemaConversionResult.fromConversion( - sourceStr, - schemaFormat.name, - schemaEngine, - optTargetSchemaFormat.map(_.name), - optTargetSchemaEngine, - resultStr, - resultShapeMap - ) - - for { - either <- result.attempt - } yield either.fold( - err => SchemaConversionResult.fromMsg(s"error converting schema: $err"), - identity - ) - } - - private def doSchemaConversion( - schema: Schema, - targetSchemaFormat: Option[String], - optTargetSchemaEngine: Option[String] - ): IO[(String, ShapeMap)] = { - logger.debug( - s"Schema conversion, name: ${schema.name}, targetSchema: $targetSchemaFormat" - ) - val default = for { - str <- schema.convert(targetSchemaFormat, optTargetSchemaEngine, None) - } yield (str, ShapeMap.empty) - schema match { - case shacl: ShaclexSchema => - optTargetSchemaEngine.map(_.toUpperCase()) match { - case Some("SHEX") => - logger.debug("Schema conversion: SHACLEX -> SHEX") - Shacl2ShEx - .shacl2ShEx(shacl.schema) - .fold( - e => - IO.raiseError( - new RuntimeException( - s"Error converting SHACL -> ShEx: $e" - ) - ), - pair => { 
- val (schema, shapeMap) = pair - logger.debug(s"shapeMap: $shapeMap") - for { - emptyBuilder <- RDFAsJenaModel.empty - str <- emptyBuilder.use(builder => - es.weso.shex.Schema.serialize( - schema, - targetSchemaFormat.getOrElse("SHEXC"), - None, - builder - ) - ) - } yield (str, shapeMap) - } - ) - case _ => default - } - case _ => default - } - } - - private def info(msg: String): EitherT[IO, String, Unit] = - EitherT.liftF[IO, String, Unit](IO(logger.info(msg))) - - private def applyInference( - rdf: RDFReasoner, - inferenceName: Option[String] - ): IO[RDFReasoner] = inferenceName match { - case None => IO.pure(rdf) - case Some(name) => - InferenceEngine.fromString(name) match { - case Left(str) => - IO.raiseError( - new RuntimeException( - s"Error parsing inference engine: $name: $str" - ) - ) - case Right(engine) => rdf.applyInference(engine) - } - } - - // private def either2f[A](e: Either[String,A]): F[A] = ??? - - // private def - } object SchemaService { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala index 873a73c4..f64940cf 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala @@ -8,7 +8,8 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ defaultActiveShapeMapTab, defaultShapeMapFormat } -import es.weso.rdfshape.server.api.routes.PartsMap +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.shapemaps.ShapeMap case class TriggerModeParam( @@ -146,16 +147,22 @@ object TriggerModeParam extends LazyLogging { def mkTriggerModeParam(partsMap: PartsMap): IO[TriggerModeParam] = { val tp: IO[TriggerModeParam] 
= for { - optTriggerMode <- partsMap.optPartValue("triggerMode") - optShapeMap <- partsMap.optPartValue("shapeMap") - optShapeMapURL <- partsMap.optPartValue("shapeMapURL") - optShapeMapFile <- partsMap.optPartValue("shapeMapFile") + optTriggerMode <- partsMap.optPartValue(TriggerModeParameter.name) + optShapeMap <- partsMap.optPartValue(ShapeMapTextParameter.name) + optShapeMapURL <- partsMap.optPartValue(ShapeMapUrlParameter.name) + optShapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) optShapeMapFormatTextArea <- partsMap.optPartValue( - "shapeMapFormatTextArea" + ShapeMapFormatTextAreaParameter.name + ) + optShapeMapFormatUrl <- partsMap.optPartValue( + ShapeMapFormatUrlParameter.name + ) + optShapeMapFormatFile <- partsMap.optPartValue( + ShapeMapFormatFileParameter.name + ) + optActiveShapeMapTab <- partsMap.optPartValue( + ActiveShapeMapTabParameter.name ) - optShapeMapFormatUrl <- partsMap.optPartValue("shapeMapFormatURL") - optShapeMapFormatFile <- partsMap.optPartValue("shapeMapFormatFile") - optActiveShapeMapTab <- partsMap.optPartValue("shapeMapActiveTab") } yield { logger.debug(s"optTriggerMode: $optTriggerMode") logger.debug(s"optShapeMap: $optShapeMap") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala new file mode 100644 index 00000000..fcfabe35 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -0,0 +1,192 @@ +package es.weso.rdfshape.server.api.routes.shapemap.logic + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents +import es.weso.shapemaps.{Compact, ShapeMapFormat, ShapeMap 
=> ShapeMapW} + +/** Data class representing a ShapeMap and its current source + * + * @param shapeMap Shapemap raw text + * @param shapeMapFormat Shapemap format + * @param targetShapeMapFormat Shapemap target format + * @param activeShapeMapTab Active tab, used to know which source the shapemap comes from + */ +sealed case class ShapeMap private ( + shapeMap: String, + shapeMapFormat: ShapeMapFormat, + targetShapeMapFormat: ShapeMapFormat, + activeShapeMapTab: String +) { + + /** Construct the inner shapemap structure from the data in this class + * + * @return A ShapeMap instance used by WESO libraries in validation + */ + val innerShapeMap: Either[String, ShapeMapW] = { + ShapeMapW + .fromString(shapeMap, shapeMapFormat.name) match { + case Left(errorList) => Left(errorList.toList.mkString("\n")) + case Right(sm) => Right(sm) + } + } +} + +private[api] object ShapeMap extends LazyLogging { + + /** Placeholder value used for the shapemap whenever an empty shapemap is issued/needed. + */ + private val emptyShapeMapValue = "" + + /** Default shapemap format used when no alternatives are present + */ + private val defaultShapeMapFormat: ShapeMapFormat = Compact + + /** Given a request's parameters, try to extract a shapemap from them + * + * @param partsMap Request's parameters + * @return Either the shapemap or an error message + */ + def getShapeMap( + partsMap: PartsMap + ): IO[Either[String, ShapeMap]] = { + for { + // Get data sent in que query + shapeMapStr <- partsMap.optPartValue(ShapeMapTextParameter.name) + shapeMapUrl <- partsMap.optPartValue(ShapeMapUrlParameter.name) + shapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) + optShapeMapFormat <- getShapeMapFormat( + ShapeMapFormatParameter.name, + partsMap + ) + optTargetShapeMapFormat <- getShapeMapFormat( + TargetShapeMapFormatParameter.name, + partsMap + ) + activeShapeMapTab <- partsMap.optPartValue( + ActiveShapeMapTabParameter.name + ) + + _ = logger.debug( + s"Getting ShapeMap from 
params. ShapeMap tab: $activeShapeMapTab" + ) + + // Get the shapemap formats or use the defaults + shapeMapFormat = optShapeMapFormat.getOrElse(defaultShapeMapFormat) + targetShapeMapFormat = optTargetShapeMapFormat.getOrElse( + defaultShapeMapFormat + ) + + // Create the shapemap depending on the client's selected method + maybeShapeMap: Either[String, ShapeMap] = activeShapeMapTab.getOrElse( + ShapeMapTab.defaultActiveShapeMapTab + ) match { + case ShapeMapTab.TEXT => + shapeMapStr match { + case None => Left("No value for the ShapeMap string") + case Some(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw, + shapeMapFormat, + targetShapeMapFormat, + ShapeMapTab.TEXT + ) + ) + } + + case ShapeMapTab.URL => + shapeMapUrl match { + case None => Left(s"No value for the shapemap URL") + case Some(url) => + getUrlContents(url) match { + case Right(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw, + shapeMapFormat, + targetShapeMapFormat, + ShapeMapTab.URL + ) + ) + case Left(err) => Left(err) + } + } + case ShapeMapTab.FILE => + shapeMapFile match { + case None => Left(s"No value for the shapemap file") + case Some(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw, + shapeMapFormat, + targetShapeMapFormat, + ShapeMapTab.FILE + ) + ) + } + case other => + val msg = s"Unknown value for activeShapemapTab: $other" + logger.warn(msg) + Left(msg) + } + + } yield maybeShapeMap + } + + /** Given a list of query parameters and a parameter name, try to create a ShapeMapFormat instance from the format name contained in the parameter + * + * @param name Query parameter containing the format name + * @param partsMap Query parameters + * @return Optionally, a ShapeMapFormat instance corresponding to the shapemap format specified in the query parameters + */ + private def getShapeMapFormat( + name: String, + partsMap: PartsMap + ): IO[Option[ShapeMapFormat]] = + for { + maybeFormat <- partsMap.optPartValue(name) + } yield maybeFormat match { + case None => None + case Some(str) 
=> + ShapeMapFormat + .fromString(str) + .fold( + err => { + logger.error(s"Unsupported shapeMapFormat: $str ($err)") + None + }, + format => Some(format) + ) + } + + /** Empty shapemap representation, with no inner data and all defaults + * + * @return + */ + def empty: ShapeMap = + ShapeMap( + emptyShapeMapValue, + defaultShapeMapFormat, + defaultShapeMapFormat, + ShapeMapTab.defaultActiveShapeMapTab + ) + +} + +/** Enumeration of the different possible ShapeMap tabs sent by the client. + * The tab sent indicates the API if the shapemap was sent in raw text, as a URL + * to be fetched or as a text file containing the shapemap. + * In case the client submits the shapemap in several formats, the selected tab will indicate the preferred one. + */ +private[this] object ShapeMapTab extends Enumeration { + type ShapeMapTab = String + + val TEXT = "#shapeMapTextArea" + val URL = "#shapeMapUrl" + val FILE = "#shapeMapFile" + + val defaultActiveShapeMapTab: ShapeMapTab = TEXT +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapParam.scala deleted file mode 100644 index c0d5b62c..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapParam.scala +++ /dev/null @@ -1,142 +0,0 @@ -package es.weso.rdfshape.server.api.routes.shapemap.service - -import cats.effect.IO -import cats.implicits._ -import es.weso.rdf.PrefixMap -import es.weso.rdfshape.server.api.routes.PartsMap -import es.weso.shapemaps._ -import org.log4s.getLogger - -case class ShapeMapParam( - shapeMap: Option[String], - shapeMapURL: Option[String], - shapeMapFile: Option[String], - optShapeMapFormat: Option[ShapeMapFormat], - targetShapeMapFormat: Option[ShapeMapFormat], - activeShapeMapTab: Option[String] -) { - - val shapeMapFormat: String = optShapeMapFormat.getOrElse(Compact).name - val shapeMapTab: String 
= activeShapeMapTab.getOrElse(ShapeMapTextAreaType.id) - private[this] val logger = getLogger - - def getShapeMap: IO[ShapeMap] = - for { - tab <- IO.fromEither( - parseShapeMapTab(shapeMapTab).leftMap(e => new RuntimeException(e)) - ) - sm <- tab match { - case ShapeMapTextAreaType => - IO.fromEither( - ShapeMap - .fromString(shapeMap.getOrElse(""), shapeMapFormat) - .leftMap(es => new RuntimeException(es.toList.mkString("\n"))) - ) - case ShapeMapUrlType => - for { - e <- ShapeMap.fromURI( - shapeMapURL.getOrElse(""), - shapeMapFormat, - None, - PrefixMap.empty, - PrefixMap.empty - ) - r <- e.fold( - ls => - IO.raiseError(new RuntimeException(ls.toList.mkString("\n"))), - IO.pure - ) - } yield r - case _ => - IO.raiseError(new RuntimeException(s"Not implemented yet ${tab.id}")) - } - } yield sm - - def parseShapeMapTab(tab: String): Either[String, ShapeMapInputType] = { - logger.debug(s"parseShapeMapTab: tab = $tab") - val inputTypes = - List(ShapeMapUrlType, ShapeMapFileType, ShapeMapTextAreaType) - inputTypes.find(_.id == tab) match { - case Some(x) => Right(x) - case None => - Left( - s"Wrong value of tab: $tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" - ) - } - } - - sealed abstract class ShapeMapInputType { - val id: String - } - - case object ShapeMapUrlType extends ShapeMapInputType { - override val id = "#shapeMapUrl" - } - - case object ShapeMapFileType extends ShapeMapInputType { - override val id = "#shapeMapFile" - } - - case object ShapeMapTextAreaType extends ShapeMapInputType { - override val id = "#shapeMapTextArea" - } - -} - -object ShapeMapParam { - private[this] val logger = getLogger - - private[api] def mkShapeMap( - partsMap: PartsMap - ): IO[(ShapeMap, ShapeMapParam)] = - for { - smp <- mkShapeMapParam(partsMap) - sm <- smp.getShapeMap - } yield (sm, smp) - - private[api] def mkShapeMapParam(partsMap: PartsMap): IO[ShapeMapParam] = - for { - shapeMap <- partsMap.optPartValue("shapeMap") - shapeMapURL <- 
partsMap.optPartValue("shapeMapURL") - shapeMapFile <- partsMap.optPartValue("shapeMapFile") - shapeMapFormat <- getShapeMapFormat("shapeMapFormat", partsMap) - targetShapeMapFormat <- getShapeMapFormat( - "targetShapeMapFormat", - partsMap - ) - activeShapeMapTab <- partsMap.optPartValue("shapeMapActiveTab") - } yield { - ShapeMapParam( - shapeMap, - shapeMapURL, - shapeMapFile, - shapeMapFormat, - targetShapeMapFormat, - activeShapeMapTab - ) - } - - private def getShapeMapFormat( - name: String, - partsMap: PartsMap - ): IO[Option[ShapeMapFormat]] = - for { - maybeStr <- partsMap.optPartValue(name) - } yield maybeStr match { - case None => None - case Some(str) => - ShapeMapFormat - .fromString(str) - .fold( - err => { - logger.error(s"Unsupported shapeMapFormat: $str") - None - }, - smf => Some(smf) - ) - } - - private[api] def empty: ShapeMapParam = - ShapeMapParam(None, None, None, None, None, None) - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index e5159284..53330b5b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -3,10 +3,15 @@ package es.weso.rdfshape.server.api.routes.shapemap.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapInfoResult -import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} -import es.weso.rdfshape.server.utils.json.JsonUtils.responseJson -import es.weso.shapemaps.ShapeMap +import es.weso.rdfshape.server.api.routes.ApiService +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.getShapeMap +import 
es.weso.rdfshape.server.api.routes.shapemap.logic.{ + ShapeMap, + ShapeMapInfoResult +} +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson +import es.weso.shapemaps.{ShapeMap => ShapeMapW} import io.circe._ import org.http4s._ import org.http4s.circe._ @@ -30,26 +35,38 @@ class ShapeMapService(client: Client[IO]) val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / `api` / `verb` / "formats" => - val formats = ShapeMap.availableFormats + val formats = ShapeMapW.availableFormats val json = Json.fromValues(formats.map(str => Json.fromString(str))) Ok(json) case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) - val t: IO[(ShapeMap, ShapeMapParam)] = - ShapeMapParam.mkShapeMap(partsMap) - t.attempt.flatMap( + + val maybeShapeMap: IO[Either[String, ShapeMap]] = getShapeMap(partsMap) + maybeShapeMap.attempt.flatMap( _.fold( - e => responseJson(e.getMessage, BadRequest), - pair => { - val (sm, smp) = pair - val smi: ShapeMapInfoResult = ShapeMapInfoResult.fromShapeMap( - smp.shapeMap, - smp.optShapeMapFormat, - sm - ) - Ok(smi.toJson) + // General exception + e => errorResponseJson(e.getMessage, InternalServerError), + { + // Error parsing the ShapeMap information sent + case Left(errorStr) => errorResponseJson(errorStr, BadRequest) + // Success parsing the ShapeMap information sent + case Right(shapeMap) => + shapeMap.innerShapeMap match { + // Error creating the inner ShapeMap instance from the data + case Left(errorStr) => + errorResponseJson(errorStr, InternalServerError) + // Success creating the inner ShapeMap instance from the data + case Right(innerSm) => + val shapeMapInfo: ShapeMapInfoResult = + ShapeMapInfoResult.fromShapeMap( + Some(shapeMap.shapeMap), + Some(shapeMap.shapeMapFormat), + innerSm + ) + Ok(shapeMapInfo.toJson) + } } ) ) diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala index f9068afa..58dedf6b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala @@ -2,8 +2,9 @@ package es.weso.rdfshape.server.api.routes.wikibase import cats.effect._ import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.routes.PartsMap -import es.weso.rdfshape.server.api.routes.schema.service.SchemaParam +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaParam +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.WdSchemaParameter +import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.wikibase._ import es.weso.schema.{Schema, Schemas} import org.http4s._ @@ -83,7 +84,7 @@ object WikibaseSchemaParam { partsMap: PartsMap ): IO[WikibaseSchemaParam] = for { - maybeSchema <- partsMap.eitherPartValue("entitySchema") + maybeSchema <- partsMap.eitherPartValue(WdSchemaParameter.name) // endpointStr <- partsMap.partValue("endpoint") // endpoint <- either2f(IRI.fromString(endpointStr)) maybeSchemaParam <- SchemaParam.mkSchemaParam(partsMap).attempt diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala index 2fff7aa7..b64f03fc 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala @@ -13,8 +13,8 @@ import es.weso.rdf.sgraph._ import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.schemaResult2json import 
es.weso.rdfshape.server.api.definitions._ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.routes.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.{ApiService, PartsMap} +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.utils.Http4sUtils._ import es.weso.schema.{Schema, ShapeMapTrigger} import es.weso.schemaInfer.{InferOptions, SchemaInfer} @@ -35,6 +35,7 @@ import org.http4s.multipart._ import scala.util.control.NoStackTrace import scala.util.matching.Regex +import es.weso.rdfshape.server.api.utils.parameters.PartsMap /** API service to handle wikidata related operations * @@ -58,13 +59,9 @@ class WikidataService(client: Client[IO]) */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - case GET -> Root / `api` / `verb` / "test" => { - Ok("Wikidata Test") - } - case GET -> Root / `api` / `verb` / "entityLabel" :? - WdEntityParam(entity) +& - LanguageParam(language) => + WdEntityParameter(entity) +& + LanguageParameter(language) => val uri = Uri.unsafeFromString( s"https://www.wikidata.org/w/api.php?action=wbgetentities&props=labels&ids=$entity&languages=$language&format=json" ) @@ -84,7 +81,7 @@ class WikidataService(client: Client[IO]) } yield resp case GET -> Root / `api` / `verb` / "schemaContent" :? - WdSchemaParam(wdSchema) => { + WdSchemaParameter(wdSchema) => { val uri = uri"https://www.wikidata.org".withPath( Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") ) @@ -111,11 +108,11 @@ class WikidataService(client: Client[IO]) } case GET -> Root / `api` / `verb` / "searchEntity" :? 
- OptEndpointParam(endpoint) +& - LabelParam(label) +& - LanguageParam(language) +& - LimitParam(maybelimit) +& - ContinueParam(maybeContinue) => { + EndpointParameter(endpoint) +& + LabelParameter(label) +& + LanguageParameter(language) +& + LimitParameter(maybelimit) +& + ContinueParameter(maybeContinue) => { val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = maybeContinue.getOrElse(defaultContinue.toString) @@ -155,11 +152,11 @@ class WikidataService(client: Client[IO]) } case GET -> Root / `api` / `verb` / "searchProperty" :? - OptEndpointParam(endpoint) +& - LabelParam(label) +& - LanguageParam(language) +& - LimitParam(maybelimit) +& - ContinueParam(maybeContinue) => { + EndpointParameter(endpoint) +& + LabelParameter(label) +& + LanguageParameter(language) +& + LimitParameter(maybelimit) +& + ContinueParameter(maybeContinue) => { val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = maybeContinue.getOrElse(defaultContinue.toString) @@ -197,10 +194,10 @@ class WikidataService(client: Client[IO]) } case GET -> Root / `api` / `verb` / "searchLexeme" :? 
- LabelParam(label) +& - LanguageParam(language) +& - LimitParam(maybelimit) +& - ContinueParam(maybeContinue) => { + LabelParameter(label) +& + LanguageParameter(language) +& + LimitParameter(maybelimit) +& + ContinueParameter(maybeContinue) => { val limit: String = maybelimit.getOrElse(defaultLimit.toString) val continue: String = maybeContinue.getOrElse(defaultContinue.toString) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala new file mode 100644 index 00000000..cac0b3b7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -0,0 +1,327 @@ +package es.weso.rdfshape.server.api.utils.parameters + +import org.http4s.dsl.io.{ + OptionalQueryParamDecoderMatcher, + QueryParamDecoderMatcher +} + +/** Definitions for all the possible parameters that may come from client requests + */ +object IncomingRequestParameters { + lazy val data = "data" + lazy val compoundData = "compoundData" + lazy val dataURL = "dataURL" + lazy val dataFile = "dataFile" + lazy val dataFormat = "dataFormat" + lazy val targetDataFormat = "targetDataFormat" + + lazy val schema = "schema" + lazy val schemaURL = "schemaURL" + lazy val schemaFile = "schemaFile" + lazy val schemaFormat = "schemaFormat" + lazy val schemaFormatTextArea = "schemaFormatTextArea" + lazy val schemaFormatUrl = "schemaFormatUrl" + lazy val schemaFormatFile = "schemaFormatFile" + lazy val schemaEngine = "schemaEngine" + lazy val targetSchemaFormat = "targetSchemaFormat" + lazy val targetSchemaEngine = "targetSchemaEngine" + lazy val inference = "inference" + lazy val triggerMode = "triggerMode" + + lazy val shape = "shape" + lazy val entity = "entity" + lazy val node = "node" + lazy val nodeSelector = "nodeSelector" + + lazy val shapeMap = "shapeMap" + lazy val shape_map = 
"shape-map" + lazy val shapeMapURL = "shapeMapURL" + lazy val shapeMapFile = "shapeMapFile" + lazy val shapeMapFormat = "shapeMapFormat" + lazy val shapeMapFormatTextArea = "shapeMapFormatTextArea" + lazy val shapeMapFormatUrl = "shapeMapFormatUrl" + lazy val shapeMapFormatFile = "shapeMapFormatFile" + lazy val targetShapeMapFormat = "targetShapeMapFormat" + + lazy val query = "query" + lazy val queryURL = "queryURL" + lazy val queryFile = "queryFile" + + lazy val endpoint = "endpoint" + + lazy val activeDataTab = "activeDataTab" + lazy val activeSchemaTab = "activeSchemaTab" + lazy val activeShapeMapTab = "activeShapeMapTab" + lazy val activeQueryTab = "activeQueryTab" + + lazy val wdEntity = "wdEntity" + lazy val wdSchema = "wdSchema" + + lazy val url = "url" + lazy val urlCode = "urlCode" + lazy val hostname = "hostname" + lazy val view = "view" + lazy val examples = "examples" + lazy val manifestURL = "manifestURL" + lazy val language = "language" + lazy val label = "label" + lazy val limit = "limit" + lazy val continue = "continue" + lazy val withDot = "withDot" + + object DataParameter extends OptionalQueryParamDecoderMatcher[String](data) { + val name: String = data + } + + object CompoundDataParameter + extends OptionalQueryParamDecoderMatcher[String](compoundData) { + val name: String = compoundData + } + + object DataURLParameter + extends OptionalQueryParamDecoderMatcher[String](dataURL) { + val name: String = dataURL + } + + object DataFileParameter + extends OptionalQueryParamDecoderMatcher[String](dataFile) { + val name: String = dataFile + } + + object DataFormatParameter + extends OptionalQueryParamDecoderMatcher[String](dataFormat) { + val name: String = dataFormat + } + + object TargetDataFormatParameter + extends OptionalQueryParamDecoderMatcher[String](targetDataFormat) { + val name: String = targetDataFormat + } + + object SchemaParameter + extends OptionalQueryParamDecoderMatcher[String](schema) { + val name: String = schema + } + + object 
SchemaURLParameter + extends OptionalQueryParamDecoderMatcher[String](schemaURL) { + val name: String = schemaURL + } + + object SchemaFileParameter + extends OptionalQueryParamDecoderMatcher[String](schemaFile) { + val name: String = schemaFile + } + + object SchemaFormatParameter + extends OptionalQueryParamDecoderMatcher[String](schemaFormat) { + val name: String = schemaFormat + } + + object SchemaFormatTextAreaParameter + extends OptionalQueryParamDecoderMatcher[String](schemaFormatTextArea) { + val name: String = schemaFormatTextArea + } + + object SchemaFormatUrlParameter + extends OptionalQueryParamDecoderMatcher[String](schemaFormatUrl) { + val name: String = schemaFormatUrl + } + + object SchemaFormatFileParameter + extends OptionalQueryParamDecoderMatcher[String](schemaFormatFile) { + val name: String = schemaFormatFile + } + + object SchemaEngineParameter + extends OptionalQueryParamDecoderMatcher[String](schemaEngine) { + val name: String = schemaEngine + } + + object TargetSchemaFormatParameter + extends OptionalQueryParamDecoderMatcher[String](targetSchemaFormat) { + val name: String = targetSchemaFormat + } + + object TargetSchemaEngineParameter + extends OptionalQueryParamDecoderMatcher[String](targetSchemaEngine) { + val name: String = targetSchemaEngine + } + + object InferenceParameter + extends OptionalQueryParamDecoderMatcher[String](inference) { + val name: String = inference + } + + object TriggerModeParameter + extends OptionalQueryParamDecoderMatcher[String](triggerMode) { + val name: String = triggerMode + } + + object ShapeParameter + extends OptionalQueryParamDecoderMatcher[String](shape) { + val name: String = shape + } + + object EntityParameter + extends OptionalQueryParamDecoderMatcher[String](entity) { + val name: String = entity + } + + object NodeParameter extends OptionalQueryParamDecoderMatcher[String](node) { + val name: String = node + } + + object NodeSelectorParameter + extends 
OptionalQueryParamDecoderMatcher[String](nodeSelector) { + val name: String = nodeSelector + } + + object ShapeMapTextParameter + extends OptionalQueryParamDecoderMatcher[String](shapeMap) { + val name: String = shapeMap + } + + object ShapeMapParameterAlt + extends OptionalQueryParamDecoderMatcher[String](shape_map) { + val name: String = shape_map + } + + object ShapeMapUrlParameter + extends OptionalQueryParamDecoderMatcher[String](shapeMapURL) { + val name: String = shapeMapURL + } + + object ShapeMapFileParameter + extends OptionalQueryParamDecoderMatcher[String](shapeMapFile) { + val name: String = shapeMapFile + } + + object ShapeMapFormatParameter + extends OptionalQueryParamDecoderMatcher[String](shapeMapFormat) { + val name: String = shapeMapFormat + } + + object ShapeMapFormatTextAreaParameter + extends OptionalQueryParamDecoderMatcher[String](shapeMapFormatTextArea) { + val name: String = shapeMapFormatTextArea + } + + object ShapeMapFormatUrlParameter + extends OptionalQueryParamDecoderMatcher[String](shapeMapFormatUrl) { + val name: String = shapeMapFormatUrl + } + + object ShapeMapFormatFileParameter + extends OptionalQueryParamDecoderMatcher[String](shapeMapFormatFile) { + val name: String = shapeMapFormatFile + } + + object TargetShapeMapFormatParameter + extends OptionalQueryParamDecoderMatcher[String](targetShapeMapFormat) { + val name: String = targetShapeMapFormat + } + + object QueryParameter + extends OptionalQueryParamDecoderMatcher[String](query) { + val name: String = query + } + + object QueryURLParameter + extends OptionalQueryParamDecoderMatcher[String](queryURL) { + val name: String = queryURL + } + + object QueryFileParameter + extends OptionalQueryParamDecoderMatcher[String](queryFile) { + val name: String = queryFile + } + + object EndpointParameter + extends OptionalQueryParamDecoderMatcher[String](endpoint) { + val name: String = endpoint + } + + object ActiveDataTabParameter + extends 
OptionalQueryParamDecoderMatcher[String](activeDataTab) { + val name: String = activeDataTab + } + + object ActiveSchemaTabParameter + extends OptionalQueryParamDecoderMatcher[String](activeSchemaTab) { + val name: String = activeSchemaTab + } + + object ActiveShapeMapTabParameter + extends OptionalQueryParamDecoderMatcher[String](activeShapeMapTab) { + val name: String = activeShapeMapTab + } + + object ActiveQueryTabParameter + extends OptionalQueryParamDecoderMatcher[String](activeQueryTab) { + val name: String = activeQueryTab + } + + object WdEntityParameter extends QueryParamDecoderMatcher[String](wdEntity) { + val name: String = wdEntity + } + + object WdSchemaParameter extends QueryParamDecoderMatcher[String](wdSchema) { + val name: String = wdSchema + } + + object WithDotParameter + extends OptionalQueryParamDecoderMatcher[Boolean](withDot) { + val name: String = withDot + } + + object OptView extends OptionalQueryParamDecoderMatcher[String](view) { + val name: String = view + } + + object ExamplesParameter + extends OptionalQueryParamDecoderMatcher[String](examples) { + val name: String = examples + } + + object OptExamplesParameter + extends OptionalQueryParamDecoderMatcher[String](examples) { + val name: String = examples + } + + object ManifestURLParameter + extends OptionalQueryParamDecoderMatcher[String](manifestURL) { + val name: String = manifestURL + } + + object LanguageParameter extends QueryParamDecoderMatcher[String](language) { + val name: String = language + } + + object LabelParameter extends QueryParamDecoderMatcher[String](label) { + val name: String = label + } + + object UrlParameter extends QueryParamDecoderMatcher[String](url) { + val name: String = url + } + + object UrlCodeParameter extends QueryParamDecoderMatcher[String](urlCode) { + val name: String = urlCode + } + + object HostNameParameter extends QueryParamDecoderMatcher[String](hostname) { + val name: String = hostname + } + + object LimitParameter + extends 
OptionalQueryParamDecoderMatcher[String](limit) { + val name: String = limit + } + + object ContinueParameter + extends OptionalQueryParamDecoderMatcher[String](continue) { + val name: String = continue + } + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala new file mode 100644 index 00000000..874a6b26 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala @@ -0,0 +1,68 @@ +package es.weso.rdfshape.server.api.utils.parameters + +import cats.effect.IO +import cats.implicits._ +import fs2.text.utf8Decode +import org.http4s.multipart.Part + +/** Data class containing a map of a request's parameters with the form (param name: param content) + * The data contained in a request parameter is handled via the {@link Part} class of HTT4s and extracted with the class methods + * + * @param map Map with the request's parameters information + */ +case class PartsMap private (map: Map[String, Part[IO]]) { + + /** Shorthand for extracting boolean values from a request parameter + * + * @param key Parameter key + * @return Optionally, the boolean translation of the contents of the parameter + */ + def optPartValueBoolean(key: String): IO[Option[Boolean]] = for { + maybeValue <- optPartValue(key) + } yield maybeValue match { + case Some("true") => Some(true) + case Some("false") => Some(false) + case _ => None + + } + + /** Extract the value from a request parameter, decoding it and handling errors + * + * @param key Parameter key + * @return Optionally, the String contents of the parameter + */ + def optPartValue(key: String): IO[Option[String]] = + map.get(key) match { + case Some(part) => + part.body.through(utf8Decode).compile.foldMonoid.map(Some.apply) + case None => IO.pure(None) + } + + /** Shorthand for extracting values from a request parameter with an informational error 
message + * + * @param key Parameter key + * @return Either the String contents of the parameter or an error message + */ + def eitherPartValue(key: String): IO[Either[String, String]] = for { + maybeValue <- optPartValue(key) + } yield maybeValue match { + case None => + Left( + s"Not found value for key $key\nKeys available: ${map.keySet.mkString(",")}" + ) + case Some(s) => Right(s) + } +} + +object PartsMap { + + /** Instantiate a new {@link PartsMap} given a list of the inner parts + * + * @param ps List of parts + * @return A new Parts map containing mapping each part's name to its contents + */ + def apply(ps: Vector[Part[IO]]): PartsMap = { + PartsMap(ps.filter(_.name.isDefined).map(p => (p.name.get, p)).toMap) + } + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala index 5429f6ce..5e8e565e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala @@ -72,7 +72,7 @@ object JsonUtils extends Http4sDsl[IO] { * @param status Desired HTTP status of the response * @return The response object, ready to be dispatched elsewhere */ - def responseJson(msg: String, status: Status = Ok): IO[Response[IO]] = { + def errorResponseJson(msg: String, status: Status = Ok): IO[Response[IO]] = { val responseMessage = mkJson(msg) mapStatusCodes(status) match { case Status.Created => Created(responseMessage) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala new file mode 100644 index 00000000..ce914399 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala @@ -0,0 +1,33 @@ +package es.weso.rdfshape.server.utils.networking + 
+import com.typesafe.scalalogging.LazyLogging + +import java.net.URL +import scala.io.Source +import scala.util.{Failure, Success, Try} + +object NetworkingUtils extends LazyLogging { + + /** Error-safe way of obtaining the raw contents in a given URL + * + * @param urlString URL to be fetched (String representation) + * @return Either the contents if the URL or an error message + */ + def getUrlContents(urlString: String): Either[String, String] = { + Try { + val url = new URL(urlString) + val src = Source.fromURL(url) + val str = src.mkString + src.close() + str + } match { + case Success(urlContent) => Right(urlContent) + case Failure(exception) => + val msg = + s"Could not obtain data from url $urlString." + logger.warn(s"$msg - ${exception.getMessage}") + Left(msg) + } + } + +} From d4fccd8c2b6ed04a516da5d2c216623b14c4ed9b Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Tue, 7 Sep 2021 20:05:03 +0200 Subject: [PATCH 17/32] Minor refactorings and documentation. --- docs/api-usage/usage_tutorial.md | 4 +- .../es/weso/rdfshape/server/Server.scala | 4 +- .../server/api/definitions/ApiDefaults.scala | 8 +- .../server/api/format/DataFormat.scala | 18 +- .../rdfshape/server/api/format/Format.scala | 12 +- .../server/api/format/HtmlFormat.scala | 14 +- .../server/api/format/RdfFormat.scala | 18 +- .../server/api/format/SchemaFormat.scala | 2 +- .../server/api/format/ShapeMapFormat.scala | 33 + .../api/routes/data/service/DataService.scala | 2 +- .../schema/logic/SchemaOperations.scala | 2 +- .../api/routes/schema/logic/SchemaParam.scala | 58 +- .../routes/schema/service/SchemaService.scala | 2 +- .../schema/service/TriggerModeParam.scala | 78 +- .../shapemap/logic/ShapeMapInfoResult.scala | 2 +- .../shapemap/service/ShapeMapService.scala | 21 +- .../api/routes/shex/service/ShExService.scala | 2 + .../api/routes/wikibase/WikidataService.scala | 760 ------------------ .../wikibase/logic/WikibaseEntity.scala | 66 ++ .../{ => logic}/WikibaseSchemaParam.scala | 2 +- 
.../wikibase/service/WikibaseService.scala | 562 +++++++++++++ .../service/WikibaseServiceUtils.scala | 146 ++++ .../server/api/utils/OptEitherF.scala | 12 +- .../IncomingRequestParameters.scala | 66 +- .../api/utils/parameters/PartsMap.scala | 50 +- .../SSLContextCreationException.scala | 2 +- .../exceptions/WikibaseServiceException.scala | 12 + 27 files changed, 1033 insertions(+), 925 deletions(-) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/ShapeMapFormat.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/{ => logic}/WikibaseSchemaParam.scala (98%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala diff --git a/docs/api-usage/usage_tutorial.md b/docs/api-usage/usage_tutorial.md index bae9d3a8..dbcf0166 100644 --- a/docs/api-usage/usage_tutorial.md +++ b/docs/api-usage/usage_tutorial.md @@ -11,13 +11,13 @@ title: API Tutorial Validate RDF data with ShEx. 
Example from the [Validating RDF book](https://book.validatingrdf.com/): -- [Example](https://rdfshape.weso.es/shExValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0APREFIX%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%0A%0A%3Aalice%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22unspecified%22%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.%0A%0A%3Adave%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Dave%22%3B%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22XYY%22%3B%20%20%20%20%20%20%20%20%20%20%23%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%201980%20.%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%201980%20is%20not%20an%20xsd%3Adate%20%2A%29%0A%0A%3Aemily%20schema%3Aname%20%22Emily%22%2C%20%22Emilee%22%20%3B%20%20%20%
20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20.%20%23%20%25%2A%20too%20many%20schema%3Anames%20%2A%29%0A%0A%3Afrank%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Frank%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%3A%20%20%20%20%20%20%20%20schema%3AMale%20.%20%20%20%23%20%25%2A%20missing%20schema%3Aname%20%2A%29%0A%0A%3Agrace%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Grace%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%23%20%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20_%3Ax%20.%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5C_%3Ax%20is%20not%20an%20IRI%20%2A%29%0A%0A%3Aharold%20schema%3Aname%20%20%20%20%20%20%20%20%20%22Harold%22%20%3B%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20schema%3AMale%20%3B%20%0A%20%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%3Agrace%20.%20%20%20%20%20%20%23%20%25%2A%20%3Agrace%20does%20not%20conform%20to%20%3AUser%20%2A%29&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0A%0A%3AUser%20%7B%0A%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20xsd%3Astring%20%20%3B%0A%20%20schema%3AbirthDate%20%20%20%20%20xsd%3Adate%3F%20%20%3B%0A%20%20schema%3Agender%20%20%20%20%20%20%20%20%5B%20schema%3AMale%20schema%3AFemale%20%5D%20OR%20xsd%3Astring%20%3B%0A%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20IRI%20%40%3AUser%2A%0A%7D&schemaEngine=ShEx&schemaFormat=ShExC&schemaFormatTextArea=ShExC&shapeMap=%3Aalice%40%3AUser%2C%3Abob
%40%3AUser%2C%3Acarol%40%3AUser%2C%3Aemily%40%3AUser%2C%3Afrank%40%3AUser%2C%3Agrace%40%3AUser%2C%3Aharold%40%3AUser&shapeMapActiveTab=%23shapeMapTextArea&shapeMapFormat=Compact&shapeMapFormatTextArea=Compact&triggerMode=shapeMap) +- [Example](https://rdfshape.weso.es/shExValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0APREFIX%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%0A%0A%3Aalice%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CPasses%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22unspecified%22%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.%0A%0A%3Adave%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Dave%22%3B%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20%22XYY%22%3B%20%20%20%20%20%20%20%20%20%20%23%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%201980%20.%20%20%
20%20%20%20%20%20%20%20%23%20%25%2A%201980%20is%20not%20an%20xsd%3Adate%20%2A%29%0A%0A%3Aemily%20schema%3Aname%20%22Emily%22%2C%20%22Emilee%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20.%20%23%20%25%2A%20too%20many%20schema%3Anames%20%2A%29%0A%0A%3Afrank%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Frank%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%3A%20%20%20%20%20%20%20%20schema%3AMale%20.%20%20%20%23%20%25%2A%20missing%20schema%3Aname%20%2A%29%0A%0A%3Agrace%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Grace%22%20%3B%20%20%20%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%23%20%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20_%3Ax%20.%20%20%20%20%20%20%20%20%20%20%20%23%20%25%2A%20%5C_%3Ax%20is%20not%20an%20IRI%20%2A%29%0A%0A%3Aharold%20schema%3Aname%20%20%20%20%20%20%20%20%20%22Harold%22%20%3B%20%20%20%20%23%20%25%2A%20%5CFails%7B%3AUser%7D%20%2A%29%0A%20%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20schema%3AMale%20%3B%20%0A%20%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%3Agrace%20.%20%20%20%20%20%20%23%20%25%2A%20%3Agrace%20does%20not%20conform%20to%20%3AUser%20%2A%29&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=PREFIX%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%0APREFIX%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%0APREFIX%20xsd%3A%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%0A%0A%3AUser%20%7B%0A%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20xsd%3Astring%20%20%3B%0A%20%20schema%3AbirthDate%20%20%20%20%20xsd%3Adate%3F%20%20%3B%0A%20%20schema%3Agender%20%20%20%20%20%20%20%20%5B%20schema%3AMale%20schema%3AFemale%20%5D%20OR%20xsd%3Astring%20%3B%0A%20%20schema%3A
knows%20%20%20%20%20%20%20%20%20IRI%20%40%3AUser%2A%0A%7D&schemaEngine=ShEx&schemaFormat=ShExC&shapeMap=%3Aalice%40%3AUser%2C%3Abob%40%3AUser%2C%3Acarol%40%3AUser%2C%3Aemily%40%3AUser%2C%3Afrank%40%3AUser%2C%3Agrace%40%3AUser%2C%3Aharold%40%3AUser&shapeMapActiveTab=%23shapeMapTextArea&shapeMapFormat=Compact&triggerMode=shapeMap) ## SHACL Validate RDF data with SHACL. Example from the Validating RDF book: -- [Example](https://rdfshape.weso.es/shaclValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%0A%3Aalice%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%0A%20%20%20%20%20%20%20schema%3AbirthDate%20%20%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20a%20%3AUser%3B
%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%3AUserShape%20a%20sh%3ANodeShape%3B%0A%20%20%20sh%3AtargetClass%20%3AUser%20%3B%0A%20%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%201%0A%20%20%20%20sh%3Apath%20%20%20%20%20schema%3Aname%20%3B%20%0A%20%20%20%20sh%3AminCount%201%3B%20%0A%20%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20%20sh%3Adatatype%20xsd%3Astring%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%202%0A%20%20%20sh%3Apath%20schema%3Agender%20%3B%0A%20%20%20sh%3AminCount%201%3B%0A%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20sh%3Aor%20%28%0A%20%20%20%20%5B%20sh%3Ain%20%28schema%3AMale%20schema%3AFemale%29%20%5D%0A%20%20%20%20%5B%20sh%3Adatatype%20xsd%3Astring%5D%0A%20%20%20%29%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%203%20%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3AbirthDate%20%3B%
20%0A%20%20%20sh%3AmaxCount%201%3B%20%0A%20%20%20sh%3Adatatype%20xsd%3Adate%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%204%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3Aknows%20%3B%20%0A%20%20%20sh%3AnodeKind%20sh%3AIRI%20%3B%0A%20%20%20sh%3Aclass%20%20%20%20%3AUser%20%3B%0A%20%20%5D%20.&schemaEngine=JenaSHACL&schemaFormat=TURTLE&schemaFormatTextArea=TURTLE&schemaInference=none&triggerMode=targetDecls) +- [Example](https://rdfshape.weso.es/shaclValidate?activeSchemaTab=%23schemaTextArea&activeTab=%23dataTextArea&data=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%0A%3Aalice%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Alice%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%0A%20%20%20%20%20%20%20schema%3Aknows%20%20%20%20%20%20%20%20%20%20%3Abob%20.%0A%0A%3Abob%20%20%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AMale%20%3B%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Robert%22%3B%0A%20%20%20%20%20%20%20schema%3AbirthDate%2
0%20%20%20%20%20%221980-03-10%22%5E%5Exsd%3Adate%20.%0A%0A%3Acarol%20a%20%3AUser%3B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%25%2A%5CPasses%7B%3AUserShape%7D%20%2A%29%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Aname%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20%3B%20%20%20%20%20%20%20%20%20%20%20%20%0A%20%20%20%20%20%20%20schema%3Agender%20%20%20%20%20%20%20%20%20schema%3AFemale%20%3B%20%20%0A%20%20%20%20%20%20%20foaf%3Aname%20%20%20%20%20%20%20%20%20%20%20%20%20%22Carol%22%20.&dataFormat=TURTLE&dataFormatTextArea=TURTLE&endpoint=&inference=None&schema=%40prefix%20%3A%20%20%20%20%20%20%20%3Chttp%3A%2F%2Fexample.org%2F%3E%20.%0A%40prefix%20sh%3A%20%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2Fns%2Fshacl%23%3E%20.%0A%40prefix%20xsd%3A%20%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2001%2FXMLSchema%23%3E%20.%0A%40prefix%20schema%3A%20%3Chttp%3A%2F%2Fschema.org%2F%3E%20.%0A%40prefix%20foaf%3A%20%20%20%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2F%3E%20.%0A%40prefix%20rdfs%3A%20%20%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%20.%0A%20%20%20%20%20%20%20%20%0A%3AUserShape%20a%20sh%3ANodeShape%3B%0A%20%20%20sh%3AtargetClass%20%3AUser%20%3B%0A%20%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%201%0A%20%20%20%20sh%3Apath%20%20%20%20%20schema%3Aname%20%3B%20%0A%20%20%20%20sh%3AminCount%201%3B%20%0A%20%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20%20sh%3Adatatype%20xsd%3Astring%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%202%0A%20%20%20sh%3Apath%20schema%3Agender%20%3B%0A%20%20%20sh%3AminCount%201%3B%0A%20%20%20sh%3AmaxCount%201%3B%0A%20%20%20sh%3Aor%20%28%0A%20%20%20%20%5B%20sh%3Ain%20%28schema%3AMale%20schema%3AFemale%29%20%5D%0A%20%20%20%20%5B%20sh%3Adatatype%20xsd%3Astring%5D%0A%20%20%20%29%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20
Blank%20node%203%20%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3AbirthDate%20%3B%20%0A%20%20%20sh%3AmaxCount%201%3B%20%0A%20%20%20sh%3Adatatype%20xsd%3Adate%20%3B%0A%20%20%5D%20%3B%0A%20%20sh%3Aproperty%20%5B%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%23%20Blank%20node%204%20%0A%20%20%20sh%3Apath%20%20%20%20%20schema%3Aknows%20%3B%20%0A%20%20%20sh%3AnodeKind%20sh%3AIRI%20%3B%0A%20%20%20sh%3Aclass%20%20%20%20%3AUser%20%3B%0A%20%20%5D%20.&schemaEngine=JenaSHACL&schemaFormat=TURTLE&schemaInference=none&triggerMode=targetDecls) ## Data + Schema diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index 56aef825..e078db75 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -12,7 +12,7 @@ import es.weso.rdfshape.server.api.routes.permalink.service.PermalinkService import es.weso.rdfshape.server.api.routes.schema.service.SchemaService import es.weso.rdfshape.server.api.routes.shapemap.service.ShapeMapService import es.weso.rdfshape.server.api.routes.shex.service.ShExService -import es.weso.rdfshape.server.api.routes.wikibase.WikidataService +import es.weso.rdfshape.server.api.routes.wikibase.service.WikibaseService import es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException import es.weso.rdfshape.server.utils.error.{ExitCodes, SysUtils} import es.weso.rdfshape.server.utils.secure.SSLHelper @@ -202,7 +202,7 @@ object Server { SchemaService(client).routes <+> ShExService(client).routes <+> ShapeMapService(client).routes <+> - WikidataService(client).routes <+> + WikibaseService(client).routes <+> EndpointService(client).routes <+> PermalinkService(client).routes <+> FetchService(client).routes, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index 8dca6531..2edd7dc9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -1,7 +1,11 @@ package es.weso.rdfshape.server.api.definitions import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} +import es.weso.rdfshape.server.api.format.{ + DataFormat, + SchemaFormat, + ShapeMapFormat +} import es.weso.schema.{Schemas, ShapeMapTrigger} import es.weso.shapemaps.ShapeMap @@ -25,7 +29,7 @@ case object ApiDefaults { val defaultInference: String = availableInferenceEngines.head val defaultActiveDataTab = "#dataTextArea" val defaultActiveSchemaTab = "#schemaTextArea" - val defaultShapeMapFormat: String = ShapeMap.defaultFormat + val defaultShapeMapFormat: ShapeMapFormat = ShapeMapFormat.defaultFormat val availableShapeMapFormats: List[String] = ShapeMap.formats val defaultActiveShapeMapTab = "#shapeMapTextArea" val defaultShapeLabel: IRI = IRI("Shape") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala index 8e3bf0e1..eb4044b9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala @@ -14,25 +14,27 @@ class DataFormat(formatName: String, formatMimeType: MediaType) extends Format { object DataFormat extends FormatCompanion[DataFormat] { override lazy val availableFormats: List[DataFormat] = List( + Json, + Dot, + Svg, + Png, Turtle, - JsonLd, NTriples, + Trig, + JsonLd, RdfXml, RdfJson, - Trig, HtmlMicrodata, HtmlRdfa11, - Dot, - Svg, - Png, - JsonDataFormat + ShExC, + Compact ) - override val defaultFormat: DataFormat = Turtle + override val 
defaultFormat: DataFormat = Json } /** Represents the mime-type "application/json" */ -case object JsonDataFormat +case object Json extends DataFormat( formatName = "json", formatMimeType = new MediaType("application", "json") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index c5970dde..0fa6e4ef 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -19,6 +19,16 @@ trait Format { } +object Format extends FormatCompanion[Format] { + + override val defaultFormat: Format = DataFormat.defaultFormat + + // Should append all available formats in the future. + // Currently, all formats are data formats. + override val availableFormats: List[Format] = + DataFormat.availableFormats // ++ futureFormats +} + trait FormatCompanion[F <: Format] extends LazyLogging { /** Default format to be used when none specified @@ -33,7 +43,7 @@ trait FormatCompanion[F <: Format] extends LazyLogging { * DataFormat * * @param name String name of the format we require - * @return the DataFormat object with the format data (an error String if it does not exist) + * @return the Format object with the format data (an error String if it does not exist) */ def fromString(name: String): Either[String, F] = { if(name.isBlank) Right(defaultFormat) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala index cacbe222..703b2f40 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala @@ -7,7 +7,19 @@ import org.http4s.MediaType * @see {@link es.weso.rdfshape.server.api.format.DataFormat} */ class 
HtmlFormat(formatName: String) - extends DataFormat(formatName, MediaType.text.html) + extends DataFormat(formatName, MediaType.text.html) {} + +/** Companion object with all HtmlFormat static utilities + */ +object HtmlFormat extends FormatCompanion[HtmlFormat] { + + override lazy val availableFormats: List[HtmlFormat] = + List( + HtmlRdfa11, + HtmlMicrodata + ) + override val defaultFormat: HtmlFormat = HtmlRdfa11 +} /** Represents the mime-type "text/html" when used along rdfa11 */ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala index 89ffdb9c..6498c430 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala @@ -7,7 +7,23 @@ import org.http4s.MediaType * @see {@link es.weso.rdfshape.server.api.format.DataFormat} */ sealed class RDFFormat(formatName: String, formatMimeType: MediaType) - extends DataFormat(formatName, formatMimeType) + extends DataFormat(formatName, formatMimeType) {} + +/** Companion object with all RDFFormat static utilities + */ +object RDFFormat extends FormatCompanion[RDFFormat] { + + override lazy val availableFormats: List[RDFFormat] = + List( + Turtle, + NTriples, + Trig, + JsonLd, + RdfXml, + RdfJson + ) + override val defaultFormat: RDFFormat = Turtle +} /** Represents the mime-type "text/turtle" */ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala index 5a598516..f9a74781 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api.format import org.http4s.MediaType -/** Dummy trait 
to differentiate schema formats from the more generic DataFormat +/** Dummy class to differentiate shapemap formats from the more generic DataFormat * @see {@link es.weso.rdfshape.server.api.format.DataFormat} */ class SchemaFormat(formatName: String, formatMimeType: MediaType) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/ShapeMapFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/ShapeMapFormat.scala new file mode 100644 index 00000000..dcc69aa7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/ShapeMapFormat.scala @@ -0,0 +1,33 @@ +package es.weso.rdfshape.server.api.format + +import org.http4s.MediaType + +/** Dummy trait to differentiate shapemap formats from the more generic DataFormat + * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + */ +class ShapeMapFormat(formatName: String, formatMimeType: MediaType) + extends DataFormat(formatName, formatMimeType) { + def this(format: Format) = { + this(format.name, format.mimeType) + } +} + +/** Companion object with all SchemaFormat static utilities + */ +object ShapeMapFormat extends FormatCompanion[ShapeMapFormat] { + + override lazy val availableFormats: List[ShapeMapFormat] = + List( + Compact, + new ShapeMapFormat(Json) + ) + override val defaultFormat: ShapeMapFormat = Compact +} + +/** Represents the mime-type "text/shex" + */ +case object Compact + extends ShapeMapFormat( + formatName = "compact", + formatMimeType = new MediaType("text", "shex") + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index 04fd2e41..b37922ce 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -24,7 +24,7 @@ 
import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.schema._ import es.weso.utils.IOUtils._ -import io.circe._ +import io.circe.Json import org.http4s._ import org.http4s.circe._ import org.http4s.client.Client diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index f58ced4b..efcb28d0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -154,7 +154,7 @@ private[api] object SchemaOperations extends LazyLogging { ) val sp = SchemaParam.empty.copy( schema = optSchema, - optSchemaFormat = optSchemaFormat, + schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), schemaEngine = optSchemaEngine ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala index 96ed62e6..7b33e762 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala @@ -4,10 +4,7 @@ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - defaultSchemaEngine, - defaultSchemaFormat -} +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase import 
es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ @@ -21,16 +18,13 @@ sealed case class SchemaParam( schema: Option[String], schemaURL: Option[String], schemaFile: Option[String], - optSchemaFormat: Option[SchemaFormat], + schemaFormat: SchemaFormat, schemaEngine: Option[String], targetSchemaEngine: Option[String], targetSchemaFormat: Option[String], activeSchemaTab: Option[String] ) extends LazyLogging { - val schemaFormat: SchemaFormat = - optSchemaFormat.getOrElse(defaultSchemaFormat) - def getSchema( data: Option[RDFReasoner] ): IO[(Option[String], Either[String, Schema])] = { @@ -193,7 +187,7 @@ object SchemaParam extends LazyLogging { pair => { val (maybeStr, maybeSchema) = pair maybeSchema match { - // TODO: HERE ERROR IS NULL + // TODO: HERE "SRT" ERROR IS NULL case Left(str) => IO.pure(Left(str)) case Right(schema) => IO.pure(Right((schema, sp.copy(schema = maybeStr)))) @@ -215,11 +209,11 @@ object SchemaParam extends LazyLogging { } private[api] def mkSchemaParam(partsMap: PartsMap): IO[SchemaParam] = for { - schema <- partsMap.optPartValue(SchemaParameter.name) - schemaURL <- partsMap.optPartValue(SchemaURLParameter.name) - schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) - schemaFormatValue <- getSchemaFormat(SchemaFormatParameter.name, partsMap) - schemaEngine <- partsMap.optPartValue(SchemaEngineParameter.name) + schema <- partsMap.optPartValue(SchemaParameter.name) + schemaURL <- partsMap.optPartValue(SchemaURLParameter.name) + schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) + schemaFormat <- getSchemaFormat(SchemaFormatParameter.name, partsMap) + schemaEngine <- partsMap.optPartValue(SchemaEngineParameter.name) targetSchemaEngine <- partsMap.optPartValue( TargetSchemaEngineParameter.name ) @@ -232,7 +226,7 @@ object SchemaParam extends LazyLogging { schema, schemaURL, schemaFile, - schemaFormatValue, + schemaFormat, schemaEngine, targetSchemaEngine, targetSchemaFormat, @@ -240,23 +234,23 @@ object 
SchemaParam extends LazyLogging { ) } + /** Try to build a {@link es.weso.rdfshape.server.api.format.SchemaFormat} object from a request's parameters + * + * @param parameter Name of the parameter with the format name + * @param parameterMap Request parameters + * @return The SchemaFormat found or the default one + */ private def getSchemaFormat( - name: String, - partsMap: PartsMap - ): IO[Option[SchemaFormat]] = for { - maybeStr <- partsMap.optPartValue(name) - } yield maybeStr match { - case None => None - case Some(str) => - SchemaFormat - .fromString(str) - .fold( - err => { - logger.error(s"Unsupported schemaFormat for $name: $str") - None - }, - df => Some(df) - ) + parameter: String, + parameterMap: PartsMap + ): IO[SchemaFormat] = { + for { + maybeFormat <- PartsMap.getFormat(parameter, parameterMap) + } yield maybeFormat match { + case None => SchemaFormat.defaultFormat + case Some(format) => new SchemaFormat(format) + } + } private[api] def empty: SchemaParam = @@ -264,7 +258,7 @@ object SchemaParam extends LazyLogging { schema = None, schemaURL = None, schemaFile = None, - optSchemaFormat = None, + schemaFormat = SchemaFormat.defaultFormat, schemaEngine = None, targetSchemaEngine = None, targetSchemaFormat = None, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index e05e82df..ee31bab5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -18,7 +18,7 @@ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.schema._ import es.weso.utils.IOUtils._ -import io.circe._ +import io.circe.Json import org.http4s._ import org.http4s.circe._ 
import org.http4s.client.Client diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala index f64940cf..7d065296 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala @@ -4,10 +4,8 @@ import cats.effect.IO import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.PrefixMap -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - defaultActiveShapeMapTab, - defaultShapeMapFormat -} +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultActiveShapeMapTab +import es.weso.rdfshape.server.api.format.ShapeMapFormat import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.shapemaps.ShapeMap @@ -15,23 +13,12 @@ import es.weso.shapemaps.ShapeMap case class TriggerModeParam( triggerMode: Option[String], shapeMap: Option[String], - shapeMapFormatTextarea: Option[String], shapeMapURL: Option[String], - shapeMapFormatUrl: Option[String], shapeMapFile: Option[String], - shapeMapFormatFile: Option[String], + shapeMapFormat: ShapeMapFormat, activeShapeMapTab: Option[String] ) extends LazyLogging { - val shapeMapFormat: Option[String] = parseShapeMapTab( - activeShapeMapTab.getOrElse(defaultActiveShapeMapTab) - ) match { - case Right(`shapeMapUrlType`) => shapeMapFormatUrl - case Right(`shapeMapFileType`) => shapeMapFormatFile - case Right(`shapeMapTextAreaType`) => shapeMapFormatTextarea - case _ => None - } - def getShapeMap( nodesPrefixMap: PrefixMap, shapesPrefixMap: PrefixMap @@ -49,12 +36,10 @@ case class TriggerModeParam( case Some(shapeMapUrl) => logger.trace(s"ShapeMapUrl: $shapeMapUrl") - val 
shapeMapFormat = - shapeMapFormatUrl.getOrElse(defaultShapeMapFormat) ShapeMap .fromURI( shapeMapUrl, - shapeMapFormat, + shapeMapFormat.name, None, nodesPrefixMap, shapesPrefixMap @@ -79,9 +64,7 @@ case class TriggerModeParam( case Some(shapeMapStr) => logger.trace(s"ShapeMapFile: $shapeMapStr") - val shapeMapFormat = - shapeMapFormatFile.getOrElse(defaultShapeMapFormat) - ShapeMap.fromString(shapeMapStr, shapeMapFormat, None) match { + ShapeMap.fromString(shapeMapStr, shapeMapFormat.name, None) match { case Left(ls) => IO.pure((Some(shapeMapStr), Left(ls.toList.mkString("\n")))) case Right(parsedShapeMap) => @@ -96,9 +79,7 @@ case class TriggerModeParam( case Some(shapeMapStr) => logger.trace(s"ShapeMapText: $shapeMapStr") - val shapeMapFormat = - shapeMapFormatTextarea.getOrElse(defaultShapeMapFormat) - ShapeMap.fromString(shapeMapStr, shapeMapFormat, None) match { + ShapeMap.fromString(shapeMapStr, shapeMapFormat.name, None) match { case Left(ls) => IO.pure((Some(shapeMapStr), Left(ls.toList.mkString("\n")))) case Right(parsedShapeMap) => @@ -151,14 +132,10 @@ object TriggerModeParam extends LazyLogging { optShapeMap <- partsMap.optPartValue(ShapeMapTextParameter.name) optShapeMapURL <- partsMap.optPartValue(ShapeMapUrlParameter.name) optShapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) - optShapeMapFormatTextArea <- partsMap.optPartValue( - ShapeMapFormatTextAreaParameter.name - ) - optShapeMapFormatUrl <- partsMap.optPartValue( - ShapeMapFormatUrlParameter.name - ) - optShapeMapFormatFile <- partsMap.optPartValue( - ShapeMapFormatFileParameter.name + + shapeMapFormat <- getShapeMapFormat( + ShapeMapFormatParameter.name, + partsMap ) optActiveShapeMapTab <- partsMap.optPartValue( ActiveShapeMapTabParameter.name @@ -167,16 +144,14 @@ object TriggerModeParam extends LazyLogging { logger.debug(s"optTriggerMode: $optTriggerMode") logger.debug(s"optShapeMap: $optShapeMap") logger.debug(s"optActiveShapeMapTab: $optActiveShapeMapTab") - 
logger.debug(s"optShapeMapFormatFile: $optShapeMapFormatFile") + logger.debug(s"optShapeMapFormat: $shapeMapFormat") TriggerModeParam( - optTriggerMode, - optShapeMap, - optShapeMapFormatTextArea, - optShapeMapURL, - optShapeMapFormatUrl, - optShapeMapFile, - optShapeMapFormatFile, - optActiveShapeMapTab + triggerMode = optTriggerMode, + shapeMap = optShapeMap, + shapeMapURL = optShapeMapURL, + shapeMapFile = optShapeMapFile, + shapeMapFormat = shapeMapFormat, + activeShapeMapTab = optActiveShapeMapTab ) } val r: IO[Either[String, TriggerModeParam]] = tp.map(_.asRight[String]) @@ -190,4 +165,23 @@ object TriggerModeParam extends LazyLogging { ) ) } + + /** Try to build a {@link es.weso.rdfshape.server.api.format.ShapeMapFormat} object from a request's parameters + * + * @param parameter Name of the parameter with the format name + * @param parameterMap Request parameters + * @return The ShapeMapFormat found or the default one + */ + private def getShapeMapFormat( + parameter: String, + parameterMap: PartsMap + ): IO[ShapeMapFormat] = { + for { + maybeFormat <- PartsMap.getFormat(parameter, parameterMap) + } yield maybeFormat match { + case None => ShapeMapFormat.defaultFormat + case Some(format) => new ShapeMapFormat(format) + } + + } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala index e8ccb533..6a25a04f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala @@ -24,7 +24,7 @@ case class ShapeMapInfoResult private ( */ def toJson: Json = { Json.fromFields( - List(("msg", Json.fromString(msg))) ++ + List(("message", Json.fromString(msg))) ++ maybeField(shapeMap, "shapeMap", Json.fromString) ++ maybeField( 
shapeMapFormat, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index 53330b5b..bc863232 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -3,6 +3,7 @@ package es.weso.rdfshape.server.api.routes.shapemap.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api +import es.weso.rdfshape.server.api.format.ShapeMapFormat import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.getShapeMap import es.weso.rdfshape.server.api.routes.shapemap.logic.{ @@ -11,7 +12,6 @@ import es.weso.rdfshape.server.api.routes.shapemap.logic.{ } import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson -import es.weso.shapemaps.{ShapeMap => ShapeMapW} import io.circe._ import org.http4s._ import org.http4s.circe._ @@ -34,11 +34,28 @@ class ShapeMapService(client: Client[IO]) */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + /** Returns a JSON array with the accepted shapeMap formats. + */ case GET -> Root / `api` / `verb` / "formats" => - val formats = ShapeMapW.availableFormats + val formats = ShapeMapFormat.availableFormats.map(_.name) val json = Json.fromValues(formats.map(str => Json.fromString(str))) Ok(json) + /** Obtain information about a shapeMap. 
+ * Receives a JSON object with the input shapeMap information: + * - shapeMap [String]: Raw shapemap data + * - shapeMapUrl [String]: Url containing the shapemap + * - shapeMapFile [File Object]: File containing the shapemap + * - shapeMapFormat [String]: Format of the shapeMap + * - activeShapeMapTab [String]: Identifies the source of the shapeMap (raw, URL, file...) + * Returns a JSON object with the shapeMap information: + * - message [String]: Informational message on success + * - shapeMap [String]: Input shapeMap string + * - shapeMapFormat [String]: Input shapeMap format + * - shapeMapJson [Array]: Array of the elements in the shapeMap + * - node [String]: Referenced node + * - shape [String]: Target shape for the node + */ case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala index 75c04fb4..848e3d4c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala @@ -22,6 +22,8 @@ class ShExService(client: Client[IO]) */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + /** Returns a JSON array with the accepted ShEx schema formats + */ case GET -> Root / `api` / `verb` / "formats" => val formats = Schemas.availableFormats val json = Json.fromValues(formats.map(str => Json.fromString(str))) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala deleted file mode 100644 index b64f03fc..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikidataService.scala 
+++ /dev/null @@ -1,760 +0,0 @@ -package es.weso.rdfshape.server.api.routes.wikibase - -// import cats._ - -import cats.data._ -import cats.effect._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.RDFReader -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.nodes.IRI -import es.weso.rdf.sgraph._ -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.schemaResult2json -import es.weso.rdfshape.server.api.definitions._ -import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.utils.Http4sUtils._ -import es.weso.schema.{Schema, ShapeMapTrigger} -import es.weso.schemaInfer.{InferOptions, SchemaInfer} -import es.weso.shapemaps.{Status => _, _} -import es.weso.utils.IOUtils._ -import es.weso.wikibaserdf._ -import io.circe._ -import io.circe.parser._ -import fs2._ -import org.http4s._ -import org.http4s.circe._ -import org.http4s.client._ -import org.http4s.client.middleware.FollowRedirect -import org.http4s.dsl._ -import org.http4s.headers._ -import org.http4s.implicits._ -import org.http4s.multipart._ - -import scala.util.control.NoStackTrace -import scala.util.matching.Regex -import es.weso.rdfshape.server.api.utils.parameters.PartsMap - -/** API service to handle wikidata related operations - * - * @param client HTTP4S client object - */ -class WikidataService(client: Client[IO]) - extends Http4sDsl[IO] - with ApiService - with LazyLogging { - - override val verb: String = "wikidata" - - val wikidataEntityUrl = uri"http://www.wikidata.org/entity" - val apiUri = uri"/api/wikidata/entity" - val wikidataUri: Uri = uri"https://query.wikidata.org/sparql" - val defaultLimit = 20 - val defaultContinue = 0 - val redirectClient = FollowRedirect(3)(client) - - /** Describe the API routes handled by this service and the actions 
performed on each of them - */ - def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - - case GET -> Root / `api` / `verb` / "entityLabel" :? - WdEntityParameter(entity) +& - LanguageParameter(language) => - val uri = Uri.unsafeFromString( - s"https://www.wikidata.org/w/api.php?action=wbgetentities&props=labels&ids=$entity&languages=$language&format=json" - ) - val req: Request[IO] = Request(method = GET, uri = uri) - for { - either <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - resp <- Ok(either.fold(Json.fromString, identity)) - } yield resp - - case GET -> Root / `api` / `verb` / "schemaContent" :? - WdSchemaParameter(wdSchema) => { - val uri = uri"https://www.wikidata.org".withPath( - Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") - ) - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[String].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[String] - ) - } - json: Json = eitherValues.fold( - e => Json.fromFields(List(("error", Json.fromString(e)))), - s => Json.fromFields(List(("result", Json.fromString(s)))) - ) - resp <- Ok(json) - } yield resp - - } - - case GET -> Root / `api` / `verb` / "searchEntity" :? 
- EndpointParameter(endpoint) +& - LabelParameter(label) +& - LanguageParameter(language) +& - LimitParameter(maybelimit) +& - ContinueParameter(maybeContinue) => { - val limit: String = maybelimit.getOrElse(defaultLimit.toString) - val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - - val requestUrl = s"""${endpoint.getOrElse("https://www.wikidata.org")}""" - - val uri = Uri - .fromString(requestUrl) - .valueOr(throw _) - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "wbsearchentities") - .withQueryParam("search", label) - .withQueryParam("language", language) - .withQueryParam("limit", limit) - .withQueryParam("continue", continue) - .withQueryParam("format", "json") - - logger.debug(s"wikidata searchEntity uri: ${uri.toString}") - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- cnvEntities(json) - } yield converted - resp <- Ok(eitherResult.fold(Json.fromString, identity)) - } yield resp - } - - case GET -> Root / `api` / `verb` / "searchProperty" :? 
- EndpointParameter(endpoint) +& - LabelParameter(label) +& - LanguageParameter(language) +& - LimitParameter(maybelimit) +& - ContinueParameter(maybeContinue) => { - val limit: String = maybelimit.getOrElse(defaultLimit.toString) - val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - - val requestUrl = s"${endpoint.getOrElse("https: //www.wikidata.org")}" - val uri = Uri - .fromString(requestUrl) - .valueOr(throw _) - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "wbsearchentities") - .withQueryParam("search", label) - .withQueryParam("language", language) - .withQueryParam("limit", limit) - .withQueryParam("continue", continue) - .withQueryParam("type", "property") - .withQueryParam("format", "json") - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- cnvEntities(json) - } yield converted - resp <- Ok(eitherResult.fold(Json.fromString, identity)) - } yield resp - } - - case GET -> Root / `api` / `verb` / "searchLexeme" :? 
- LabelParameter(label) +& - LanguageParameter(language) +& - LimitParameter(maybelimit) +& - ContinueParameter(maybeContinue) => { - val limit: String = maybelimit.getOrElse(defaultLimit.toString) - val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - - val uri = uri"https://www.wikidata.org" - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "wbsearchentities") - .withQueryParam("search", label) - .withQueryParam("language", language) - .withQueryParam("limit", limit) - .withQueryParam("continue", continue) - .withQueryParam("type", "lexeme") - .withQueryParam("format", "json") - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- cnvEntities(json) - } yield converted - resp <- Ok(eitherResult.fold(Json.fromString, identity)) - } yield resp - } - - case GET -> Root / `api` / `verb` / "languages" => { - - val uri = uri"https://www.wikidata.org" - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "query") - .withQueryParam("meta", "wbcontentlanguages") - .withQueryParam("wbclcontext", "term") - .withQueryParam("wbclprop", "code|autonym") - .withQueryParam("format", "json") - - logger.debug(s"wikidata/languages uri: ${uri.toString}") - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- cnvLanguages(json) - } yield converted - resp <- Ok( - 
eitherResult.fold(Json.fromString, identity) - ) - } yield resp - } - - case req @ POST -> Root / `api` / `verb` / "query" => - req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - for { - optQuery <- partsMap.optPartValue("query") - optEndpoint <- partsMap.optPartValue("endpoint") - endpoint = optEndpoint.getOrElse(wikidataUri.toString()) - query = optQuery.getOrElse("") - req: Request[IO] = - Request( - method = GET, - uri = Uri - .fromString(endpoint) - .valueOr(throw _) - .withQueryParam("query", query) - ) - .withHeaders( - `Accept`(MediaType.application.`json`) - ) - eitherValue <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - resp <- Ok(eitherValue.fold(Json.fromString, identity)) - } yield resp - } - } - - case req @ POST -> Root / `api` / `verb` / "extract" => { - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - val r: EitherT[IO, String, Response[IO]] = for { - label <- EitherT(partsMap.eitherPartValue("entity")) - info <- either2es[InfoEntity](cnvEntity(label)) - _ <- { - logger.debug(s"Extraction URI: ${info.uri}"); - ok_esf[Unit, IO](()) - } - strRdf <- io2es(redirectClient.expect[String](info.uri)) - eitherInferred <- io2es( - RDFAsJenaModel - .fromString(strRdf, "TURTLE") - .flatMap( - _.use(rdf => - for { - rdfSerialized <- rdf.serialize("TURTLE") - nodeSelector = RDFNodeSelector(IRI(label)) - inferred <- SchemaInfer.runInferSchema( - rdf, - nodeSelector, - "ShEx", - IRI(s"http://example.org/Shape_${info.localName}"), - InferOptions.defaultOptions.copy(maxFollowOn = 3) - ) - } yield inferred - ) - ) - ) - pair <- either2es[(Schema, ResultShapeMap)](eitherInferred) - shExCStr <- io2es({ - val (schema, _) = pair - schema.serialize("SHEXC") - }) - _ <- { - logger.trace(s"ShExC str: $shExCStr"); - ok_es[Unit](()) - } - resp <- 
io2es(Ok(mkExtractAnswer(shExCStr, label))) - } yield resp - for { - either <- r.value - resp <- either.fold(s => Ok(errExtract(s)), r => IO.pure(r)) - } yield resp - } - } - - // TODO: This one doesn't work. It gives a timeout response - case req @ POST -> Root / `api` / `verb` / "shexer" => { - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - val r: EitherT[IO, String, Response[IO]] = for { - label <- EitherT(partsMap.eitherPartValue("entity")) - jsonParams <- either2es[Json](mkShexerParams(label)) - postRequest = Request[IO]( - method = POST, - uri = uri"http://156.35.94.158:8081/shexer" - ).withHeaders(`Content-Type`(MediaType.application.`json`)) - .withEntity[Json](jsonParams) - _ <- { - logger.debug(s"URI: ${jsonParams.spaces2}"); - ok_es[Unit](()) - } - result <- f2es(redirectClient.expect[Json](postRequest)) - _ <- { - logger.trace(s"Result\n${result.spaces2}"); - ok_es[Unit](()) - } - resp <- f2es(Ok(result)) - } yield resp - for { - either <- r.value - resp <- either.fold(s => Ok(errExtract(s)), r => IO.pure(r)) - } yield resp - } - } - - case req @ POST -> Root / `api` / `verb` / "validate" => { - logger.debug(s"Wikidata validate request: $req") - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - val r: IO[Response[IO]] = for { - eitherItem <- partsMap.eitherPartValue("item") - _ <- { - logger.debug(eitherItem.toString); - IO.pure(()) - } - item <- fromEither(eitherItem) - _ <- { - logger.debug(item); - IO.pure(()) - } - info <- fromEither(cnvEntity2(item)) - _ <- { - logger.debug(info.toString); - IO.pure(()) - } - pair <- WikibaseSchemaParam.mkSchema(partsMap, None, client) - _ <- { - logger.debug(pair.toString()); - IO.pure(()) - } - (schema, wbp) = pair - iriItem <- fromEither(IRI.fromString(info.sourceUri)) - shapeMap <- fromEither(ShapeMap.empty.add(iriItem, Start)) - triggerMode = ShapeMapTrigger(shapeMap) - result <- for { - res1 <- WikibaseRDF.wikidata - res2 <- RDFAsJenaModel.empty - vv <- (res1, 
res2).tupled.use { case (rdf, builder) => - for { - r <- schema.validate(rdf, triggerMode, builder) - json <- schemaResult2json(r) - } yield json - } - } yield vv - resp <- Ok(result) - } yield resp - r.attempt.flatMap(_.fold(s => Ok(errExtract(s.getMessage)), IO.pure(_))) - } - } - } - - private def fromEither[A](either: Either[String, A]): IO[A] = { - either.fold(s => IO.raiseError(WikibaseServiceError(s)), IO.pure(_)) - } - - private def errExtract(msg: String): Json = { - Json.fromFields( - List( - ("error", Json.fromString(msg)) - ) - ) - } - - private def mkExtractAnswer(result: String, entity: String): Json = { - Json.fromFields( - List( - ("entity", Json.fromString(entity)), - ("result", Json.fromString(result)) - ) - ) - } - - private def mkShexerParams(entity: String): Either[String, Json] = for { - prefixes <- wikidataPrefixes - } yield Json.fromFields( - List( - ("prefixes", prefixes), - ( - "shape_map", - Json.fromString( - "SPARQL'SELECT DISTINCT ?virus WHERE { VALUES ?virus { wd:Q82069695 } }'@ " - ) - ), - ("endpoint", Json.fromString("https://query.wikidata.org/sparql")), - ("all_classes", Json.False), - ("query_depth", Json.fromInt(1)), - ("threshold", Json.fromInt(0)), - ( - "instantiation_prop", - Json.fromString("http://www.wikidata.org/prop/direct/P31") - ), - ("disable_comments", Json.True), - ("shape_qualifiers_mode", Json.True), - ( - "namespaces_for_qualifiers", - Json.arr(Json.fromString("http://www.wikidata.org/prop/")) - ) - ) - ) - - private def wikidataPrefixes: Either[String, Json] = { - val json = - """{ - "http://wikiba.se/ontology#": "wikibase", - "http://www.bigdata.com/rdf#": "bd", - "http://www.wikidata.org/entity/": "wd", - "http://www.wikidata.org/prop/direct/": "wdt", - "http://www.wikidata.org/prop/direct-normalized/": "wdtn", - "http://www.wikidata.org/entity/statement/": "wds", - "http://www.wikidata.org/prop/": "p", - "http://www.wikidata.org/reference/": "wdref", - "http://www.wikidata.org/value/": "wdv", - 
"http://www.wikidata.org/prop/statement/": "ps", - "http://www.wikidata.org/prop/statement/value/": "psv", - "http://www.wikidata.org/prop/statement/value-normalized/": "psn", - "http://www.wikidata.org/prop/qualifier/": "pq", - "http://www.wikidata.org/prop/qualifier/value/": "pqv", - "http://www.wikidata.org/prop/qualifier/value-normalized/": "pqn", - "http://www.wikidata.org/prop/reference/": "pr", - "http://www.wikidata.org/prop/reference/value/": "prv", - "http://www.wikidata.org/prop/reference/value-normalized/": "prn", - "http://www.wikidata.org/prop/novalue/": "wdno" - }""" - parse(json).leftMap(e => s"Error parsing prefixes: $e") - } - - private def cnvEntity(entity: String): Either[String, InfoEntity] = { - val wdRegex = "http://www.wikidata.org/entity/(.*)".r - entity match { - case wdRegex(localName) => { - val uri = - uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") - InfoEntity(localName, uri, entity).asRight[String] - } - case _ => - s"Entity: $entity doesn't match regular expression: $wdRegex" - .asLeft[InfoEntity] - } - } - - private def cnvEntity2(entity: String): Either[String, InfoEntity] = { - val wdRegex: Regex = "<(http://www.wikidata.org/entity/(.*))>".r - entity match { - case wdRegex(_, _) => { - val matches = wdRegex.findAllIn(entity) - logger.debug(s"Wikidata matches: ${matches.groupCount}") - if(matches.groupCount == 2) { - val localName = matches.group(2) - val sourceUri = matches.group(1) - val uri = - uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") - logger.debug(s"Wikidata item uri: $uri") - InfoEntity(localName, uri, sourceUri).asRight[String] - } else - s"Entity: $entity doesn't match regular expression: $wdRegex" - .asLeft[InfoEntity] - } - case _ => - s"Entity: $entity doesn't match regular expression: $wdRegex" - .asLeft[InfoEntity] - } - } - - def cnvEntities(json: Json): Either[String, Json] = for { - entities <- json.hcursor - .downField("search") - 
.values - .toRight("Error obtaining search value") - converted = Json.fromValues( - entities.map((value: Json) => - Json.fromFields( - List( - ( - "label", - value.hcursor.downField("label").focus.getOrElse(Json.Null) - ), - ("id", value.hcursor.downField("id").focus.getOrElse(Json.Null)), - ( - "uri", - value.hcursor.downField("concepturi").focus.getOrElse(Json.Null) - ), - ( - "descr", - value.hcursor.downField("description").focus.getOrElse(Json.Null) - ) - ) - ) - ) - ) - } yield converted - - private def cnvLanguages(json: Json): Either[String, Json] = for { - // query <- .focus.toRight(s"Error obtaining query at ${json.spaces2}" ) - languagesObj <- json.hcursor - .downField("query") - .downField("wbcontentlanguages") - .focus - .toRight(s"Error obtaining query/wbcontentlanguages at ${json.spaces2}") - keys <- languagesObj.hcursor.keys.toRight( - s"Error obtaining values from languages: ${languagesObj.spaces2}" - ) - converted = Json.fromValues( - keys.map(key => - Json.fromFields( - List( - ( - "label", - languagesObj.hcursor - .downField(key) - .downField("code") - .focus - .getOrElse(Json.Null) - ), - ( - "name", - languagesObj.hcursor - .downField(key) - .downField("autonym") - .focus - .getOrElse(Json.Null) - ) - ) - ) - ) - ) - } yield { - converted - } - - private def wdEntity( - optEntity: Option[String], - withDot: Boolean - ): IO[Option[Json]] = { - optEntity match { - case None => IO.pure(None) - case Some(entity) => { - val process = for { - uri <- getUri(entity) - // data <- resolve(uri) - // rdf <- getRDF(data) - // maybeDot <- generateDot(rdf, withDot)/* if (generateDot) - /* EitherT.fromEither[F](RDF2Dot.rdf2dot(rdf).bimap(e => s"Error - * converting to Dot: $e", s => Some(s.toString))) */ - // else EitherT.pure(none) */ - json <- prepareJson( - entity, - proxyUri(uri) - ) // prepareJsonOk(entity, uri, rdf, maybeDot) - } yield Option(json) - process.value.flatMap(e => IO.pure(mkJson(entity, e))) - } - } - } - - private def proxyUri(uri: Uri): 
Uri = { - apiUri.withQueryParam("entity", uri.renderString) - } - - private def mkJson( - entity: String, - e: Either[String, Option[Json]] - ): Option[Json] = - e.fold(msg => Some(jsonErr(entity, msg)), identity) - - private def jsonErr(entity: String, msg: String): Json = - Json.fromFields( - List( - ("entity", Json.fromString(entity)), - ("error", Json.fromString(msg)) - ) - ) - - private def getUri(entity: String): EitherT[IO, String, Uri] = { - logger.debug(s"get entity: $entity") - val q = """Q(\d*)""".r - entity match { - case q(n) => EitherT.pure(wikidataEntityUrl / ("Q" + n)) - case _ => - EitherT.fromEither[IO]( - Uri - .fromString(entity) - .leftMap(f => s"Error creating URI from $entity: $f") - ) - } - } - - /* private def getRDF(str: Stream[F,String]): EitherT[F, String, RDFReader] = - * EitherT.liftF(LiftIO[F].liftIO(RDFAsJenaModel.empty)) */ - - private def prepareJson(entity: String, uri: Uri): EitherT[IO, String, Json] = - EitherT.pure( - Json.fromFields( - List( - ("entity", Json.fromString(entity)), - ("uri", Json.fromString(uri.toString)) - ) - ) - ) - - private def resolve(uri: Uri): EitherT[IO, String, Stream[IO, String]] = { - logger.debug(s"Resolve: $uri") - for { - eitherData <- EitherT.liftF(resolveStream[IO](uri, client)) - data <- EitherT.fromEither[IO]( - eitherData.leftMap(e => s"Error retrieving $uri: $e") - ) - } yield data - } - - private def generateDot( - rdf: RDFReader, - maybeDot: Boolean - ): EitherT[IO, String, Option[String]] = - if(maybeDot) for { - sgraph <- fromIO( - RDF2SGraph.rdf2sgraph(rdf) - ) // .bimap(e => s"Error converting to Dot: $e", s => Some(s.toString))) - } yield Option(sgraph.toDot(RDFDotPreferences.defaultRDFPrefs)) - else - EitherT.pure(None) - - private def fromIO[A](io: IO[A]): EitherT[IO, String, A] = EitherT.liftF(io) - - private def prepareJsonOk( - entity: String, - uri: Uri, - rdf: RDFReader, - maybeDot: Option[String] - ): EitherT[IO, String, Json] = for { - serialized <- 
fromIO(rdf.serialize("TURTLE")) - } yield Json.fromFields( - List( - ("entity", Json.fromString(entity)), - ("uri", Json.fromString(uri.toString)), - ("rdf", Json.fromString(serialized)) - ) ++ dotField(maybeDot) - ) - - private def dotField(maybeDot: Option[String]): List[(String, Json)] = - maybeDot.fold(List[(String, Json)]())(s => - List(("dot", Json.fromString(s))) - ) - - /* optEntity match { case None => Monad[F].pure(None) case Some(entity) => { - * val q = """Q(\d*)""".r val eitherUri = entity match { case q(n) => - * Uri.fromString(wikidataEntityUrl + n) case _ => Uri.fromString(entity) } - * val result = eitherUri.fold( e => Json.fromString(s"Bad URI ${entity}: - * $e"), uri => for { data <- client.use { - * withRedirect(_).expect[String]("http://www.wikidata.org/entity/Q14317") } - * eitherRDF = RDFAsJenaModel.fromString(data,"TURTLE",None).fold( e => - * Monad[F].pure(s"Error parsing RDF: $e\n$data".asLeft[RDFReader]), rdf => - * rdf.asRight[String]) _ <- LiftIO[F].liftIO(IO { println(s"Searching - * wikidata: $data") }) } yield Monad[F].pure(Json.fromFields(List( ("entity", - * Json.fromString(entity)), ("data", Json.fromString(data)) )))) ) } */ - - /* Languages have this structure: - * { "batchcomplete": "", "query": { "wbcontentlanguages": { "aa": { "code": - * "aa", "autonym": "Qafár af" }, "ab": { "code": "ab", "autonym": "Аҧсшәа" } - * } } */ - - private def cnvEntitySchema(wdSchema: String): Uri = { - val uri = uri"https://www.wikidata.org".withPath( - Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") - ) - uri - } - - case class WikibaseServiceError(msg: String) - extends RuntimeException(msg) - with NoStackTrace - - private case class InfoEntity(localName: String, uri: Uri, sourceUri: String) - -} - -object WikidataService { - - /** Service factory - * - * @param client Underlying http4s client - * @return A new Wikidata Service - */ - def apply(client: Client[IO]): WikidataService = - new WikidataService(client) -} diff 
--git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala new file mode 100644 index 00000000..76514796 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala @@ -0,0 +1,66 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic + +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import org.http4s.Uri +import org.http4s.implicits._ + +import scala.util.matching.Regex + +/** Data class containing the information to fetch a given WikibaseEntity + */ +private[wikibase] case class WikibaseEntity( + localName: String, + uri: Uri, + sourceUri: String +) + +/** Static utilities to aid in converting information to WikibaseEntity instances + */ +object WikibaseEntity extends LazyLogging { + + /** Create a WikibaseEntity instance from a wikidata URI + * + * @param entity String containing an entity unique URI in wikidata (e.g.: https://www.wikidata.org/wiki/Q14317) + * @return Either an instance of {@link es.weso.rdfshape.server.api.routes.wikibase.logic.WikibaseEntity} containing the entity information, or an error message + */ + def uriToEntity(entity: String): Either[String, WikibaseEntity] = { + val wdRegex = "http://www.wikidata.org/entity/(.*)".r + entity match { + case wdRegex(localName) => + val uri = + uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") + WikibaseEntity(localName, uri, entity).asRight[String] + case _ => + s"Entity: $entity doesn't match regular expression: $wdRegex" + .asLeft[WikibaseEntity] + } + } + + /** Create a WikibaseEntity instance from a wikidata URI (alternate) + * + * @param entity String containing an entity unique URI in wikidata (e.g.: https://www.wikidata.org/wiki/Q14317) + * @return Either an instance of {@link 
es.weso.rdfshape.server.api.routes.wikibase.logic.WikibaseEntity} containing the entity information, or an error message + */ + def uriToEntity2(entity: String): Either[String, WikibaseEntity] = { + val wdRegex: Regex = "<(http://www.wikidata.org/entity/(.*))>".r + entity match { + case wdRegex(_, _) => + val matches = wdRegex.findAllIn(entity) + logger.debug(s"Wikidata matches: ${matches.groupCount}") + if(matches.groupCount == 2) { + val localName = matches.group(2) + val sourceUri = matches.group(1) + val uri = + uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") + logger.debug(s"Wikidata item uri: $uri") + WikibaseEntity(localName, uri, sourceUri).asRight[String] + } else + s"Entity: $entity doesn't match regular expression: $wdRegex" + .asLeft[WikibaseEntity] + case _ => + s"Entity: $entity doesn't match regular expression: $wdRegex" + .asLeft[WikibaseEntity] + } + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala similarity index 98% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala index 58dedf6b..3c83d541 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/WikibaseSchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.wikibase +package es.weso.rdfshape.server.api.routes.wikibase.logic import cats.effect._ import es.weso.rdf.RDFReasoner diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala new file mode 100644 index 00000000..f979bea5 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala @@ -0,0 +1,562 @@ +package es.weso.rdfshape.server.api.routes.wikibase.service + +import cats.data._ +import cats.effect._ +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api +import es.weso.rdfshape.server.api.routes.ApiService +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.schemaResult2json +import es.weso.rdfshape.server.api.routes.wikibase.logic.WikibaseEntity.{ + uriToEntity, + uriToEntity2 +} +import es.weso.rdfshape.server.api.routes.wikibase.logic.{ + WikibaseEntity, + WikibaseSchemaParam +} +import es.weso.rdfshape.server.api.routes.wikibase.service.WikibaseServiceUtils.{ + convertEntities, + convertLanguages, + mkShexerParams +} +import es.weso.rdfshape.server.api.utils.OptEitherF.ioFromEither +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson +import es.weso.schema.{Schema, ShapeMapTrigger} +import es.weso.schemaInfer.{InferOptions, SchemaInfer} +import es.weso.shapemaps.{Status => _, _} +import es.weso.utils.IOUtils._ +import es.weso.wikibaserdf._ +import io.circe._ +import org.http4s._ +import org.http4s.circe._ +import org.http4s.client._ +import org.http4s.client.middleware.FollowRedirect +import org.http4s.dsl._ +import org.http4s.headers._ +import org.http4s.implicits._ +import org.http4s.multipart._ + +/** API service to handle wikibase (and mostly wikidata) related operations + * Acts as an intermediate proxy between clients and the 
MediaWiki API + * + * @param client HTTP4S client object + */ +class WikibaseService(client: Client[IO]) + extends Http4sDsl[IO] + with ApiService + with LazyLogging { + + override val verb: String = "wikidata" + + val wikidataUrl = "https://www.wikidata.org" + val wikidataUri = uri"https://www.wikidata.org" + val wikidataEntityUrl = uri"https://www.wikidata.org/entity" + val apiUri = uri"/api/wikidata/entity" + val wikidataQueryUri: Uri = uri"https://query.wikidata.org/sparql" + val defaultLimit = 20 + val defaultContinue = 0 + val redirectClient: Client[IO] = FollowRedirect(3)(client) + + /** Describe the API routes handled by this service and the actions performed on each of them + */ + def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + + /** Search for wikidata entities using MediaWiki's API. Search based on entity ID + * See https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities + * Receives a Wikidata entity label and a language and fetches entities in Wikidata + * - wdEntity [String]: Wikidata entity label + * - language [String]: Response desired language + * Returns a JSON object after querying MediaWiki's "wbgetentities" endpoint + */ + case GET -> Root / `api` / `verb` / "entityLabel" :? 
+ WdEntityParameter(entity) +& + LanguageParameter(language) => + val uri = wikidataUri + .withPath(Uri.Path.unsafeFromString("/w/api.php")) + .withQueryParam("action", "wbgetentities") + .withQueryParam("props", "labels") + .withQueryParam("ids", entity) + .withQueryParam("languages", language) + .withQueryParam("format", "json") + + logger.debug(s"wikidata searchEntity uri: ${uri.toString}") + + val req: Request[IO] = Request(method = GET, uri = uri) + for { + either <- client.run(req).use { + case Status.Successful(r) => + r.attemptAs[Json].leftMap(_.message).value + case r => + r.as[String] + .map(b => + s"Request $req failed with status ${r.status.code} and body $b" + .asLeft[Json] + ) + } + resp <- Ok(either.fold(Json.fromString, identity)) + } yield resp + + /** Search for wikidata schemas using MediaWiki's API. + * Receives a Wikidata schema label and fetches schemas in Wikidata + * - wdSchema [String]: Wikidata schema label + * Returns a JSON object after manually querying the schema's page + */ + case GET -> Root / `api` / `verb` / "schemaContent" :? + WdSchemaParameter(wdSchema) => + val uri = wikidataUri.withPath( + Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") + ) + + val req: Request[IO] = Request(method = GET, uri = uri) + for { + eitherValues <- client.run(req).use { + case Status.Successful(r) => + r.attemptAs[String].leftMap(_.message).value + case r => + r.as[String] + .map(b => + s"Request $req failed with status ${r.status.code} and body $b" + .asLeft[String] + ) + } + json: Json = eitherValues.fold( + e => Json.fromFields(List(("error", Json.fromString(e)))), + s => Json.fromFields(List(("result", Json.fromString(s)))) + ) + resp <- Ok(json) + } yield resp + + /** Search for entities in a wikibase using MediaWiki's API. Search based on entity labels. 
+ * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities + * Receives an entity label and a language and fetches entities in the wikibase whose endpoint was selected + * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata + * - label [String]: Label / keywords in the name of the entities searched + * - language [String]: Response desired language + * - limit [Int]: Max number of results + * - continue [Int]: Offset where to continue a search + * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint + */ + case GET -> Root / `api` / `verb` / "searchEntity" :? + EndpointParameter(maybeEndpoint) +& + LabelParameter(label) +& + LanguageParameter(language) +& + LimitParameter(maybelimit) +& + ContinueParameter(maybeContinue) => + val limit: String = maybelimit.getOrElse(defaultLimit.toString) + val continue: String = maybeContinue.getOrElse(defaultContinue.toString) + val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + + logger.debug(s"Wikibase entity search with endpoint: $endpoint") + + val uri = Uri + .unsafeFromString(endpoint) + .withPath(Uri.Path.unsafeFromString("/w/api.php")) + .withQueryParam("action", "wbsearchentities") + .withQueryParam("search", label) + .withQueryParam("language", language) + .withQueryParam("limit", limit) + .withQueryParam("continue", continue) + .withQueryParam("format", "json") + + logger.debug(s"wikidata searchEntity uri: $uri") + + val req: Request[IO] = Request(method = GET, uri = uri) + + for { + eitherValues <- client.run(req).use { + case Status.Successful(r) => + r.attemptAs[Json].leftMap(_.message).value + case r => + r.as[String] + .map(b => + s"Request $req failed with status ${r.status.code} and body $b" + .asLeft[Json] + ) + } + eitherResult = for { + json <- eitherValues + converted <- convertEntities(json) + } yield converted + resp <- Ok(eitherResult.fold(Json.fromString, identity)) + } yield resp + + /** Search for 
properties in a wikibase using MediaWiki's API. Search based on property labels. + * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities + * Receives a property label and a language and fetches properties in the wikibase whose endpoint was selected + * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata + * - label [String]: Label / keywords in the name of the properties searched + * - language [String]: Response desired language + * - limit [Int]: Max number of results + * - continue [Int]: Offset where to continue a search + * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint. + */ + case GET -> Root / `api` / `verb` / "searchProperty" :? + EndpointParameter(maybeEndpoint) +& + LabelParameter(label) +& + LanguageParameter(language) +& + LimitParameter(maybelimit) +& + ContinueParameter(maybeContinue) => + val limit: String = maybelimit.getOrElse(defaultLimit.toString) + val continue: String = maybeContinue.getOrElse(defaultContinue.toString) + val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + + logger.debug(s"Wikibase property search with endpoint: $endpoint") + + val uri = Uri + .fromString(endpoint) + .valueOr(throw _) + .withPath(Uri.Path.unsafeFromString("/w/api.php")) + .withQueryParam("action", "wbsearchentities") + .withQueryParam("search", label) + .withQueryParam("language", language) + .withQueryParam("limit", limit) + .withQueryParam("continue", continue) + .withQueryParam("type", "property") + .withQueryParam("format", "json") + + logger.debug(s"wikidata searchProperty uri: $uri") + + val req: Request[IO] = Request(method = GET, uri = uri) + + for { + eitherValues <- client.run(req).use { + case Status.Successful(r) => + r.attemptAs[Json].leftMap(_.message).value + case r => + r.as[String] + .map(b => + s"Request $req failed with status ${r.status.code} and body $b" + .asLeft[Json] + ) + } + eitherResult = for { + json <- eitherValues + 
converted <- convertEntities(json) + } yield converted + resp <- Ok(eitherResult.fold(Json.fromString, identity)) + } yield resp + + /** Search for lexemes in a wikibase using MediaWiki's API. Search based on lexeme labels. + * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities + * Receives a lexeme label and a language and fetches properties in the wikibase whose endpoint was selected + * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata + * - label [String]: Label / keywords in the name of the lexemes searched + * - language [String]: Response desired language + * - limit [Int]: Max number of results + * - continue [Int]: Offset where to continue a search + * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint. + */ + case GET -> Root / `api` / `verb` / "searchLexeme" :? + EndpointParameter(maybeEndpoint) +& + LabelParameter(label) +& + LanguageParameter(language) +& + LimitParameter(maybelimit) +& + ContinueParameter(maybeContinue) => + val limit: String = maybelimit.getOrElse(defaultLimit.toString) + val continue: String = maybeContinue.getOrElse(defaultContinue.toString) + val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + + logger.debug(s"Wikibase lexeme search with endpoint: $endpoint") + + val uri = Uri + .fromString(endpoint) + .valueOr(throw _) + .withPath(Uri.Path.unsafeFromString("/w/api.php")) + .withQueryParam("action", "wbsearchentities") + .withQueryParam("search", label) + .withQueryParam("language", language) + .withQueryParam("limit", limit) + .withQueryParam("continue", continue) + .withQueryParam("type", "lexeme") + .withQueryParam("format", "json") + + logger.debug(s"wikidata searchLexeme uri: $uri") + + val req: Request[IO] = Request(method = GET, uri = uri) + for { + eitherValues <- client.run(req).use { + case Status.Successful(r) => + r.attemptAs[Json].leftMap(_.message).value + case r => + r.as[String] + .map(b => + s"Request $req 
failed with status ${r.status.code} and body $b" + .asLeft[Json] + ) + } + eitherResult = for { + json <- eitherValues + converted <- convertEntities(json) + } yield converted + resp <- Ok(eitherResult.fold(Json.fromString, identity)) + } yield resp + + /** Search for all the languages used in a wikibase instance. + * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata. + * Returns a JSON object with the array of languages returned by the endpoint. + */ + case GET -> Root / `api` / `verb` / "languages" :? + EndpointParameter(maybeEndpoint) => + val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + logger.debug(s"Wikibase language search with endpoint: $endpoint") + + val uri = Uri + .fromString(endpoint) + .valueOr(throw _) + .withPath(Uri.Path.unsafeFromString("/w/api.php")) + .withQueryParam("action", "query") + .withQueryParam("meta", "wbcontentlanguages") + .withQueryParam("wbclcontext", "term") + .withQueryParam("wbclprop", "code|autonym") + .withQueryParam("format", "json") + + val req: Request[IO] = Request(method = GET, uri = uri) + for { + eitherValues <- client.run(req).use { + case Status.Successful(r) => + r.attemptAs[Json].leftMap(_.message).value + case r => + r.as[String] + .map(b => + s"Request $req failed with status ${r.status.code} and body $b" + .asLeft[Json] + ) + } + eitherResult = for { + json <- eitherValues + converted <- convertLanguages(json) + } yield converted + resp <- Ok( + eitherResult.fold(Json.fromString, identity) + ) + } yield resp + + /** Execute a given SPARQL query to a given SPARQL endpoint of a wikibase instance. + * Receives a target endpoint and the query text. + * - endpoint [String]: SPARQL query endpoint. 
Defaults to Wikidata + * - query [String]: SPARQL query to be run + * Returns a JSON object with the query results: + * - head [Object]: Query metadata + * - vars: [Array]: Query variables + * - results [Object]: Query results + * - bindings: [Array]: Query results, each item being an object mapping each variable to its value + */ + case req @ POST -> Root / `api` / `verb` / "query" => + req.decode[Multipart[IO]] { m => + { + val partsMap = PartsMap(m.parts) + for { + optQuery <- partsMap.optPartValue("query") + optEndpoint <- partsMap.optPartValue("endpoint") + endpoint = optEndpoint.getOrElse(wikidataQueryUri.toString()) + query = optQuery.getOrElse("") + req: Request[IO] = + Request( + method = GET, + uri = Uri + .fromString(endpoint) + .valueOr(throw _) + .withQueryParam("query", query) + ) + .withHeaders( + `Accept`(MediaType.application.`json`) + ) + eitherValue <- client.run(req).use { + case Status.Successful(r) => + r.attemptAs[Json].leftMap(_.message).value + case r => + r.as[String] + .map(b => + s"Request $req failed with status ${r.status.code} and body $b" + .asLeft[Json] + ) + } + resp <- Ok(eitherValue.fold(Json.fromString, identity)) + } yield resp + } + } + + /** Attempts to extract a schema (ShEx) from a given entity present in wikidata. 
+ * Receives an entity URI: + * - entity [String]: Unique address of the entity in wikidata + * Returns a JSON object with the extraction results: + * - entity [String]: URI of the entity whose information we searched + * - result [String]: Extracted schema + */ + case req @ POST -> Root / `api` / `verb` / "extract" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + val r: EitherT[IO, String, Response[IO]] = for { + label <- EitherT(partsMap.eitherPartValue("entity")) + info <- either2es[WikibaseEntity](uriToEntity(label)) + _ <- { + logger.debug(s"Extraction URI: ${info.uri}"); + ok_esf[Unit, IO](()) + } + strRdf <- io2es(redirectClient.expect[String](info.uri)) + eitherInferred <- io2es( + RDFAsJenaModel + .fromString(strRdf, "TURTLE") + .flatMap( + _.use(rdf => + for { + rdfSerialized <- rdf.serialize("TURTLE") + nodeSelector = RDFNodeSelector(IRI(label)) + inferred <- SchemaInfer.runInferSchema( + rdf, + nodeSelector, + "ShEx", + IRI(s"http://example.org/Shape_${info.localName}"), + InferOptions.defaultOptions.copy(maxFollowOn = 3) + ) + } yield inferred + ) + ) + ) + pair <- either2es[(Schema, ResultShapeMap)](eitherInferred) + shExCStr <- io2es({ + val (schema, _) = pair + schema.serialize("SHEXC") + }) + _ <- { + logger.trace(s"ShExC str: $shExCStr"); + ok_es[Unit](()) + } + resp <- io2es( + Ok( + Json.fromFields( + List( + ("entity", Json.fromString(label)), + ("result", Json.fromString(shExCStr)) + ) + ) + ) + ) + } yield resp + for { + either <- r.value + resp <- either.fold( + err => errorResponseJson(err, InternalServerError), + r => IO.pure(r) + ) + } yield resp + } + + // TODO: This one doesn't work. It gives a timeout response + /** Attempts to extract a schema (ShEx) from a given entity present in wikidata using "shexer". 
+ * See https://github.com/DaniFdezAlvarez/shexer + * Receives an entity URI: + * - entity [String]: Unique address of the entity in wikidata + * Returns a JSON object with the extraction results: + * - entity [String]: URI of the entity whose information we searched + * - result [String]: Extracted schema + */ + case req @ POST -> Root / `api` / `verb` / "shexer" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + val r: EitherT[IO, String, Response[IO]] = for { + label <- EitherT(partsMap.eitherPartValue("entity")) + jsonParams <- either2es[Json](mkShexerParams(label)) + postRequest = Request[IO]( + method = POST, + uri = uri"http://156.35.94.158:8081/shexer" + ).withHeaders(`Content-Type`(MediaType.application.`json`)) + .withEntity[Json](jsonParams) + _ <- { + logger.debug(s"URI: ${jsonParams.spaces2}"); + ok_es[Unit](()) + } + result <- f2es(redirectClient.expect[Json](postRequest)) + _ <- { + logger.trace(s"Result\n${result.spaces2}"); + ok_es[Unit](()) + } + resp <- f2es(Ok(result)) + } yield resp + for { + either <- r.value + resp <- either.fold( + err => errorResponseJson(err, InternalServerError), + r => IO.pure(r) + ) + } yield resp + } + + /** Validate entities in a wikibase using wikidata schemas or shape expressions. + * Receives several data: + * - endpoint [String]: Endpoint of the target wikibase instance. Defaults to Wikidata + * - entity [String]: URI of the entity to be validated + * - entitySchema [String]: (Wikidata schema only) Identifier of the wikidata schema to be used + * - schema [String]: (ShEx schema only) Raw contents of the schema supplied by the user + * - schemaFormat [String]: (ShEx schema only) Format of the schema supplied by the user + * - schemaEngine [String]: Schema engine to be used (defaults to ShEx) + * - shape [String]: Shape of the schema which will be compared against the entity + * Returns a JSON object with the results (pending). 
+ */ + case req @ POST -> Root / `api` / `verb` / "validate" => + logger.debug(s"Wikidata validate request: $req") + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + val r: IO[Response[IO]] = for { + eitherItem <- partsMap.eitherPartValue("entity") + _ <- { + logger.debug(eitherItem.toString); + IO.pure(()) + } + item <- ioFromEither(eitherItem) + _ <- { + logger.debug(item); + IO.pure(()) + } + info <- ioFromEither(uriToEntity2(item)) + _ <- { + logger.debug(info.toString); + IO.pure(()) + } + pair <- WikibaseSchemaParam.mkSchema(partsMap, None, client) + _ <- { + logger.debug(pair.toString()); + IO.pure(()) + } + (schema, wbp) = pair + iriItem <- ioFromEither(IRI.fromString(info.sourceUri)) + shapeMap <- ioFromEither(ShapeMap.empty.add(iriItem, Start)) + triggerMode = ShapeMapTrigger(shapeMap) + result <- for { + res1 <- WikibaseRDF.wikidata + res2 <- RDFAsJenaModel.empty + vv <- (res1, res2).tupled.use { case (rdf, builder) => + for { + r <- schema.validate(rdf, triggerMode, builder) + json <- schemaResult2json(r) + } yield json + } + } yield vv + resp <- Ok(result) + } yield resp + r.attempt.flatMap( + _.fold( + s => errorResponseJson(s.getMessage, InternalServerError), + IO.pure + ) + ) + } + } + +} + +object WikibaseService { + + /** Service factory + * + * @param client Underlying http4s client + * @return A new Wikidata Service + */ + def apply(client: Client[IO]): WikibaseService = + new WikibaseService(client) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala new file mode 100644 index 00000000..14f9852b --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala @@ -0,0 +1,146 @@ +package es.weso.rdfshape.server.api.routes.wikibase.service + +import cats.implicits._ +import 
io.circe.Json +import io.circe.parser.parse + +private[service] class WikibaseServiceUtils {} + +/** Static utilities used by the Wikibase service + */ +object WikibaseServiceUtils { + + /** For a given entity, create the JSON structure accepted by the Shexer API + * + * @param entity Entity to be examined by Shexer + * @return JSON object with the structure accepted by the Shexer API, adapted for the given entity + */ + def mkShexerParams(entity: String): Either[String, Json] = for { + prefixes <- wikidataPrefixes + } yield Json.fromFields( + List( + ("prefixes", prefixes), + ( + "shape_map", + Json.fromString( + "SPARQL'SELECT DISTINCT ?virus WHERE { VALUES ?virus { wd:Q82069695 } }'@ " + ) + ), + ("endpoint", Json.fromString("https://query.wikidata.org/sparql")), + ("all_classes", Json.False), + ("query_depth", Json.fromInt(1)), + ("threshold", Json.fromInt(0)), + ( + "instantiation_prop", + Json.fromString("http://www.wikidata.org/prop/direct/P31") + ), + ("disable_comments", Json.True), + ("shape_qualifiers_mode", Json.True), + ( + "namespaces_for_qualifiers", + Json.arr(Json.fromString("http://www.wikidata.org/prop/")) + ) + ) + ) + + /** @return JSON containing all the prefixed used by Wikidata + */ + def wikidataPrefixes: Either[String, Json] = { + val json = + """{ + "http://wikiba.se/ontology#": "wikibase", + "http://www.bigdata.com/rdf#": "bd", + "http://www.wikidata.org/entity/": "wd", + "http://www.wikidata.org/prop/direct/": "wdt", + "http://www.wikidata.org/prop/direct-normalized/": "wdtn", + "http://www.wikidata.org/entity/statement/": "wds", + "http://www.wikidata.org/prop/": "p", + "http://www.wikidata.org/reference/": "wdref", + "http://www.wikidata.org/value/": "wdv", + "http://www.wikidata.org/prop/statement/": "ps", + "http://www.wikidata.org/prop/statement/value/": "psv", + "http://www.wikidata.org/prop/statement/value-normalized/": "psn", + "http://www.wikidata.org/prop/qualifier/": "pq", + "http://www.wikidata.org/prop/qualifier/value/": 
"pqv", + "http://www.wikidata.org/prop/qualifier/value-normalized/": "pqn", + "http://www.wikidata.org/prop/reference/": "pr", + "http://www.wikidata.org/prop/reference/value/": "prv", + "http://www.wikidata.org/prop/reference/value-normalized/": "prn", + "http://www.wikidata.org/prop/novalue/": "wdno" + }""" + parse(json).leftMap(e => s"Error parsing prefixes: $e") + } + + /** Convert the response from Wikibase "wbcontentlanguages" to a more convenient JSON structure + * @param json Input JSON, as received from Wikibase + * @return Either a JSON representation of the languages in the Wikibase, or an error message + */ + def convertLanguages(json: Json): Either[String, Json] = for { + languagesObj <- json.hcursor + .downField("query") + .downField("wbcontentlanguages") + .focus + .toRight(s"Error obtaining query/wbcontentlanguages at ${json.spaces2}") + keys <- languagesObj.hcursor.keys.toRight( + s"Error obtaining values from languages: ${languagesObj.spaces2}" + ) + converted = Json.fromValues( + keys.map(key => + Json.fromFields( + List( + ( + "label", + languagesObj.hcursor + .downField(key) + .downField("code") + .focus + .getOrElse(Json.Null) + ), + ( + "name", + languagesObj.hcursor + .downField(key) + .downField("autonym") + .focus + .getOrElse(Json.Null) + ) + ) + ) + ) + ) + } yield { + converted + } + + /** Convert the response from Wikibase "wbsearchentities" to a more convenient JSON structure + * @param json Input JSON, as received from Wikibase + * @return Either a JSON representation of the entities in the Wikibase, or an error message + */ + def convertEntities(json: Json): Either[String, Json] = for { + entities <- json.hcursor + .downField("search") + .values + .toRight("Error obtaining search value") + converted = Json.fromValues( + entities.map((value: Json) => + Json.fromFields( + List( + ( + "label", + value.hcursor.downField("label").focus.getOrElse(Json.Null) + ), + ("id", value.hcursor.downField("id").focus.getOrElse(Json.Null)), + ( + 
"uri", + value.hcursor.downField("concepturi").focus.getOrElse(Json.Null) + ), + ( + "descr", + value.hcursor.downField("description").focus.getOrElse(Json.Null) + ) + ) + ) + ) + ) + } yield converted +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala index a43825c9..534eb1bf 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/OptEitherF.scala @@ -4,8 +4,9 @@ import cats._ import cats.data._ import cats.effect._ import cats.implicits._ +import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException -/** Static utility methods to help work with Optional and Either types +/** Static utility methods to help work with Optional, Either or IO types */ object OptEitherF { @@ -51,4 +52,13 @@ object OptEitherF { case Some(value) => EitherT.fromEither(function(value).map(Some(_))) } + /** Given an Either, try to obtain an IO wrapping its value + * @param either Input Either + * @tparam A Type of value contained in the either and result + * @return IO wrapping the Either value if right, an IO error if left + */ + def ioFromEither[A](either: Either[String, A]): IO[A] = { + either.fold(err => IO.raiseError(WikibaseServiceException(err)), IO.pure) + } + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala index cac0b3b7..4ba66e93 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -15,33 +15,27 @@ object IncomingRequestParameters { lazy val dataFormat = "dataFormat" lazy 
val targetDataFormat = "targetDataFormat" - lazy val schema = "schema" - lazy val schemaURL = "schemaURL" - lazy val schemaFile = "schemaFile" - lazy val schemaFormat = "schemaFormat" - lazy val schemaFormatTextArea = "schemaFormatTextArea" - lazy val schemaFormatUrl = "schemaFormatUrl" - lazy val schemaFormatFile = "schemaFormatFile" - lazy val schemaEngine = "schemaEngine" - lazy val targetSchemaFormat = "targetSchemaFormat" - lazy val targetSchemaEngine = "targetSchemaEngine" - lazy val inference = "inference" - lazy val triggerMode = "triggerMode" + lazy val schema = "schema" + lazy val schemaURL = "schemaURL" + lazy val schemaFile = "schemaFile" + lazy val schemaFormat = "schemaFormat" + lazy val schemaEngine = "schemaEngine" + lazy val targetSchemaFormat = "targetSchemaFormat" + lazy val targetSchemaEngine = "targetSchemaEngine" + lazy val inference = "inference" + lazy val triggerMode = "triggerMode" lazy val shape = "shape" lazy val entity = "entity" lazy val node = "node" lazy val nodeSelector = "nodeSelector" - lazy val shapeMap = "shapeMap" - lazy val shape_map = "shape-map" - lazy val shapeMapURL = "shapeMapURL" - lazy val shapeMapFile = "shapeMapFile" - lazy val shapeMapFormat = "shapeMapFormat" - lazy val shapeMapFormatTextArea = "shapeMapFormatTextArea" - lazy val shapeMapFormatUrl = "shapeMapFormatUrl" - lazy val shapeMapFormatFile = "shapeMapFormatFile" - lazy val targetShapeMapFormat = "targetShapeMapFormat" + lazy val shapeMap = "shapeMap" + lazy val shape_map = "shape-map" + lazy val shapeMapURL = "shapeMapURL" + lazy val shapeMapFile = "shapeMapFile" + lazy val shapeMapFormat = "shapeMapFormat" + lazy val targetShapeMapFormat = "targetShapeMapFormat" lazy val query = "query" lazy val queryURL = "queryURL" @@ -118,21 +112,6 @@ object IncomingRequestParameters { val name: String = schemaFormat } - object SchemaFormatTextAreaParameter - extends OptionalQueryParamDecoderMatcher[String](schemaFormatTextArea) { - val name: String = 
schemaFormatTextArea - } - - object SchemaFormatUrlParameter - extends OptionalQueryParamDecoderMatcher[String](schemaFormatUrl) { - val name: String = schemaFormatUrl - } - - object SchemaFormatFileParameter - extends OptionalQueryParamDecoderMatcher[String](schemaFormatFile) { - val name: String = schemaFormatFile - } - object SchemaEngineParameter extends OptionalQueryParamDecoderMatcher[String](schemaEngine) { val name: String = schemaEngine @@ -202,21 +181,6 @@ object IncomingRequestParameters { val name: String = shapeMapFormat } - object ShapeMapFormatTextAreaParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMapFormatTextArea) { - val name: String = shapeMapFormatTextArea - } - - object ShapeMapFormatUrlParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMapFormatUrl) { - val name: String = shapeMapFormatUrl - } - - object ShapeMapFormatFileParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMapFormatFile) { - val name: String = shapeMapFormatFile - } - object TargetShapeMapFormatParameter extends OptionalQueryParamDecoderMatcher[String](targetShapeMapFormat) { val name: String = targetShapeMapFormat diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala index 874a6b26..f90a5f53 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala @@ -2,6 +2,8 @@ package es.weso.rdfshape.server.api.utils.parameters import cats.effect.IO import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.format.Format import fs2.text.utf8Decode import org.http4s.multipart.Part @@ -26,18 +28,6 @@ case class PartsMap private (map: Map[String, Part[IO]]) { } - /** Extract the value from a request parameter, decoding 
it and handling errors - * - * @param key Parameter key - * @return Optionally, the String contents of the parameter - */ - def optPartValue(key: String): IO[Option[String]] = - map.get(key) match { - case Some(part) => - part.body.through(utf8Decode).compile.foldMonoid.map(Some.apply) - case None => IO.pure(None) - } - /** Shorthand for extracting values from a request parameter with an informational error message * * @param key Parameter key @@ -52,9 +42,21 @@ case class PartsMap private (map: Map[String, Part[IO]]) { ) case Some(s) => Right(s) } + + /** Extract the value from a request parameter, decoding it and handling errors + * + * @param key Parameter key + * @return Optionally, the String contents of the parameter + */ + def optPartValue(key: String): IO[Option[String]] = + map.get(key) match { + case Some(part) => + part.body.through(utf8Decode).compile.foldMonoid.map(Some.apply) + case None => IO.pure(None) + } } -object PartsMap { +object PartsMap extends LazyLogging{ /** Instantiate a new {@link PartsMap} given a list of the inner parts * @@ -65,4 +67,26 @@ object PartsMap { PartsMap(ps.filter(_.name.isDefined).map(p => (p.name.get, p)).toMap) } + /** Try to build a Format object from a request's parameters + * + * @param parameter Name of the parameter with the format name + * @param parameterMap Request parameters + * @return Optionally, a new generic Format instance with the format + */ + def getFormat( + parameter: String, + parameterMap: PartsMap + ): IO[Option[Format]] = { + for { + maybeFormatName <- parameterMap.optPartValue(parameter) + } yield maybeFormatName match { + case None => + logger.error(s"No valid format found. 
No parameter \"$parameter\"") + None + case Some(formatNameParsed) => + logger.info(s"Format value \"$formatNameParsed\" found in parameter \"$parameter\"") + Format.fromString(formatNameParsed).toOption + } + } + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala index 18198b77..77862d09 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala @@ -8,7 +8,7 @@ package es.weso.rdfshape.server.utils.error.exceptions final case class SSLContextCreationException( private val message: String, private val cause: Throwable -) extends Exception(message, cause) {} +) extends Exception(message, cause) object SSLContextCreationException { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala new file mode 100644 index 00000000..02a50618 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala @@ -0,0 +1,12 @@ +package es.weso.rdfshape.server.utils.error.exceptions + +import scala.util.control.NoStackTrace + +/** Custom exception thrown when a failure occurs when operating on Wikibase data + * + * @param message Reason/explanation of why the exception occurred + */ +final case class WikibaseServiceException( + private val message: String +) extends RuntimeException(message) + with NoStackTrace From 70a9c22708bce2f62d1c5c839b52f671e760244a Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Wed, 8 Sep 2021 13:36:10 +0200 Subject: [PATCH 18/32] Documented the accepted 
parameters. --- .../endpoint/service/EndpointService.scala | 13 +- .../IncomingRequestParameters.scala | 125 ++++++++++++++---- 2 files changed, 109 insertions(+), 29 deletions(-) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index d9c90e02..bfc9f0a2 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -127,7 +127,18 @@ class EndpointService(client: Client[IO]) } - // TODO: document + /** Attempt to contact a wikibase endpoint and return the data (triplets) about a node in it. + * Receives a JSON object with the input endpoint, node and limits: + * - endpoint [String]: Target endpoint + * - node [String]: Node identifier in the target wikibase + * - limit [Int]: Max number of results + * Returns a JSON object with the endpoint response: + * - endpoint [String]: Target endpoint + * - node [String]: Node identifier in the target wikibase + * - children [Array]: List of returned objects, each being a triplet: + * - pred: [String]: Predicate identifier in the target wikibase + * - values: [Array]: List of raw values for the entity and predicate + */ case GET -> Root / `api` / `verb` / "outgoing" :? 
EndpointParameter(optEndpoint) +& NodeParameter(optNode) +& diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala index 4ba66e93..aedcd7ff 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -8,6 +8,8 @@ import org.http4s.dsl.io.{ /** Definitions for all the possible parameters that may come from client requests */ object IncomingRequestParameters { + + // String constants representing each parameter name expected by the server lazy val data = "data" lazy val compoundData = "compoundData" lazy val dataURL = "dataURL" @@ -63,226 +65,293 @@ object IncomingRequestParameters { lazy val continue = "continue" lazy val withDot = "withDot" + /** Parameter expected to contain raw RDF data (URL encoded) + */ object DataParameter extends OptionalQueryParamDecoderMatcher[String](data) { val name: String = data } + /** Parameter expected to contain a compound of raw RDF data (URL encoded), formed by 2 or more RDF sources + */ object CompoundDataParameter extends OptionalQueryParamDecoderMatcher[String](compoundData) { val name: String = compoundData } + /** Parameter expected to contain a URL where RDF data is located + */ object DataURLParameter extends OptionalQueryParamDecoderMatcher[String](dataURL) { val name: String = dataURL } + /** Parameter expected to contain a file where RDF data is located + */ object DataFileParameter extends OptionalQueryParamDecoderMatcher[String](dataFile) { val name: String = dataFile } + /** Parameter expected to contain an RDF format name, referencing the user's data format + */ object DataFormatParameter extends OptionalQueryParamDecoderMatcher[String](dataFormat) { val name: String = 
dataFormat } + /** Parameter expected to contain an RDF format name, referencing the target format of a conversion + */ object TargetDataFormatParameter extends OptionalQueryParamDecoderMatcher[String](targetDataFormat) { val name: String = targetDataFormat } + /** Parameter expected to contain raw schema data (URL encoded) + */ object SchemaParameter extends OptionalQueryParamDecoderMatcher[String](schema) { val name: String = schema } + /** Parameter expected to contain a URL where a validation schema is located + */ object SchemaURLParameter extends OptionalQueryParamDecoderMatcher[String](schemaURL) { val name: String = schemaURL } + /** Parameter expected to contain the contents a file where a validation schema is located + */ object SchemaFileParameter extends OptionalQueryParamDecoderMatcher[String](schemaFile) { val name: String = schemaFile } + /** Parameter expected to contain an schema format name, referencing the user's schema format + */ object SchemaFormatParameter extends OptionalQueryParamDecoderMatcher[String](schemaFormat) { val name: String = schemaFormat } + /** Parameter expected to contain an schema engine name, referencing the user's desired schema engine + */ object SchemaEngineParameter extends OptionalQueryParamDecoderMatcher[String](schemaEngine) { val name: String = schemaEngine } + /** Parameter expected to contain an schema format name, referencing the target format of a conversion + */ object TargetSchemaFormatParameter extends OptionalQueryParamDecoderMatcher[String](targetSchemaFormat) { val name: String = targetSchemaFormat } + /** Parameter expected to contain an schema engine name, referencing the target engine of a conversion + */ object TargetSchemaEngineParameter extends OptionalQueryParamDecoderMatcher[String](targetSchemaEngine) { val name: String = targetSchemaEngine } + /** Parameter expected to contain the inference applied in data validations + */ object InferenceParameter extends 
OptionalQueryParamDecoderMatcher[String](inference) { val name: String = inference } + /** Parameter expected to contain the trigger mode present applied in data validations + */ object TriggerModeParameter extends OptionalQueryParamDecoderMatcher[String](triggerMode) { val name: String = triggerMode } + /** Parameter expected to contain a shape name or identifier on wikibase operations + */ object ShapeParameter extends OptionalQueryParamDecoderMatcher[String](shape) { val name: String = shape } + /** Parameter expected to contain an entity name or identifier on wikibase operations + */ object EntityParameter extends OptionalQueryParamDecoderMatcher[String](entity) { val name: String = entity } + /** Parameter expected to contain a node name or identifier on SPARQL query operations + */ object NodeParameter extends OptionalQueryParamDecoderMatcher[String](node) { val name: String = node } + /** Parameter expected to contain a node name or identifier on schema-extraction operations + */ object NodeSelectorParameter extends OptionalQueryParamDecoderMatcher[String](nodeSelector) { val name: String = nodeSelector } + /** Parameter expected to contain raw shapemap data (URL encoded) + */ object ShapeMapTextParameter extends OptionalQueryParamDecoderMatcher[String](shapeMap) { val name: String = shapeMap } + /** Parameter expected to contain raw shapemap data (URL encoded) + */ object ShapeMapParameterAlt extends OptionalQueryParamDecoderMatcher[String](shape_map) { val name: String = shape_map } + /** Parameter expected to contain a URL where a shapemap is located + */ object ShapeMapUrlParameter extends OptionalQueryParamDecoderMatcher[String](shapeMapURL) { val name: String = shapeMapURL } + /** Parameter expected to contain a file where a shapemap is located + */ object ShapeMapFileParameter extends OptionalQueryParamDecoderMatcher[String](shapeMapFile) { val name: String = shapeMapFile } + /** Parameter expected to contain a shapemap format name, referencing the 
user's shapemap format + */ object ShapeMapFormatParameter extends OptionalQueryParamDecoderMatcher[String](shapeMapFormat) { val name: String = shapeMapFormat } + /** Parameter expected to contain a shapemap format name, referencing the target format of a conversion + */ object TargetShapeMapFormatParameter extends OptionalQueryParamDecoderMatcher[String](targetShapeMapFormat) { val name: String = targetShapeMapFormat } + /** Parameter expected to contain a raw SPARQL query (URL encoded) + */ object QueryParameter extends OptionalQueryParamDecoderMatcher[String](query) { val name: String = query } + /** Parameter expected to contain a URL where a SPARQL query is located + */ object QueryURLParameter extends OptionalQueryParamDecoderMatcher[String](queryURL) { val name: String = queryURL } + /** Parameter expected to contain a file where a SPARQL query is located + */ object QueryFileParameter extends OptionalQueryParamDecoderMatcher[String](queryFile) { val name: String = queryFile } + /** Parameter expected to contain a raw endpoint location + */ object EndpointParameter extends OptionalQueryParamDecoderMatcher[String](endpoint) { val name: String = endpoint } + /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) + * in data-related operations + */ object ActiveDataTabParameter extends OptionalQueryParamDecoderMatcher[String](activeDataTab) { val name: String = activeDataTab } + /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) + * in schema-related operations + */ object ActiveSchemaTabParameter extends OptionalQueryParamDecoderMatcher[String](activeSchemaTab) { val name: String = activeSchemaTab } + /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) + * in shapemap-related operations + */ object 
ActiveShapeMapTabParameter extends OptionalQueryParamDecoderMatcher[String](activeShapeMapTab) { val name: String = activeShapeMapTab } + /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) + * in query-related operations + */ object ActiveQueryTabParameter extends OptionalQueryParamDecoderMatcher[String](activeQueryTab) { val name: String = activeQueryTab } + /** Parameter expected to contain a valid identifier/name/label of a wikidata entity + * in wikidata-related operations + */ object WdEntityParameter extends QueryParamDecoderMatcher[String](wdEntity) { val name: String = wdEntity } + /** Parameter expected to contain a valid identifier/name/label of a wikidata schema + * in wikidata-related operations + */ object WdSchemaParameter extends QueryParamDecoderMatcher[String](wdSchema) { val name: String = wdSchema } - object WithDotParameter - extends OptionalQueryParamDecoderMatcher[Boolean](withDot) { - val name: String = withDot - } - - object OptView extends OptionalQueryParamDecoderMatcher[String](view) { - val name: String = view - } - - object ExamplesParameter - extends OptionalQueryParamDecoderMatcher[String](examples) { - val name: String = examples - } - - object OptExamplesParameter - extends OptionalQueryParamDecoderMatcher[String](examples) { - val name: String = examples - } - - object ManifestURLParameter - extends OptionalQueryParamDecoderMatcher[String](manifestURL) { - val name: String = manifestURL - } - + /** Parameter expected to contain a valid language code, normally for + * wikidata-related operations that return data in a user-selected language + * + * @note See {@linkplain https:// en.wikipedia.org / wiki / List_of_ISO_639 - 1 _codes} + */ object LanguageParameter extends QueryParamDecoderMatcher[String](language) { val name: String = language } + /** Parameter expected to contain a valid identifier/name/label of a wikibase entity + * in 
wikibase-related operations + */ object LabelParameter extends QueryParamDecoderMatcher[String](label) { val name: String = label } + /** Parameter expected to contain a valid URL + * Used for multiple operations (permalinks, fetching information...) + */ object UrlParameter extends QueryParamDecoderMatcher[String](url) { val name: String = url } + /** Parameter expected to contain a permalink identifier for the permalink service to fetch its corresponding link + */ object UrlCodeParameter extends QueryParamDecoderMatcher[String](urlCode) { val name: String = urlCode } - object HostNameParameter extends QueryParamDecoderMatcher[String](hostname) { - val name: String = hostname - } - + /** Parameter expected to contain a positive numeric value to serve as a limit of a query/search operation, normally + * in wikibase-related operations + */ object LimitParameter extends OptionalQueryParamDecoderMatcher[String](limit) { val name: String = limit } + /** Parameter expected to contain a positive numeric value to serve as the offset where to continue a search operation, normally + * in wikibase-related operations + */ object ContinueParameter extends OptionalQueryParamDecoderMatcher[String](continue) { val name: String = continue From 0fcd2cca5f69a89678d1de576d5fc16bf9c5ac0a Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Wed, 8 Sep 2021 17:53:48 +0200 Subject: [PATCH 19/32] Refactor shapemap logic. 
--- .../routes/data/logic/DataConversion.scala | 2 +- .../api/routes/data/logic/DataExtract.scala | 6 +- .../api/routes/data/logic/DataInfo.scala | 10 +-- .../schema/logic/SchemaConversionResult.scala | 14 +-- .../api/routes/shapemap/logic/ShapeMap.scala | 90 +++++++++++-------- .../shapemap/logic/ShapeMapInfoResult.scala | 65 -------------- .../shapemap/service/ShapeMapService.scala | 27 +++--- .../api/utils/parameters/PartsMap.scala | 2 +- .../exceptions/JsonConversionException.scala | 12 +++ .../SSLContextCreationException.scala | 2 +- .../exceptions/WikibaseServiceException.scala | 2 +- .../server/utils/json/JsonUtils.scala | 12 +-- 12 files changed, 104 insertions(+), 140 deletions(-) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/JsonConversionException.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala index 7d4b5652..09023035 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala @@ -47,7 +47,7 @@ final case class DataConversion( ("dataFormat", Json.fromString(dataFormat.name)), ("targetDataFormat", Json.fromString(targetFormat)) ) ++ - maybeField(data, "data", Json.fromString) + maybeField("data", data, Json.fromString) ) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala index 36811308..2b8882a6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala @@ -55,15 +55,15 @@ final case class DataExtract private ( ("schemaFormat", Json.fromString(schemaFormat)), ("schemaEngine", Json.fromString(engine)) ) ++ - maybeField(optData, "data", Json.fromString) ++ + maybeField("data", optData, Json.fromString) ++ maybeField( - optDataFormat, "dataFormat", + optDataFormat, (df: DataFormat) => Json.fromString(df.name) ) ++ maybeField( - optResultShapeMap, "resultShapeMap", + optResultShapeMap, (r: ResultShapeMap) => Json.fromString(r.toString) ) ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala index f7d47a9d..3da3ed8a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala @@ -40,17 +40,17 @@ final case class DataInfo private ( def toJson: Json = { Json.fromFields( List(("message", Json.fromString(message))) ++ - maybeField(data, "data", Json.fromString) ++ + maybeField("data", data, Json.fromString) ++ maybeField( - dataFormat, "dataFormat", + dataFormat, (df: DataFormat) => Json.fromString(df.name) ) ++ - maybeField(numberOfStatements, "numberOfStatements", Json.fromInt) ++ - maybeField(prefixMap, "prefixMap", prefixMap2Json) ++ + maybeField("numberOfStatements", numberOfStatements, Json.fromInt) ++ + maybeField("prefixMap", prefixMap, prefixMap2Json) ++ maybeField( - predicates, "predicates", + predicates, (preds: Set[IRI]) => Json.fromValues(preds.map(iri2Json)) ) ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala index a4830839..5cb525fc 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala @@ -35,15 +35,15 @@ case class SchemaConversionResult( List( ("message", Json.fromString(msg)) ) ++ - maybeField(schema, "schema", Json.fromString) ++ - maybeField(schemaFormat, "schemaFormat", Json.fromString) ++ - maybeField(schemaEngine, "schemaEngine", Json.fromString) ++ - maybeField(targetSchemaFormat, "targetSchemaFormat", Json.fromString) ++ - maybeField(targetSchemaEngine, "targetSchemaEngine", Json.fromString) ++ - maybeField(result, "result", Json.fromString) ++ + maybeField("schema", schema, Json.fromString) ++ + maybeField("schemaFormat", schemaFormat, Json.fromString) ++ + maybeField("schemaEngine", schemaEngine, Json.fromString) ++ + maybeField("targetSchemaFormat", targetSchemaFormat, Json.fromString) ++ + maybeField("targetSchemaEngine", targetSchemaEngine, Json.fromString) ++ + maybeField("result", result, Json.fromString) ++ maybeField( - resultShapeMap, "shapeMap", + resultShapeMap, (sm: ShapeMap) => sm.toString.asJson ) ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index fcfabe35..c5bea5ee 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -2,16 +2,21 @@ package es.weso.rdfshape.server.api.routes.shapemap.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.format.{Compact, ShapeMapFormat} import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import 
es.weso.rdfshape.server.utils.error.exceptions.JsonConversionException +import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents -import es.weso.shapemaps.{Compact, ShapeMapFormat, ShapeMap => ShapeMapW} +import es.weso.shapemaps.{ShapeMap => ShapeMapW} +import io.circe.Json -/** Data class representing a ShapeMap and its current source +/** Data class representing a ShapeMap and its current source. + * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). * * @param shapeMap Shapemap raw text * @param shapeMapFormat Shapemap format - * @param targetShapeMapFormat Shapemap target format + * @param targetShapeMapFormat Shapemap target format (only present in conversion operations) * @param activeShapeMapTab Active tab, used to know which source the shapemap comes from */ sealed case class ShapeMap private ( @@ -25,13 +30,39 @@ sealed case class ShapeMap private ( * * @return A ShapeMap instance used by WESO libraries in validation */ - val innerShapeMap: Either[String, ShapeMapW] = { + lazy val innerShapeMap: Either[String, ShapeMapW] = { ShapeMapW .fromString(shapeMap, shapeMapFormat.name) match { case Left(errorList) => Left(errorList.toList.mkString("\n")) - case Right(sm) => Right(sm) + case Right(shapeMap) => Right(shapeMap) } } + + /** JSON representation of this shapemap to be used in API responses + * + * @return JSON information of the shapemap (raw content, format, JSON structure) or an + */ + @throws(classOf[JsonConversionException]) + lazy val shapeMapJson: Json = { + innerShapeMap match { + case Left(err) => throw JsonConversionException(err) + case Right(dataShapeMap) => + Json.fromFields( + maybeField("shapeMap", Some(shapeMap), Json.fromString) ++ + maybeField( + "shapeMapFormat", + Some(shapeMapFormat), + (format: ShapeMapFormat) => Json.fromString(format.name) + ) ++ + maybeField( + "shapeMapJson", + 
Some(dataShapeMap.toJson), + identity[Json] + ) + ) + } + + } } private[api] object ShapeMap extends LazyLogging { @@ -57,11 +88,11 @@ private[api] object ShapeMap extends LazyLogging { shapeMapStr <- partsMap.optPartValue(ShapeMapTextParameter.name) shapeMapUrl <- partsMap.optPartValue(ShapeMapUrlParameter.name) shapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) - optShapeMapFormat <- getShapeMapFormat( + shapeMapFormat <- getShapeMapFormat( ShapeMapFormatParameter.name, partsMap ) - optTargetShapeMapFormat <- getShapeMapFormat( + targetShapeMapFormat <- getShapeMapFormat( TargetShapeMapFormatParameter.name, partsMap ) @@ -73,12 +104,6 @@ private[api] object ShapeMap extends LazyLogging { s"Getting ShapeMap from params. ShapeMap tab: $activeShapeMapTab" ) - // Get the shapemap formats or use the defaults - shapeMapFormat = optShapeMapFormat.getOrElse(defaultShapeMapFormat) - targetShapeMapFormat = optTargetShapeMapFormat.getOrElse( - defaultShapeMapFormat - ) - // Create the shapemap depending on the client's selected method maybeShapeMap: Either[String, ShapeMap] = activeShapeMapTab.getOrElse( ShapeMapTab.defaultActiveShapeMapTab @@ -136,42 +161,35 @@ private[api] object ShapeMap extends LazyLogging { } yield maybeShapeMap } - /** Given a list of query parameters and a parameter name, try to create a ShapeMapFormat instance from the format name contained in the parameter + /** Try to build a {@link es.weso.rdfshape.server.api.format.ShapeMapFormat} object from a request's parameters * - * @param name Query parameter containing the format name - * @param partsMap Query parameters - * @return Optionally, a ShapeMapFormat instance corresponding to the shapemap format specified in the query parameters + * @param parameter Name of the parameter with the format name + * @param parameterMap Request parameters + * @return The ShapeMap format found or the default one */ private def getShapeMapFormat( - name: String, - partsMap: PartsMap - ): 
IO[Option[ShapeMapFormat]] = + parameter: String, + parameterMap: PartsMap + ): IO[ShapeMapFormat] = { for { - maybeFormat <- partsMap.optPartValue(name) + maybeFormat <- PartsMap.getFormat(parameter, parameterMap) } yield maybeFormat match { - case None => None - case Some(str) => - ShapeMapFormat - .fromString(str) - .fold( - err => { - logger.error(s"Unsupported shapeMapFormat: $str ($err)") - None - }, - format => Some(format) - ) + case None => ShapeMapFormat.defaultFormat + case Some(format) => new ShapeMapFormat(format) } + } + /** Empty shapemap representation, with no inner data and all defaults * * @return */ def empty: ShapeMap = ShapeMap( - emptyShapeMapValue, - defaultShapeMapFormat, - defaultShapeMapFormat, - ShapeMapTab.defaultActiveShapeMapTab + shapeMap = emptyShapeMapValue, + shapeMapFormat = defaultShapeMapFormat, + targetShapeMapFormat = defaultShapeMapFormat, + activeShapeMapTab = ShapeMapTab.defaultActiveShapeMapTab ) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala deleted file mode 100644 index 6a25a04f..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapInfoResult.scala +++ /dev/null @@ -1,65 +0,0 @@ -package es.weso.rdfshape.server.api.routes.shapemap.logic - -import es.weso.rdfshape.server.utils.json.JsonUtils._ -import es.weso.shapemaps._ -import io.circe.Json - -/** Data class representing the output of a ShapeMapInfo operation - * - * @param msg Output informational message after processing. Used in case of error. 
- * @param shapeMap Input shapemap - * @param shapeMapFormat Input shapemap format - * @param shapeMapJson Output shapemap (JSON representation) - */ -case class ShapeMapInfoResult private ( - msg: String, - shapeMap: Option[String], - shapeMapFormat: Option[ShapeMapFormat], - shapeMapJson: Option[Json] -) { - - /** Convert a result to its JSON representation - * - * @return JSON information of the shapemap result - */ - def toJson: Json = { - Json.fromFields( - List(("message", Json.fromString(msg))) ++ - maybeField(shapeMap, "shapeMap", Json.fromString) ++ - maybeField( - shapeMapFormat, - "shapeMapFormat", - (sf: ShapeMapFormat) => Json.fromString(sf.name) - ) ++ - maybeField(shapeMapJson, "shapeMapJson", identity[Json]) - ) - } - -} - -object ShapeMapInfoResult { - - /** Message attached to the result when created successfully - */ - val successMessage = "Well formed ShapeMap" - - /** @param msg Error message contained in the result - * @return A ShapeMapInfoResult consisting of a single error message and no data - */ - def fromMsg(msg: String): ShapeMapInfoResult = - ShapeMapInfoResult(msg, None, None, None) - - /** @return A ShapeMapInfoResult, given all the parameters needed to build it (shapemap, formats, etc.) 
- */ - def fromShapeMap( - shapeMapStr: Option[String], - shapeMapFormat: Option[ShapeMapFormat], - shapeMap: ShapeMap - ): ShapeMapInfoResult = - ShapeMapInfoResult( - successMessage, - shapeMapStr, - shapeMapFormat, - Some(shapeMap.toJson) - ) -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index bc863232..4c6ff8a1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -5,11 +5,8 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.ShapeMapFormat import es.weso.rdfshape.server.api.routes.ApiService +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.getShapeMap -import es.weso.rdfshape.server.api.routes.shapemap.logic.{ - ShapeMap, - ShapeMapInfoResult -} import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import io.circe._ @@ -19,6 +16,8 @@ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl import org.http4s.multipart._ +import scala.util.{Failure, Success, Try} + /** API service to handle shapemap-related operations * * @param client HTTP4S client object @@ -49,7 +48,6 @@ class ShapeMapService(client: Client[IO]) * - shapeMapFormat [String]: Format of the shapeMap * - activeShapeMapTab [String]: Identifies the source of the shapeMap (raw, URL, file...) 
* Returns a JSON object with the shapeMap information: - * - message [String]: Informational message on success * - shapeMap [String]: Input shapeMap string * - shapeMapFormat [String]: Input shapeMap format * - shapeMapJson [Array]: Array of the elements in the shapeMap @@ -74,15 +72,16 @@ class ShapeMapService(client: Client[IO]) // Error creating the inner ShapeMap instance from the data case Left(errorStr) => errorResponseJson(errorStr, InternalServerError) - // Success creating the inner ShapeMap instance from the data - case Right(innerSm) => - val shapeMapInfo: ShapeMapInfoResult = - ShapeMapInfoResult.fromShapeMap( - Some(shapeMap.shapeMap), - Some(shapeMap.shapeMapFormat), - innerSm - ) - Ok(shapeMapInfo.toJson) + // Success creating the inner ShapeMap instance from the data. + // Try to get JSON representation + case Right(_) => + Try { + shapeMap.shapeMapJson + } match { + case Failure(exc) => + errorResponseJson(exc.getMessage, InternalServerError) + case Success(json) => Ok(json) + } } } ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala index f90a5f53..23628fc6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala @@ -81,7 +81,7 @@ object PartsMap extends LazyLogging{ maybeFormatName <- parameterMap.optPartValue(parameter) } yield maybeFormatName match { case None => - logger.error(s"No valid format found. 
No parameter \"$parameter\"") + logger.info(s"No valid format found for parameter \"$parameter\"") None case Some(formatNameParsed) => logger.info(s"Format value \"$formatNameParsed\" found in parameter \"$parameter\"") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/JsonConversionException.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/JsonConversionException.scala new file mode 100644 index 00000000..0ca6b77f --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/JsonConversionException.scala @@ -0,0 +1,12 @@ +package es.weso.rdfshape.server.utils.error.exceptions + +import scala.util.control.NoStackTrace + +/** Custom exception thrown when a failure occurs while converting JSON data + * + * @param message Reason/explanation of why the exception occurred + */ +final case class JsonConversionException( + private val message: String +) extends RuntimeException(message) + with NoStackTrace diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala index 77862d09..35641fac 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/SSLContextCreationException.scala @@ -1,6 +1,6 @@ package es.weso.rdfshape.server.utils.error.exceptions -/** Custom exception thrown when a failure occurs when trying to create an SSL Context from user's environment data +/** Custom exception thrown when a failure occurs while trying to create an SSL Context from user's environment data * * @param message Reason/explanation of why the exception occurred * @param cause Nested exception that caused the SSL Context creation to fail diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala index 02a50618..37707718 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/error/exceptions/WikibaseServiceException.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.utils.error.exceptions import scala.util.control.NoStackTrace -/** Custom exception thrown when a failure occurs when operating on Wikibase data +/** Custom exception thrown when a failure occurs while operating on Wikibase data * * @param message Reason/explanation of why the exception occurred */ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala index 5e8e565e..2db57f0d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala @@ -30,21 +30,21 @@ object JsonUtils extends Http4sDsl[IO] { /** Converts some object to JSON, given a converter function. * - * @param data Data to be converted to JSON - * @param name Name given to the data - * @param cnv Converter function from A to Json + * @param data Data to be converted to JSON + * @param name Name given to the data + * @param converter Converter function from A to Json * @tparam A Type of the data to be converted to JSON * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. * The list will be empty if no data is provided for conversion. 
*/ def maybeField[A]( - data: Option[A], name: String, - cnv: A => Json + data: Option[A], + converter: A => Json ): List[(String, Json)] = data match { case None => List() - case Some(v) => List((name, cnv(v))) + case Some(v) => List((name, converter(v))) } /** Converts some object to JSON, given a converter function. From ad5f0e42b218e0f4b1f3b5c94ef2644447618208 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Thu, 9 Sep 2021 19:15:50 +0200 Subject: [PATCH 20/32] Refactor trigger mode logic. --- .../server/api/definitions/ApiDefaults.scala | 2 +- .../rdfshape/server/api/format/Format.scala | 30 +++ .../format/{ => dataFormats}/DataFormat.scala | 3 +- .../format/{ => dataFormats}/HtmlFormat.scala | 5 +- .../format/{ => dataFormats}/RdfFormat.scala | 5 +- .../{ => dataFormats}/SchemaFormat.scala | 5 +- .../{ => dataFormats}/ShapeMapFormat.scala | 5 +- .../server/api/merged/DataElement.scala | 2 +- .../routes/data/logic/DataConversion.scala | 2 +- .../api/routes/data/logic/DataExtract.scala | 2 +- .../api/routes/data/logic/DataInfo.scala | 2 +- .../api/routes/data/logic/DataParam.scala | 15 +- .../api/routes/data/service/DataService.scala | 4 +- .../routes/endpoint/logic/SparqlQuery.scala | 96 +++++---- .../endpoint/service/EndpointService.scala | 2 +- .../schema/logic/SchemaOperations.scala | 28 ++- .../api/routes/schema/logic/SchemaParam.scala | 48 ++--- .../api/routes/schema/logic/TriggerMode.scala | 76 +++++++ .../routes/schema/service/SchemaService.scala | 45 +++-- .../schema/service/TriggerModeParam.scala | 187 ------------------ .../api/routes/shapemap/logic/ShapeMap.scala | 176 +++++++++-------- .../shapemap/service/ShapeMapService.scala | 2 +- 22 files changed, 355 insertions(+), 387 deletions(-) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/format/{ => dataFormats}/DataFormat.scala (93%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/format/{ => dataFormats}/HtmlFormat.scala (84%) rename 
modules/server/src/main/scala/es/weso/rdfshape/server/api/format/{ => dataFormats}/RdfFormat.scala (92%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/format/{ => dataFormats}/SchemaFormat.scala (86%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/format/{ => dataFormats}/ShapeMapFormat.scala (84%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index 2edd7dc9..1a3f2be5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -1,7 +1,7 @@ package es.weso.rdfshape.server.api.definitions import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.format.{ +import es.weso.rdfshape.server.api.format.dataFormats.{ DataFormat, SchemaFormat, ShapeMapFormat diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index 0fa6e4ef..cf8035c7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -1,6 +1,9 @@ package es.weso.rdfshape.server.api.format +import cats.effect.IO import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.utils.parameters.PartsMap import org.http4s.MediaType /** Generic interface for any format any data transmitted to/from the API may have @@ -29,6 +32,9 @@ object Format extends 
FormatCompanion[Format] { DataFormat.availableFormats // ++ futureFormats } +/** Static utilities to be used with formats + * @tparam F Specific format type that we are handling + */ trait FormatCompanion[F <: Format] extends LazyLogging { /** Default format to be used when none specified @@ -39,6 +45,30 @@ trait FormatCompanion[F <: Format] extends LazyLogging { */ val availableFormats: List[F] + /** Try to build a Format object from a request's parameters + * + * @param parameter Name of the parameter with the format name + * @param parameterMap Request parameters + * @return Optionally, a new Format instance of type F with the format + */ + def fromRequestParams( + parameter: String, + parameterMap: PartsMap + ): IO[Option[F]] = { + for { + maybeFormatName <- parameterMap.optPartValue(parameter) + } yield maybeFormatName match { + case None => + logger.info(s"No valid format found for parameter '$parameter'") + None + case Some(formatNameParsed) => + logger.info( + s"Format value '$formatNameParsed' found in parameter '$parameter'" + ) + fromString(formatNameParsed).toOption + } + } + /** Given a format name, get its corresponding DataFormat object * DataFormat * diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala similarity index 93% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala index eb4044b9..49faa7f8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/DataFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala @@ -1,5 +1,6 @@ -package es.weso.rdfshape.server.api.format +package es.weso.rdfshape.server.api.format.dataFormats +import es.weso.rdfshape.server.api.format._ import 
org.http4s.MediaType /** Extension of the Format interface to represent RDF data formats diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/HtmlFormat.scala similarity index 84% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/HtmlFormat.scala index 703b2f40..71335d42 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/HtmlFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/HtmlFormat.scala @@ -1,10 +1,11 @@ -package es.weso.rdfshape.server.api.format +package es.weso.rdfshape.server.api.format.dataFormats +import es.weso.rdfshape.server.api.format.FormatCompanion import org.http4s.MediaType /** Dummy class to differentiate HTML-based formats from the more generic DataFormat * - * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + * @see {@link DataFormat} */ class HtmlFormat(formatName: String) extends DataFormat(formatName, MediaType.text.html) {} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala similarity index 92% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala index 6498c430..0c194c24 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/RdfFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala @@ -1,10 +1,11 @@ -package es.weso.rdfshape.server.api.format +package es.weso.rdfshape.server.api.format.dataFormats +import es.weso.rdfshape.server.api.format.FormatCompanion 
import org.http4s.MediaType /** Dummy class to differentiate RDF formats from the more generic DataFormat * - * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + * @see {@link DataFormat} */ sealed class RDFFormat(formatName: String, formatMimeType: MediaType) extends DataFormat(formatName, formatMimeType) {} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala similarity index 86% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala index f9a74781..2bc282a6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala @@ -1,9 +1,10 @@ -package es.weso.rdfshape.server.api.format +package es.weso.rdfshape.server.api.format.dataFormats +import es.weso.rdfshape.server.api.format.{Format, FormatCompanion} import org.http4s.MediaType /** Dummy class to differentiate shapemap formats from the more generic DataFormat - * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + * @see {@link DataFormat} */ class SchemaFormat(formatName: String, formatMimeType: MediaType) extends DataFormat(formatName, formatMimeType) { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/ShapeMapFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/ShapeMapFormat.scala similarity index 84% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/format/ShapeMapFormat.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/ShapeMapFormat.scala index dcc69aa7..77a83713 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/ShapeMapFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/ShapeMapFormat.scala @@ -1,9 +1,10 @@ -package es.weso.rdfshape.server.api.format +package es.weso.rdfshape.server.api.format.dataFormats +import es.weso.rdfshape.server.api.format.{Format, FormatCompanion} import org.http4s.MediaType /** Dummy trait to differentiate shapemap formats from the more generic DataFormat - * @see {@link es.weso.rdfshape.server.api.format.DataFormat} + * @see {@link DataFormat} */ class ShapeMapFormat(formatName: String, formatMimeType: MediaType) extends DataFormat(formatName, formatMimeType) { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala index 6ad1903f..0f7f5ca0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala @@ -5,7 +5,7 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.format.DataFormat +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat import io.circe._ /** Represent each chunk of RDF data submitted (mainly on RDF-merging operations) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala index 09023035..b7f649e0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala @@ -5,7 +5,7 @@ import 
com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} -import es.weso.rdfshape.server.api.format.DataFormat +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat import es.weso.rdfshape.server.api.merged.CompoundData import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.utils.IOUtils.{either2io, err} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala index 2b8882a6..dee150b8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala @@ -9,7 +9,7 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ defaultSchemaFormat, defaultShapeLabel } -import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} import es.weso.rdfshape.server.utils.json.JsonUtils._ import es.weso.schema.Schema import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala index 3da3ed8a..3df9f8c7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala @@ -5,7 +5,7 @@ import cats.effect.IO import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI import es.weso.rdf.{PrefixMap, RDFReasoner} -import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat import 
es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json import es.weso.rdfshape.server.utils.json.JsonUtils._ import es.weso.utils.IOUtils.{either2es, io2es} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala index 93f4236e..1661dd2c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala @@ -7,6 +7,7 @@ import es.weso.rdf.jena._ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, RDFReasoner} import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat import es.weso.rdfshape.server.api.merged.CompoundData import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap @@ -209,6 +210,13 @@ sealed case class DataParam( } + private def applyInference( + rdf: Resource[IO, RDFReasoner], + inference: Option[String], + dataFormat: Format + ): Resource[IO, RDFReasoner] = + extendWithInference(rdf, inference) + private def extendWithInference( resourceRdf: Resource[IO, RDFReasoner], optInference: Option[String] @@ -230,13 +238,6 @@ sealed case class DataParam( } } - private def applyInference( - rdf: Resource[IO, RDFReasoner], - inference: Option[String], - dataFormat: Format - ): Resource[IO, RDFReasoner] = - extendWithInference(rdf, inference) - private def mkBaseIri( maybeBase: Option[String] ): Either[String, Option[IRI]] = maybeBase match { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index b37922ce..6cbb9f0a 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -9,7 +9,7 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ defaultInference } import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.data.logic.DataExtract.dataExtract import es.weso.rdfshape.server.api.routes.data.logic.DataInfo.{ @@ -224,7 +224,7 @@ class DataService(client: Client[IO]) errorResponseJson(s"Error obtaining query data: $err", BadRequest) case Right(query) => // Query was parsed, but may be invalid still - val optQueryStr = query.query + val optQueryStr = query.queryRaw logger.debug(s"Data query with querystring: $optQueryStr") for { result <- io2f( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala index 9c1b8687..3bd144d2 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala @@ -17,11 +17,11 @@ import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents /** Data class representing a SPARQL query and its current source * - * @param query Query raw text + * @param queryRaw Query raw text * @param activeQueryTab Active tab, used to know which source the query comes from */ sealed case class SparqlQuery private ( - query: String, + queryRaw: String, activeQueryTab: SparqlQueryTab ) @@ -49,42 +49,68 @@ private[api] object SparqlQuery extends LazyLogging { s"Getting SPARQL from 
params. Query tab: $activeQueryTab" ) - maybeQuery: Either[String, SparqlQuery] = activeQueryTab.getOrElse( - defaultActiveQueryTab - ) match { - case SparqlQueryTab.TEXT => - queryStr match { - case None => Left("No value for the query string") - case Some(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQueryTab.TEXT)) - } - case SparqlQueryTab.URL => - queryUrl match { - case None => Left(s"No value for the query URL") - case Some(queryUrl) => - getUrlContents(queryUrl) match { - case Right(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQueryTab.URL)) - case Left(err) => Left(err) - } - - } - case SparqlQueryTab.FILE => - queryFile match { - case None => Left(s"No value for the query file") - case Some(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQueryTab.FILE)) - } - - case other => - val msg = s"Unknown value for activeQueryTab: $other" - logger.warn(msg) - Left(msg) - - } + maybeQuery: Either[String, SparqlQuery] = mkSparqlQuery( + queryStr, + queryUrl, + queryFile, + activeQueryTab + ) } yield maybeQuery + /** Create a SparqlQuery instance, given its source and data + * + * @param queryStr Optionally, the raw contents of the query + * @param queryUrl Optionally, the URL with the contents of the query + * @param queryFile Optionally, the file with the contents of the query + * @param activeQueryTab Optionally, the indicator of the query source (raw, url or file) + * @return + */ + def mkSparqlQuery( + queryStr: Option[String], + queryUrl: Option[String], + queryFile: Option[String], + activeQueryTab: Option[SparqlQueryTab] + ): Either[String, SparqlQuery] = { + + // Create the query depending on the client's selected method + val maybeQuery: Either[String, SparqlQuery] = activeQueryTab.getOrElse( + defaultActiveQueryTab + ) match { + case SparqlQueryTab.TEXT => + queryStr match { + case None => Left("No value for the query string") + case Some(queryRaw) => + Right(SparqlQuery(queryRaw, SparqlQueryTab.TEXT)) + } + case SparqlQueryTab.URL => + queryUrl 
match { + case None => Left(s"No value for the query URL") + case Some(queryUrl) => + getUrlContents(queryUrl) match { + case Right(queryRaw) => + Right(SparqlQuery(queryRaw, SparqlQueryTab.URL)) + case Left(err) => Left(err) + } + + } + case SparqlQueryTab.FILE => + queryFile match { + case None => Left(s"No value for the query file") + case Some(queryRaw) => + Right(SparqlQuery(queryRaw, SparqlQueryTab.FILE)) + } + + case other => + val msg = s"Unknown value for activeQueryTab: $other" + logger.warn(msg) + Left(msg) + + } + + maybeQuery + } + } /** Enumeration of the different possible QueryTabs sent by the client. diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index bfc9f0a2..e1815a81 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -74,7 +74,7 @@ class EndpointService(client: Client[IO]) getSparqlQuery(partsMap) ) query <- EitherT.fromEither[IO](either) - queryString = query.query + queryString = query.queryRaw json <- { logger.debug( s"Query to endpoint $endpoint: $queryString" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index efcb28d0..d6bbc136 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -8,9 +8,9 @@ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, RDFBuilder, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults import 
es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions -import es.weso.rdfshape.server.api.format.{DataFormat, SchemaFormat} +import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} import es.weso.rdfshape.server.api.routes.data.logic.DataParam -import es.weso.rdfshape.server.api.routes.schema.service.TriggerModeParam +import es.weso.rdfshape.server.api.routes.schema.service.TriggerMode import es.weso.schema.{Result, Schema, ShaclexSchema, ValidationTrigger} import es.weso.shacl.converter.Shacl2ShEx import es.weso.shapemaps.ShapeMap @@ -142,7 +142,7 @@ private[api] object SchemaOperations extends LazyLogging { optSchema: Option[String], optSchemaFormat: Option[SchemaFormat], optSchemaEngine: Option[String], - tp: TriggerModeParam, + tp: TriggerMode, optInference: Option[String], relativeBase: Option[IRI], builder: RDFBuilder @@ -182,7 +182,7 @@ private[api] object SchemaOperations extends LazyLogging { * * @param rdf Input RDF data * @param schema Input schema - * @param tp Trigger mode + * @param triggerMode Trigger mode * @param relativeBase Relative base (optional) * @param builder RDF builder * @return @@ -190,31 +190,29 @@ private[api] object SchemaOperations extends LazyLogging { def schemaValidate( rdf: RDFReasoner, schema: Schema, - tp: TriggerModeParam, + triggerMode: TriggerMode, relativeBase: Option[IRI], builder: RDFBuilder ): IO[(Result, Option[ValidationTrigger], Long)] = { - logger.debug(s"APIHelper: validate") - val base = relativeBase.map(_.str) // Some(FileUtils.currentFolderURL) - val triggerMode = tp.triggerMode + val base = relativeBase.map(_.str) // Some(FileUtils.currentFolderURL) + val triggerModeStr = triggerMode.triggerModeStr for { - pm <- rdf.getPrefixMap - p <- tp.getShapeMap(pm, schema.pm) - (optShapeMapStr, _) = p + prefixMap <- rdf.getPrefixMap + shapeMapRaw = triggerMode.shapeMap.shapeMapRaw pair <- ValidationTrigger.findTrigger( - triggerMode.getOrElse(ApiDefaults.defaultTriggerMode), - 
optShapeMapStr.getOrElse(""), + triggerModeStr, + shapeMapRaw, base, None, None, - pm, + prefixMap, schema.pm ) match { case Left(msg) => schemaErr( - s"Cannot obtain trigger: $triggerMode\nshapeMap: $optShapeMapStr\nmsg: $msg" + s"Cannot obtain trigger: $triggerModeStr\nshapeMap: $shapeMapRaw\nmsg: $msg" ) case Right(trigger) => val run = for { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala index 7b33e762..f64649d5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala @@ -5,7 +5,7 @@ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine -import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap @@ -209,10 +209,13 @@ object SchemaParam extends LazyLogging { } private[api] def mkSchemaParam(partsMap: PartsMap): IO[SchemaParam] = for { - schema <- partsMap.optPartValue(SchemaParameter.name) - schemaURL <- partsMap.optPartValue(SchemaURLParameter.name) - schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) - schemaFormat <- getSchemaFormat(SchemaFormatParameter.name, partsMap) + schema <- partsMap.optPartValue(SchemaParameter.name) + schemaURL <- partsMap.optPartValue(SchemaURLParameter.name) + schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) + optSchemaFormat <- SchemaFormat.fromRequestParams( + SchemaFormatParameter.name, + partsMap + ) 
schemaEngine <- partsMap.optPartValue(SchemaEngineParameter.name) targetSchemaEngine <- partsMap.optPartValue( TargetSchemaEngineParameter.name @@ -223,36 +226,17 @@ object SchemaParam extends LazyLogging { activeSchemaTab <- partsMap.optPartValue(ActiveSchemaTabParameter.name) } yield { SchemaParam( - schema, - schemaURL, - schemaFile, - schemaFormat, - schemaEngine, - targetSchemaEngine, - targetSchemaFormat, - activeSchemaTab + schema = schema, + schemaURL = schemaURL, + schemaFile = schemaFile, + schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), + schemaEngine = schemaEngine, + targetSchemaEngine = targetSchemaEngine, + targetSchemaFormat = targetSchemaFormat, + activeSchemaTab = activeSchemaTab ) } - /** Try to build a {@link es.weso.rdfshape.server.api.format.SchemaFormat} object from a request's parameters - * - * @param parameter Name of the parameter with the format name - * @param parameterMap Request parameters - * @return The SchemaFormat found or the default one - */ - private def getSchemaFormat( - parameter: String, - parameterMap: PartsMap - ): IO[SchemaFormat] = { - for { - maybeFormat <- PartsMap.getFormat(parameter, parameterMap) - } yield maybeFormat match { - case None => SchemaFormat.defaultFormat - case Some(format) => new SchemaFormat(format) - } - - } - private[api] def empty: SchemaParam = SchemaParam( schema = None, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala new file mode 100644 index 00000000..f152a0a7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala @@ -0,0 +1,76 @@ +package es.weso.rdfshape.server.api.routes.schema.service + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import 
es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.shapemaps.{ShapeMap => ShapeMapW} + +/** Data class representing a TriggerMode and its current source. + * + * @param triggerModeStr Trigger mode name + * @param shapeMap Inner shapemap associated to the TriggerMode + */ +sealed case class TriggerMode private ( + triggerModeStr: String, + shapeMap: ShapeMap +) extends LazyLogging { + + /** Inner shapemap structure of the shapemap contained in this instance + * + * @return A ShapeMap instance used by WESO libraries in validation + */ + lazy val innerShapeMap: Either[String, ShapeMapW] = shapeMap.innerShapeMap +} + +private[api] object TriggerMode extends LazyLogging { + + /** Given a request's parameters, try to extract a TriggerMode instance from them + * + * @param partsMap Request's parameters + * @return Either the trigger mode or an error message + */ + def getTriggerModeParam( + partsMap: PartsMap + ): IO[Either[String, TriggerMode]] = { + for { + // Get data sent in que query + triggerMode <- partsMap.optPartValue(TriggerModeParameter.name) + shapeMapStr <- partsMap.optPartValue(ShapeMapTextParameter.name) + shapeMapUrl <- partsMap.optPartValue(ShapeMapUrlParameter.name) + shapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) + + shapeMapFormat <- ShapeMapFormat.fromRequestParams( + ShapeMapFormatParameter.name, + partsMap + ) + activeShapeMapTab <- partsMap.optPartValue( + ActiveShapeMapTabParameter.name + ) + + // Get companion shapemap + maybeShapeMap = ShapeMap.mkShapeMap( + shapeMapStr, + shapeMapUrl, + shapeMapFile, + shapeMapFormat, + None, + activeShapeMapTab + ) + + } yield { + maybeShapeMap.map(shapeMap => + TriggerMode( + triggerModeStr = + triggerMode.getOrElse(ApiDefaults.defaultTriggerMode), + 
shapeMap = shapeMap + ) + ) + } + + } + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index ee31bab5..24cbf966 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -7,7 +7,7 @@ import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.data.logic.DataParam import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ @@ -17,7 +17,7 @@ import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.schema._ -import es.weso.utils.IOUtils._ +import es.weso.utils.IOUtils.io2f import io.circe.Json import org.http4s._ import org.http4s.circe._ @@ -283,7 +283,7 @@ class SchemaService(client: Client[IO]) req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) - val r: IO[Json] = for { + val r = for { dataPair <- DataParam.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataPair res <- for { @@ -291,25 +291,42 @@ class SchemaService(client: Client[IO]) vv <- (resourceRdf, emptyRes).tupled.use { case (rdf, builder) => for { schemaPair <- SchemaParam.mkSchema(partsMap, Some(rdf)) - (schema, sp) = schemaPair - tp <- TriggerModeParam.mkTriggerModeParam(partsMap) - newRdf <- 
applyInference(rdf, dp.inference) - r <- io2f( - schemaValidate(newRdf, schema, tp, relativeBase, builder) - ) - json <- io2f(schemaResult2json(r._1)) - } yield json + (schema, _) = schemaPair + maybeTriggerMode <- TriggerMode.getTriggerModeParam(partsMap) + newRdf <- applyInference(rdf, dp.inference) + ret <- maybeTriggerMode match { + case Left(err) => + IO.raiseError( + new RuntimeException( + s"Could not obtain validation trigger: $err" + ) + ) + case Right(triggerMode) => + for { + r <- io2f( + schemaValidate( + newRdf, + schema, + triggerMode, + relativeBase, + builder + ) + ) + json <- io2f(schemaResult2json(r._1)) + } yield json + } + } yield ret } } yield vv } yield res for { e <- r.attempt - v <- e.fold( - t => errorResponseJson(t.getMessage, BadRequest), + res <- e.fold( + exc => errorResponseJson(exc.getMessage, BadRequest), json => Ok(json) ) - } yield v + } yield res } } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala deleted file mode 100644 index 7d065296..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/TriggerModeParam.scala +++ /dev/null @@ -1,187 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.service - -import cats.effect.IO -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.PrefixMap -import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultActiveShapeMapTab -import es.weso.rdfshape.server.api.format.ShapeMapFormat -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.shapemaps.ShapeMap - -case class TriggerModeParam( - triggerMode: Option[String], - shapeMap: Option[String], - shapeMapURL: Option[String], - shapeMapFile: Option[String], - shapeMapFormat: 
ShapeMapFormat, - activeShapeMapTab: Option[String] -) extends LazyLogging { - - def getShapeMap( - nodesPrefixMap: PrefixMap, - shapesPrefixMap: PrefixMap - ): IO[(Option[String], Either[String, ShapeMap])] = { - val inputType = parseShapeMapTab( - activeShapeMapTab.getOrElse(defaultActiveShapeMapTab) - ) - logger.debug(s"input type: $inputType") - inputType match { - case Right(`shapeMapUrlType`) => - logger.debug(s"ShapeMap input type: shapeMapUrlType") - - shapeMapURL match { - case None => IO.pure((None, Left(s"No value for shapeMapURL"))) - case Some(shapeMapUrl) => - logger.trace(s"ShapeMapUrl: $shapeMapUrl") - - ShapeMap - .fromURI( - shapeMapUrl, - shapeMapFormat.name, - None, - nodesPrefixMap, - shapesPrefixMap - ) - .map { - case Left(str) => - ( - None, - Left( - s"Error obtaining $shapeMapUrl with $shapeMapFormat: $str" - ) - ) - case Right(shapeMap) => - (Some(shapeMap.toString), Right(shapeMap)) - } - } - case Right(`shapeMapFileType`) => - logger.debug(s"ShapeMap input type: shapeMapFileType") - - shapeMapFile match { - case None => IO.pure((None, Left(s"No value for shapeMapFile"))) - case Some(shapeMapStr) => - logger.trace(s"ShapeMapFile: $shapeMapStr") - - ShapeMap.fromString(shapeMapStr, shapeMapFormat.name, None) match { - case Left(ls) => - IO.pure((Some(shapeMapStr), Left(ls.toList.mkString("\n")))) - case Right(parsedShapeMap) => - IO.pure((Some(shapeMapStr), Right(parsedShapeMap))) - } - } - case Right(`shapeMapTextAreaType`) => - logger.debug(s"ShapeMap input type: shapeMapTextAreType") - - shapeMap match { - case None => IO.pure((None, Right(ShapeMap.empty))) - case Some(shapeMapStr) => - logger.trace(s"ShapeMapText: $shapeMapStr") - - ShapeMap.fromString(shapeMapStr, shapeMapFormat.name, None) match { - case Left(ls) => - IO.pure((Some(shapeMapStr), Left(ls.toList.mkString("\n")))) - case Right(parsedShapeMap) => - IO.pure((Some(shapeMapStr), Right(parsedShapeMap))) - } - } - case Right(other) => - val msg = s"Unknown value for 
activeShapeMapTab: $other" - logger.warn(msg) - IO.pure((None, Left(msg))) - case Left(msg) => IO.pure((None, Left(msg))) - } - } - - def parseShapeMapTab(tab: String): Either[String, ShapeMapInputType] = { - val inputTypes = - List(shapeMapUrlType, shapeMapFileType, shapeMapTextAreaType) - inputTypes.find(_.id == tab) match { - case Some(x) => Right(x) - case None => - Left( - s"Wrong value of tab: $tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" - ) - } - } - - sealed abstract class ShapeMapInputType { - val id: String - } - - case object shapeMapUrlType extends ShapeMapInputType { - override val id = "#shapeMapUrl" - } - - case object shapeMapFileType extends ShapeMapInputType { - override val id = "#shapeMapFile" - } - - case object shapeMapTextAreaType extends ShapeMapInputType { - override val id = "#shapeMapTextArea" - } - -} - -object TriggerModeParam extends LazyLogging { - - def mkTriggerModeParam(partsMap: PartsMap): IO[TriggerModeParam] = { - val tp: IO[TriggerModeParam] = for { - optTriggerMode <- partsMap.optPartValue(TriggerModeParameter.name) - optShapeMap <- partsMap.optPartValue(ShapeMapTextParameter.name) - optShapeMapURL <- partsMap.optPartValue(ShapeMapUrlParameter.name) - optShapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) - - shapeMapFormat <- getShapeMapFormat( - ShapeMapFormatParameter.name, - partsMap - ) - optActiveShapeMapTab <- partsMap.optPartValue( - ActiveShapeMapTabParameter.name - ) - } yield { - logger.debug(s"optTriggerMode: $optTriggerMode") - logger.debug(s"optShapeMap: $optShapeMap") - logger.debug(s"optActiveShapeMapTab: $optActiveShapeMapTab") - logger.debug(s"optShapeMapFormat: $shapeMapFormat") - TriggerModeParam( - triggerMode = optTriggerMode, - shapeMap = optShapeMap, - shapeMapURL = optShapeMapURL, - shapeMapFile = optShapeMapFile, - shapeMapFormat = shapeMapFormat, - activeShapeMapTab = optActiveShapeMapTab - ) - } - val r: IO[Either[String, TriggerModeParam]] = tp.map(_.asRight[String]) 
- r.flatMap( - _.fold( - str => - IO.raiseError( - new RuntimeException(s"Error obtaining validation trigger: $str") - ), - IO.pure - ) - ) - } - - /** Try to build a {@link es.weso.rdfshape.server.api.format.ShapeMapFormat} object from a request's parameters - * - * @param parameter Name of the parameter with the format name - * @param parameterMap Request parameters - * @return The ShapeMapFormat found or the default one - */ - private def getShapeMapFormat( - parameter: String, - parameterMap: PartsMap - ): IO[ShapeMapFormat] = { - for { - maybeFormat <- PartsMap.getFormat(parameter, parameterMap) - } yield maybeFormat match { - case None => ShapeMapFormat.defaultFormat - case Some(format) => new ShapeMapFormat(format) - } - - } -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index c5bea5ee..3bb9168e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -2,7 +2,8 @@ package es.weso.rdfshape.server.api.routes.shapemap.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.format.{Compact, ShapeMapFormat} +import es.weso.rdfshape.server.api.format.dataFormats.{Compact, ShapeMapFormat} +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapTab.ShapeMapTab import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.error.exceptions.JsonConversionException @@ -12,27 +13,27 @@ import es.weso.shapemaps.{ShapeMap => ShapeMapW} import io.circe.Json /** Data class representing a ShapeMap and its current source. 
- * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). * - * @param shapeMap Shapemap raw text + * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). + * @param shapeMapRaw Shapemap raw text * @param shapeMapFormat Shapemap format * @param targetShapeMapFormat Shapemap target format (only present in conversion operations) * @param activeShapeMapTab Active tab, used to know which source the shapemap comes from */ sealed case class ShapeMap private ( - shapeMap: String, + shapeMapRaw: String, shapeMapFormat: ShapeMapFormat, targetShapeMapFormat: ShapeMapFormat, activeShapeMapTab: String ) { - /** Construct the inner shapemap structure from the data in this class + /** Inner shapemap structure of the data in this instance * * @return A ShapeMap instance used by WESO libraries in validation */ lazy val innerShapeMap: Either[String, ShapeMapW] = { ShapeMapW - .fromString(shapeMap, shapeMapFormat.name) match { + .fromString(shapeMapRaw, shapeMapFormat.name) match { case Left(errorList) => Left(errorList.toList.mkString("\n")) case Right(shapeMap) => Right(shapeMap) } @@ -48,7 +49,7 @@ sealed case class ShapeMap private ( case Left(err) => throw JsonConversionException(err) case Right(dataShapeMap) => Json.fromFields( - maybeField("shapeMap", Some(shapeMap), Json.fromString) ++ + maybeField("shapeMap", Some(shapeMapRaw), Json.fromString) ++ maybeField( "shapeMapFormat", Some(shapeMapFormat), @@ -69,7 +70,7 @@ private[api] object ShapeMap extends LazyLogging { /** Placeholder value used for the shapemap whenever an empty shapemap is issued/needed. 
*/ - private val emptyShapeMapValue = "" + val emptyShapeMapValue = "" /** Default shapemap format used when no alternatives are present */ @@ -88,11 +89,11 @@ private[api] object ShapeMap extends LazyLogging { shapeMapStr <- partsMap.optPartValue(ShapeMapTextParameter.name) shapeMapUrl <- partsMap.optPartValue(ShapeMapUrlParameter.name) shapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) - shapeMapFormat <- getShapeMapFormat( + shapeMapFormat <- ShapeMapFormat.fromRequestParams( ShapeMapFormatParameter.name, partsMap ) - targetShapeMapFormat <- getShapeMapFormat( + targetShapeMapFormat <- ShapeMapFormat.fromRequestParams( TargetShapeMapFormatParameter.name, partsMap ) @@ -105,88 +106,105 @@ private[api] object ShapeMap extends LazyLogging { ) // Create the shapemap depending on the client's selected method - maybeShapeMap: Either[String, ShapeMap] = activeShapeMapTab.getOrElse( - ShapeMapTab.defaultActiveShapeMapTab - ) match { - case ShapeMapTab.TEXT => - shapeMapStr match { - case None => Left("No value for the ShapeMap string") - case Some(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw, - shapeMapFormat, - targetShapeMapFormat, - ShapeMapTab.TEXT - ) - ) - } - - case ShapeMapTab.URL => - shapeMapUrl match { - case None => Left(s"No value for the shapemap URL") - case Some(url) => - getUrlContents(url) match { - case Right(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw, - shapeMapFormat, - targetShapeMapFormat, - ShapeMapTab.URL - ) - ) - case Left(err) => Left(err) - } - } - case ShapeMapTab.FILE => - shapeMapFile match { - case None => Left(s"No value for the shapemap file") - case Some(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw, - shapeMapFormat, - targetShapeMapFormat, - ShapeMapTab.FILE - ) - ) - } - case other => - val msg = s"Unknown value for activeShapemapTab: $other" - logger.warn(msg) - Left(msg) - } + maybeShapeMap: Either[String, ShapeMap] = mkShapeMap( + shapeMapStr, + shapeMapUrl, + shapeMapFile, + shapeMapFormat, + 
targetShapeMapFormat, + activeShapeMapTab + ) } yield maybeShapeMap } - /** Try to build a {@link es.weso.rdfshape.server.api.format.ShapeMapFormat} object from a request's parameters - * - * @param parameter Name of the parameter with the format name - * @param parameterMap Request parameters - * @return The ShapeMap format found or the default one + /** Create a ShapeMap instance, given its source and format + * @param shapeMapStr Optionally, the raw contents of the shapemap + * @param shapeMapUrl Optionally, the URL with the contents of the shapemap + * @param shapeMapFile Optionally, the file with the contents of the shapemap + * @param shapeMapFormat Optionally, the format of the shapemap + * @param targetShapeMapFormat Optionally, the target format of the shapemap (for conversions) + * @param activeShapeMapTab Optionally, the indicator of the shapemap source (raw, url or file) + * @return */ - private def getShapeMapFormat( - parameter: String, - parameterMap: PartsMap - ): IO[ShapeMapFormat] = { - for { - maybeFormat <- PartsMap.getFormat(parameter, parameterMap) - } yield maybeFormat match { - case None => ShapeMapFormat.defaultFormat - case Some(format) => new ShapeMapFormat(format) + def mkShapeMap( + shapeMapStr: Option[String], + shapeMapUrl: Option[String], + shapeMapFile: Option[String], + shapeMapFormat: Option[ShapeMapFormat], + targetShapeMapFormat: Option[ShapeMapFormat], + activeShapeMapTab: Option[ShapeMapTab] + ): Either[String, ShapeMap] = { + // Confirm chosen formats + val format = + shapeMapFormat.getOrElse(ShapeMapFormat.defaultFormat) + val targetFormat = + targetShapeMapFormat.getOrElse(ShapeMapFormat.defaultFormat) + + // Create the shapemap depending on the client's selected method + val maybeShapeMap: Either[String, ShapeMap] = activeShapeMapTab.getOrElse( + ShapeMapTab.defaultActiveShapeMapTab + ) match { + case ShapeMapTab.TEXT => + shapeMapStr match { + case None => Left("No value for the ShapeMap string") + case Some(shapeMapRaw) 
=> + Right( + ShapeMap( + shapeMapRaw, + format, + targetFormat, + ShapeMapTab.TEXT + ) + ) + } + + case ShapeMapTab.URL => + shapeMapUrl match { + case None => Left(s"No value for the shapemap URL") + case Some(url) => + getUrlContents(url) match { + case Right(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw, + format, + targetFormat, + ShapeMapTab.URL + ) + ) + case Left(err) => Left(err) + } + } + case ShapeMapTab.FILE => + shapeMapFile match { + case None => Left(s"No value for the shapemap file") + case Some(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw, + format, + targetFormat, + ShapeMapTab.FILE + ) + ) + } + case other => + val msg = s"Unknown value for activeShapemapTab: $other" + logger.warn(msg) + Left(msg) } + maybeShapeMap } /** Empty shapemap representation, with no inner data and all defaults * * @return */ - def empty: ShapeMap = + private def empty: ShapeMap = ShapeMap( - shapeMap = emptyShapeMapValue, + shapeMapRaw = emptyShapeMapValue, shapeMapFormat = defaultShapeMapFormat, targetShapeMapFormat = defaultShapeMapFormat, activeShapeMapTab = ShapeMapTab.defaultActiveShapeMapTab @@ -199,7 +217,7 @@ private[api] object ShapeMap extends LazyLogging { * to be fetched or as a text file containing the shapemap. * In case the client submits the shapemap in several formats, the selected tab will indicate the preferred one. 
*/ -private[this] object ShapeMapTab extends Enumeration { +private[api] object ShapeMapTab extends Enumeration { type ShapeMapTab = String val TEXT = "#shapeMapTextArea" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index 4c6ff8a1..507d9d6a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -3,7 +3,7 @@ package es.weso.rdfshape.server.api.routes.shapemap.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.format.ShapeMapFormat +import es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.getShapeMap From eb8df736271928f56146def24a7828286bdc65c4 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Wed, 15 Sep 2021 17:04:26 +0200 Subject: [PATCH 21/32] Before data refactor. 
--- .../server/api/merged/DataElement.scala | 4 +- .../logic/{DataParam.scala => Data.scala} | 202 +++++++++++------- .../api/routes/data/service/DataService.scala | 10 +- .../routes/permalink/logic/Permalink.scala | 7 +- .../logic/{SchemaParam.scala => Schema.scala} | 28 +-- .../schema/logic/SchemaOperations.scala | 7 +- .../api/routes/schema/logic/TriggerMode.scala | 2 +- .../routes/schema/service/SchemaService.scala | 16 +- .../api/routes/shapemap/logic/ShapeMap.scala | 19 +- .../wikibase/logic/WikibaseSchemaParam.scala | 18 +- 10 files changed, 177 insertions(+), 136 deletions(-) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/{DataParam.scala => Data.scala} (68%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/{SchemaParam.scala => Schema.scala} (93%) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala index 0f7f5ca0..b9efbaf9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala @@ -20,8 +20,8 @@ import io.circe._ case class DataElement( data: Option[String], dataUrl: Option[String], - endpoint: Option[String], dataFile: Option[String], + endpoint: Option[String], dataFormat: DataFormat, activeDataTab: ActiveDataTab ) extends LazyLogging { @@ -69,8 +69,8 @@ object DataElement extends LazyLogging { val empty: DataElement = DataElement( data = None, dataUrl = None, - endpoint = None, dataFile = None, + endpoint = None, ApiDefaults.defaultDataFormat, ActiveDataTab.default ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala similarity index 68% rename from 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala index 1661dd2c..982284b7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala @@ -16,12 +16,25 @@ import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import es.weso.utils.IOUtils.err import java.net.URI - -sealed case class DataParam( +import scala.util.matching.Regex + +/** Data class representing RDF data and its current source + * + * @param data + * @param dataURL + * @param dataFile + * @param optEndpoint + * @param optDataFormat Data format + * @param inference Data inference + * @param targetDataFormat Data target format (only for conversion operations) + * @param activeDataTab Active tab, used to know which source the data comes from + * @param compoundData + */ +sealed case class Data( data: Option[String], dataURL: Option[String], dataFile: Option[String], - maybeEndpoint: Option[String], + optEndpoint: Option[String], optDataFormat: Option[DataFormat], inference: Option[String], targetDataFormat: Option[DataFormat], @@ -44,13 +57,13 @@ sealed case class DataParam( val base = relativeBase.map(_.str) logger.debug(s"ActiveDataTab: $activeDataTab") val inputType = activeDataTab match { - case Some(a) => parseDataTab(a) - case None if compoundData.isDefined => Right(compoundDataType) - case None if data.isDefined => Right(dataTextAreaType) - case None if dataURL.isDefined => Right(dataUrlType) - case None if dataFile.isDefined => Right(dataFileType) - case None if maybeEndpoint.isDefined => Right(dataEndpointType) - case None => Right(dataTextAreaType) + case Some(a) => parseDataTab(a) + case None if compoundData.isDefined => Right(compoundDataType) + case None if data.isDefined => Right(dataTextAreaType) 
+ case None if dataURL.isDefined => Right(dataUrlType) + case None if dataFile.isDefined => Right(dataFileType) + case None if optEndpoint.isDefined => Right(dataEndpointType) + case None => Right(dataTextAreaType) } logger.debug(s"Input type: $inputType") val x: IO[(Option[String], Resource[IO, RDFReasoner])] = inputType match { @@ -97,7 +110,7 @@ sealed case class DataParam( case Right(`dataEndpointType`) => logger.debug(s"Input - dataEndpointType: $data") - maybeEndpoint match { + optEndpoint match { case None => err(s"No value for endpoint") case Some(endpointUrl) => for { @@ -210,13 +223,6 @@ sealed case class DataParam( } - private def applyInference( - rdf: Resource[IO, RDFReasoner], - inference: Option[String], - dataFormat: Format - ): Resource[IO, RDFReasoner] = - extendWithInference(rdf, inference) - private def extendWithInference( resourceRdf: Resource[IO, RDFReasoner], optInference: Option[String] @@ -238,6 +244,13 @@ sealed case class DataParam( } } + private def applyInference( + rdf: Resource[IO, RDFReasoner], + inference: Option[String], + dataFormat: Format + ): Resource[IO, RDFReasoner] = + extendWithInference(rdf, inference) + private def mkBaseIri( maybeBase: Option[String] ): Either[String, Option[IRI]] = maybeBase match { @@ -271,57 +284,51 @@ sealed case class DataParam( } -object DataParam extends LazyLogging { +private[api] object Data extends LazyLogging { + + /** Regular expressions used for identifying if a custom endpoint was given for this data sample + */ + private val endpointRegex: Regex = "Endpoint: (.+)".r - private[api] def mkData( + def mkData( partsMap: PartsMap, relativeBase: Option[IRI] - ): IO[(Resource[IO, RDFReasoner], DataParam)] = { + ): IO[(Resource[IO, RDFReasoner], Data)] = { - val r: IO[(Resource[IO, RDFReasoner], DataParam)] = for { - dp <- mkDataParam(partsMap) - pair <- dp.getData(relativeBase) + val r: IO[(Resource[IO, RDFReasoner], Data)] = for { + data <- mkData(partsMap) + pair <- data.getData(relativeBase) 
} yield { val (optStr, rdf) = pair - (rdf, dp.copy(data = optStr)) + (rdf, data.copy(data = optStr)) } r } - private[api] def mkDataParam(partsMap: PartsMap): IO[DataParam] = for { - data <- partsMap.optPartValue(DataParameter.name) - compoundData <- partsMap.optPartValue(CompoundDataParameter.name) - dataURL <- partsMap.optPartValue(DataURLParameter.name) - dataFile <- partsMap.optPartValue(DataFileParameter.name) - endpoint <- partsMap.optPartValue(EndpointParameter.name) - dataFormat <- getDataFormat(DataFormatParameter.name, partsMap) - inference <- partsMap.optPartValue(InferenceParameter.name) - targetDataFormat <- getDataFormat(TargetDataFormatParameter.name, partsMap) - activeDataTab <- partsMap.optPartValue(ActiveDataTabParameter.name) + def mkData(partsMap: PartsMap): IO[Data] = for { + data <- partsMap.optPartValue(DataParameter.name) + dataURL <- partsMap.optPartValue(DataURLParameter.name) + dataFile <- partsMap.optPartValue(DataFileParameter.name) + compoundData <- partsMap.optPartValue(CompoundDataParameter.name) + endpoint <- partsMap.optPartValue(EndpointParameter.name) + dataFormat <- DataFormat.fromRequestParams( + DataFormatParameter.name, + partsMap + ) + inference <- partsMap.optPartValue(InferenceParameter.name) + targetDataFormat <- DataFormat.fromRequestParams( + TargetDataFormatParameter.name, + partsMap + ) + activeDataTab <- partsMap.optPartValue(ActiveDataTabParameter.name) } yield { - logger.debug(s"data: $data") - logger.debug(s"compoundData: $compoundData") - logger.debug(s"dataFormat: $dataFormat") - logger.debug(s"dataURL: $dataURL") - logger.debug(s"endpoint: $endpoint") - logger.debug(s"activeDataTab: $activeDataTab") - logger.debug(s"targetDataFormat: $targetDataFormat") - logger.debug(s"inference: $inference") - - val endpointRegex = "Endpoint: (.+)".r - val finalEndpoint = endpoint.fold(data match { - case None => None - case Some(str) => - str match { - case endpointRegex(ep) => Some(ep) - case _ => None - } - })(Some(_)) + 
val finalEndpoint = getEndpoint(endpoint) + val finalActiveDataTab = activeDataTab logger.debug(s"Final endpoint: $finalEndpoint") - val dp = DataParam( + val dp = Data( data, dataURL, dataFile, @@ -335,31 +342,70 @@ object DataParam extends LazyLogging { dp } - private def getDataFormat( - name: String, - partsMap: PartsMap - ): IO[Option[DataFormat]] = for { - maybeStr <- partsMap.optPartValue(name) - } yield maybeStr match { - case None => None - case Some(str) => - DataFormat - .fromString(str) - .fold( - err => { - logger.warn(s"Unsupported dataFormat for $name: $str") - None - }, - df => Some(df) - ) + // def mkData(partsMap: PartsMap): IO[Data] = for { + // data <- partsMap.optPartValue(DataParameter.name) + // compoundData <- partsMap.optPartValue(CompoundDataParameter.name) + // dataURL <- partsMap.optPartValue(DataURLParameter.name) + // dataFile <- partsMap.optPartValue(DataFileParameter.name) + // endpoint <- partsMap.optPartValue(EndpointParameter.name) + // dataFormat <- DataFormat.fromRequestParams( + // DataFormatParameter.name, + // partsMap + // ) + // inference <- partsMap.optPartValue(InferenceParameter.name) + // targetDataFormat <- DataFormat.fromRequestParams( + // TargetDataFormatParameter.name, + // partsMap + // ) + // activeDataTab <- partsMap.optPartValue(ActiveDataTabParameter.name) + // } yield { + // + // val finalEndpoint = getEndpoint(endpoint) + // + // val finalActiveDataTab = activeDataTab + // logger.debug(s"Final endpoint: $finalEndpoint") + // + // val dp = Data( + // data, + // dataURL, + // dataFile, + // finalEndpoint, + // dataFormat, + // inference, + // targetDataFormat, + // finalActiveDataTab, + // compoundData + // ) + // dp + // } + + /** @param endpointStr String containing the endpoint + * @param endpointRegex Regex used to look for the endpoint in the string + * @return Optionally, the endpoint contained in a given data string + */ + private def getEndpoint( + endpointStr: Option[String], + endpointRegex: Regex = 
endpointRegex + ): Option[String] = { + endpointStr match { + case None => None + case Some(endpoint) => + endpoint match { + case endpointRegex(endpoint) => Some(endpoint) + case _ => None + } + + } } - private[api] def empty: DataParam = - DataParam( + /** @return Empty data representation, with no inner data and all defaults to none + */ + def empty: Data = + Data( data = None, dataURL = None, dataFile = None, - maybeEndpoint = None, + optEndpoint = None, optDataFormat = None, inference = None, targetDataFormat = None, @@ -376,9 +422,11 @@ object DataParam extends LazyLogging { private[logic] object DataTab extends Enumeration { type DataTab = String - val TEXT = "#dataTextArea" - val URL = "#dataUrl" - val FILE = "#dataFile" + val TEXT = "#dataTextArea" + val URL = "#dataUrl" + val FILE = "#dataFile" + val COMPOUND = "#compoundData" + val ENDPOINT = "#dataEndpoint" val defaultActiveShapeMapTab: DataTab = TEXT } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index 6cbb9f0a..58287489 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -17,7 +17,7 @@ import es.weso.rdfshape.server.api.routes.data.logic.DataInfo.{ dataInfoFromString } import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.dataFormatOrDefault -import es.weso.rdfshape.server.api.routes.data.logic.{DataConversion, DataParam} +import es.weso.rdfshape.server.api.routes.data.logic.{Data, DataConversion} import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuery import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap @@ -110,7 +110,7 @@ class DataService(client: Client[IO]) req.decode[Multipart[IO]] { m => 
val partsMap = PartsMap(m.parts) for { - dataParam <- DataParam.mkData(partsMap, relativeBase) + dataParam <- Data.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataParam dataFormat = dataFormatOrDefault(dp.optDataFormat.map(_.name)) response <- dp.data match { @@ -160,7 +160,7 @@ class DataService(client: Client[IO]) req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { - dataParam <- DataParam.mkData(partsMap, relativeBase) + dataParam <- Data.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataParam targetFormat = dp.targetDataFormat.getOrElse(defaultDataFormat).name dataFormat = dp.optDataFormat.getOrElse(defaultDataFormat) @@ -214,7 +214,7 @@ class DataService(client: Client[IO]) for { /* TODO: an error is thrown on bad query URLs (IO.raise...), but it is * not controlled */ - dataParam <- DataParam.mkData(partsMap, relativeBase) + dataParam <- Data.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataParam maybeQuery <- SparqlQuery.getSparqlQuery(partsMap) @@ -261,7 +261,7 @@ class DataService(client: Client[IO]) req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { - maybeData <- DataParam.mkData(partsMap, relativeBase).attempt + maybeData <- Data.mkData(partsMap, relativeBase).attempt schemaEngine <- partsMap.optPartValue("schemaEngine") optSchemaFormatStr <- partsMap.optPartValue("schemaFormat") inference <- partsMap.optPartValue("inference") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala index eab5298a..2239af64 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/permalink/logic/Permalink.scala @@ -18,9 +18,4 @@ sealed case class Permalink( editionDate: Date ) -private[api] object Permalink extends LazyLogging { - - /** 
Placeholder value used for the permalink query whenever an empty target is issued/needed. - */ - private val emptyTargetValue = "" -} +private[api] object Permalink extends LazyLogging {} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala similarity index 93% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala index f64649d5..82bef377 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala @@ -9,12 +9,12 @@ import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.schema.{Schema, Schemas} +import es.weso.schema.{Schemas, Schema => SchemaW} import scala.io.Source import scala.util.Try -sealed case class SchemaParam( +sealed case class Schema( schema: Option[String], schemaURL: Option[String], schemaFile: Option[String], @@ -27,7 +27,7 @@ sealed case class SchemaParam( def getSchema( data: Option[RDFReasoner] - ): IO[(Option[String], Either[String, Schema])] = { + ): IO[(Option[String], Either[String, SchemaW])] = { logger.debug(s"activeSchemaTab: $activeSchemaTab") logger.debug(s"schemaEngine: $schemaEngine") @@ -39,14 +39,14 @@ sealed case class SchemaParam( case None => Right(SchemaTextAreaType) } logger.debug(s"inputType: $inputType") - val maybeSchema: IO[(Option[String], Either[String, Schema])] = + val maybeSchema: IO[(Option[String], Either[String, SchemaW])] = 
inputType match { case Right(`SchemaUrlType`) => logger.debug("Schema input type - SchemaUrlType") schemaURL match { case None => IO((None, Left(s"Non value for schemaURL"))) case Some(schemaUrl) => - val e: IO[(String, Schema)] = for { + val e: IO[(String, SchemaW)] = for { str <- IO.fromEither( Try(Source.fromURL(schemaUrl).mkString).toEither ) @@ -62,7 +62,7 @@ sealed case class SchemaParam( } yield (str, schema) e.attempt.map( _.fold( - s => (none[String], s.getMessage.asLeft[Schema]), + s => (none[String], s.getMessage.asLeft[SchemaW]), pair => { val (str, schema) = pair (Some(str), Right(schema)) @@ -171,15 +171,15 @@ sealed case class SchemaParam( } -object SchemaParam extends LazyLogging { +object Schema extends LazyLogging { private[api] def mkSchema( partsMap: PartsMap, data: Option[RDFReasoner] - ): IO[(Schema, SchemaParam)] = { - val result: IO[Either[String, (Schema, SchemaParam)]] = for { + ): IO[(SchemaW, Schema)] = { + val result: IO[Either[String, (SchemaW, Schema)]] = for { sp <- { - mkSchemaParam(partsMap) + mkSchema(partsMap) } eitherPair <- sp.getSchema(data).attempt resp <- eitherPair.fold( @@ -208,7 +208,7 @@ object SchemaParam extends LazyLogging { ) } - private[api] def mkSchemaParam(partsMap: PartsMap): IO[SchemaParam] = for { + private[api] def mkSchema(partsMap: PartsMap): IO[Schema] = for { schema <- partsMap.optPartValue(SchemaParameter.name) schemaURL <- partsMap.optPartValue(SchemaURLParameter.name) schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) @@ -225,7 +225,7 @@ object SchemaParam extends LazyLogging { ) activeSchemaTab <- partsMap.optPartValue(ActiveSchemaTabParameter.name) } yield { - SchemaParam( + Schema( schema = schema, schemaURL = schemaURL, schemaFile = schemaFile, @@ -237,8 +237,8 @@ object SchemaParam extends LazyLogging { ) } - private[api] def empty: SchemaParam = - SchemaParam( + private[api] def empty: Schema = + Schema( schema = None, schemaURL = None, schemaFile = None, diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index d6bbc136..2d80d9f9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -9,8 +9,7 @@ import es.weso.rdf.{InferenceEngine, RDFBuilder, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.api.routes.data.logic.DataParam -import es.weso.rdfshape.server.api.routes.schema.service.TriggerMode +import es.weso.rdfshape.server.api.routes.data.logic.Data import es.weso.schema.{Result, Schema, ShaclexSchema, ValidationTrigger} import es.weso.shacl.converter.Shacl2ShEx import es.weso.shapemaps.ShapeMap @@ -147,12 +146,12 @@ private[api] object SchemaOperations extends LazyLogging { relativeBase: Option[IRI], builder: RDFBuilder ): IO[(Result, Option[ValidationTrigger], Long)] = { - val dp = DataParam.empty.copy( + val dp = Data.empty.copy( data = Some(data), optDataFormat = optDataFormat, inference = optInference ) - val sp = SchemaParam.empty.copy( + val sp = Schema.empty.copy( schema = optSchema, schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), schemaEngine = optSchemaEngine diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala index f152a0a7..0c407b58 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.schema.service +package es.weso.rdfshape.server.api.routes.schema.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 24cbf966..d1b944df 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -9,9 +9,9 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.routes.data.logic.DataParam +import es.weso.rdfshape.server.api.routes.data.logic.Data import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaParam +import es.weso.rdfshape.server.api.routes.schema.logic.{Schema, TriggerMode} import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap @@ -120,7 +120,7 @@ class SchemaService(client: Client[IO]) val partsMap = PartsMap(m.parts) logger.info(s"POST info partsMap. 
$partsMap") val r: IO[Json] = for { - schemaPair <- SchemaParam.mkSchema(partsMap, None) + schemaPair <- Schema.mkSchema(partsMap, None) (schema, sp) = schemaPair } yield { schemaInfo(schema).toJson @@ -164,7 +164,7 @@ class SchemaService(client: Client[IO]) val partsMap = PartsMap(m.parts) logger.info(s"POST info partsMap. $partsMap") val r: IO[Json] = for { - schemaPair <- SchemaParam.mkSchema(partsMap, None) + schemaPair <- Schema.mkSchema(partsMap, None) (schema, sp) = schemaPair targetSchemaFormat <- optEither2f( @@ -211,7 +211,7 @@ class SchemaService(client: Client[IO]) { val partsMap = PartsMap(m.parts) val r: IO[Json] = for { - schemaPair <- SchemaParam.mkSchema(partsMap, None) + schemaPair <- Schema.mkSchema(partsMap, None) (schema, _) = schemaPair v <- schemaVisualize(schema) } yield { @@ -231,7 +231,7 @@ class SchemaService(client: Client[IO]) val partsMap = PartsMap(m.parts) logger.info(s"POST info partsMap. $partsMap") val r: IO[Json] = for { - schemaPair <- SchemaParam.mkSchema(partsMap, None) + schemaPair <- Schema.mkSchema(partsMap, None) (schema, _) = schemaPair } yield { schemaCytoscape(schema) @@ -284,13 +284,13 @@ class SchemaService(client: Client[IO]) { val partsMap = PartsMap(m.parts) val r = for { - dataPair <- DataParam.mkData(partsMap, relativeBase) + dataPair <- Data.mkData(partsMap, relativeBase) (resourceRdf, dp) = dataPair res <- for { emptyRes <- RDFAsJenaModel.empty vv <- (resourceRdf, emptyRes).tupled.use { case (rdf, builder) => for { - schemaPair <- SchemaParam.mkSchema(partsMap, Some(rdf)) + schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) (schema, _) = schemaPair maybeTriggerMode <- TriggerMode.getTriggerModeParam(partsMap) newRdf <- applyInference(rdf, dp.inference) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index 3bb9168e..08aa56e1 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -15,9 +15,9 @@ import io.circe.Json /** Data class representing a ShapeMap and its current source. * * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). - * @param shapeMapRaw Shapemap raw text + * @param shapeMapRaw Shapemap raw text * @param shapeMapFormat Shapemap format - * @param targetShapeMapFormat Shapemap target format (only present in conversion operations) + * @param targetShapeMapFormat Shapemap target format (only for conversion operations) * @param activeShapeMapTab Active tab, used to know which source the shapemap comes from */ sealed case class ShapeMap private ( @@ -119,12 +119,13 @@ private[api] object ShapeMap extends LazyLogging { } /** Create a ShapeMap instance, given its source and format - * @param shapeMapStr Optionally, the raw contents of the shapemap - * @param shapeMapUrl Optionally, the URL with the contents of the shapemap - * @param shapeMapFile Optionally, the file with the contents of the shapemap - * @param shapeMapFormat Optionally, the format of the shapemap + * + * @param shapeMapStr Optionally, the raw contents of the shapemap + * @param shapeMapUrl Optionally, the URL with the contents of the shapemap + * @param shapeMapFile Optionally, the file with the contents of the shapemap + * @param shapeMapFormat Optionally, the format of the shapemap * @param targetShapeMapFormat Optionally, the target format of the shapemap (for conversions) - * @param activeShapeMapTab Optionally, the indicator of the shapemap source (raw, url or file) + * @param activeShapeMapTab Optionally, the indicator of the shapemap source (raw, url or file) * @return */ def mkShapeMap( @@ -198,9 +199,7 @@ private[api] object ShapeMap extends LazyLogging { maybeShapeMap } - /** Empty shapemap 
representation, with no inner data and all defaults - * - * @return + /** @return Empty shapemap representation, with no inner data and all defaults */ private def empty: ShapeMap = ShapeMap( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala index 3c83d541..58151820 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala @@ -2,17 +2,17 @@ package es.weso.rdfshape.server.api.routes.wikibase.logic import cats.effect._ import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaParam +import es.weso.rdfshape.server.api.routes.schema.logic.Schema import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.WdSchemaParameter import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.wikibase._ -import es.weso.schema.{Schema, Schemas} +import es.weso.schema.{Schemas, Schema => SchemaW} import org.http4s._ import org.http4s.client._ import org.http4s.dsl.io._ case class WikibaseSchemaParam( - maybeSchemaParam: Option[SchemaParam], + maybeSchemaParam: Option[Schema], maybeEntitySchema: Option[String], schemaStr: Option[String], wikidata: Wikibase = Wikidata @@ -21,7 +21,7 @@ case class WikibaseSchemaParam( def getSchema( maybeData: Option[RDFReasoner], client: Client[IO] - ): IO[(Option[String], Either[String, Schema])] = { + ): IO[(Option[String], Either[String, SchemaW])] = { (maybeSchemaParam, maybeEntitySchema) match { case (None, None) => IO.pure((None, Left(s"No values for entity schema or schema"))) @@ -37,9 +37,9 @@ case class WikibaseSchemaParam( def schemaFromEntitySchema( es: String, client: Client[IO] - ): IO[(Option[String], 
Either[String, Schema])] = { + ): IO[(Option[String], Either[String, SchemaW])] = { val uriSchema = wikidata.schemaEntityUri(es) - val r: IO[(Schema, String)] = for { + val r: IO[(SchemaW, String)] = for { strSchema <- deref(uriSchema, client) schema <- Schemas.fromString(strSchema, "ShEXC", "ShEx") } yield (schema, strSchema) @@ -64,8 +64,8 @@ object WikibaseSchemaParam { partsMap: PartsMap, data: Option[RDFReasoner], client: Client[IO] - ): IO[(Schema, WikibaseSchemaParam)] = { - val r: IO[(Schema, WikibaseSchemaParam)] = for { + ): IO[(SchemaW, WikibaseSchemaParam)] = { + val r: IO[(SchemaW, WikibaseSchemaParam)] = for { sp <- mkWikibaseSchemaParam(partsMap) p <- sp.getSchema(data, client) (maybeStr, maybeSchema) = p @@ -87,7 +87,7 @@ object WikibaseSchemaParam { maybeSchema <- partsMap.eitherPartValue(WdSchemaParameter.name) // endpointStr <- partsMap.partValue("endpoint") // endpoint <- either2f(IRI.fromString(endpointStr)) - maybeSchemaParam <- SchemaParam.mkSchemaParam(partsMap).attempt + maybeSchemaParam <- Schema.mkSchema(partsMap).attempt result <- (maybeSchema, maybeSchemaParam) match { case (Left(_), Right(sp)) => ok_f(WikibaseSchemaParam.empty.copy(maybeSchemaParam = Some(sp))) From 904cbb9ddda9d5233632d2d766609298d4b75ded Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Wed, 15 Sep 2021 17:47:16 +0200 Subject: [PATCH 22/32] Before data refactor (2). 
--- docs/api-usage/usage_examples.md | 2 +- .../server/api/merged/DataElement.scala | 6 +- .../server/api/routes/data/logic/Data.scala | 44 ++++++------- .../routes/endpoint/logic/SparqlQuery.scala | 30 ++++----- .../api/routes/schema/logic/Schema.scala | 24 +++---- .../api/routes/schema/logic/TriggerMode.scala | 33 +++++++--- .../routes/schema/service/SchemaService.scala | 2 +- .../api/routes/shapemap/logic/ShapeMap.scala | 30 ++++----- .../IncomingRequestParameters.scala | 64 +++++++++---------- 9 files changed, 124 insertions(+), 111 deletions(-) diff --git a/docs/api-usage/usage_examples.md b/docs/api-usage/usage_examples.md index dada56d1..b63d21b8 100644 --- a/docs/api-usage/usage_examples.md +++ b/docs/api-usage/usage_examples.md @@ -37,7 +37,7 @@ the [Cytoscape component](https://github.com/plotly/react-cytoscapejs). ``` curl -k -i -X POST -H "Content-type:multipart/form-data" 'https://api.rdfshape.weso.es/api/data/convert' \ - --form-string 'dataURL=http://tb.plazi.org/GgServer/rdf/9D767B515A0BFFC3C0F7919FF301FC8D' \ + --form-string 'dataUrl=http://tb.plazi.org/GgServer/rdf/9D767B515A0BFFC3C0F7919FF301FC8D' \ --form-string 'dataFormatUrl=rdf/xml' --form-string 'targetDataFormat=JSON' \ ``` diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala index b9efbaf9..88752ec9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala @@ -88,7 +88,7 @@ object DataElement extends LazyLogging { ) case DataUrl => Json.obj( - ("dataURL", Json.fromString(a.dataUrl.getOrElse(""))), + ("dataUrl", Json.fromString(a.dataUrl.getOrElse(""))), ("activeTab", Json.fromString(a.activeDataTab.id)), ("dataFormat", Json.fromString(a.dataFormat.name)) ) @@ -133,9 +133,9 @@ object DataElement extends LazyLogging { dataFile 
<- cursor.downField("dataFile").as[String] } yield base.copy(dataFile = Some(dataFile)) case DataUrl => - logger.debug("Data element decoder - DaraUrl") + logger.debug("Data element decoder - DataUrl") for { - dataUrl <- cursor.downField("dataURL").as[String] + dataUrl <- cursor.downField("dataUrl").as[String] } yield base.copy(dataUrl = Some(dataUrl)) case DataEndpoint => logger.debug("Data element decoder - DataEndpoint") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala index 982284b7..0127f118 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala @@ -21,7 +21,7 @@ import scala.util.matching.Regex /** Data class representing RDF data and its current source * * @param data - * @param dataURL + * @param dataUrl * @param dataFile * @param optEndpoint * @param optDataFormat Data format @@ -32,7 +32,7 @@ import scala.util.matching.Regex */ sealed case class Data( data: Option[String], - dataURL: Option[String], + dataUrl: Option[String], dataFile: Option[String], optEndpoint: Option[String], optDataFormat: Option[DataFormat], @@ -60,7 +60,7 @@ sealed case class Data( case Some(a) => parseDataTab(a) case None if compoundData.isDefined => Right(compoundDataType) case None if data.isDefined => Right(dataTextAreaType) - case None if dataURL.isDefined => Right(dataUrlType) + case None if dataUrl.isDefined => Right(dataUrlType) case None if dataFile.isDefined => Right(dataFileType) case None if optEndpoint.isDefined => Right(dataEndpointType) case None => Right(dataTextAreaType) @@ -81,8 +81,8 @@ sealed case class Data( case Right(`dataUrlType`) => logger.debug(s"Input - dataUrlType: $data") - dataURL match { - case None => err(s"Non value for dataURL") + dataUrl match { + case None => err(s"Non 
value for dataUrl") case Some(dataUrl) => for { rdf <- rdfFromUri( @@ -223,6 +223,13 @@ sealed case class Data( } + private def applyInference( + rdf: Resource[IO, RDFReasoner], + inference: Option[String], + dataFormat: Format + ): Resource[IO, RDFReasoner] = + extendWithInference(rdf, inference) + private def extendWithInference( resourceRdf: Resource[IO, RDFReasoner], optInference: Option[String] @@ -244,13 +251,6 @@ sealed case class Data( } } - private def applyInference( - rdf: Resource[IO, RDFReasoner], - inference: Option[String], - dataFormat: Format - ): Resource[IO, RDFReasoner] = - extendWithInference(rdf, inference) - private def mkBaseIri( maybeBase: Option[String] ): Either[String, Option[IRI]] = maybeBase match { @@ -307,7 +307,7 @@ private[api] object Data extends LazyLogging { def mkData(partsMap: PartsMap): IO[Data] = for { data <- partsMap.optPartValue(DataParameter.name) - dataURL <- partsMap.optPartValue(DataURLParameter.name) + dataUrl <- partsMap.optPartValue(DataUrlParameter.name) dataFile <- partsMap.optPartValue(DataFileParameter.name) compoundData <- partsMap.optPartValue(CompoundDataParameter.name) endpoint <- partsMap.optPartValue(EndpointParameter.name) @@ -320,7 +320,7 @@ private[api] object Data extends LazyLogging { TargetDataFormatParameter.name, partsMap ) - activeDataTab <- partsMap.optPartValue(ActiveDataTabParameter.name) + activeDataTab <- partsMap.optPartValue(ActiveDataSourceParameter.name) } yield { val finalEndpoint = getEndpoint(endpoint) @@ -330,7 +330,7 @@ private[api] object Data extends LazyLogging { val dp = Data( data, - dataURL, + dataUrl, dataFile, finalEndpoint, dataFormat, @@ -345,7 +345,7 @@ private[api] object Data extends LazyLogging { // def mkData(partsMap: PartsMap): IO[Data] = for { // data <- partsMap.optPartValue(DataParameter.name) // compoundData <- partsMap.optPartValue(CompoundDataParameter.name) - // dataURL <- partsMap.optPartValue(DataURLParameter.name) + // dataUrl <- 
partsMap.optPartValue(DataURLParameter.name) // dataFile <- partsMap.optPartValue(DataFileParameter.name) // endpoint <- partsMap.optPartValue(EndpointParameter.name) // dataFormat <- DataFormat.fromRequestParams( @@ -367,7 +367,7 @@ private[api] object Data extends LazyLogging { // // val dp = Data( // data, - // dataURL, + // dataUrl, // dataFile, // finalEndpoint, // dataFormat, @@ -403,7 +403,7 @@ private[api] object Data extends LazyLogging { def empty: Data = Data( data = None, - dataURL = None, + dataUrl = None, dataFile = None, optEndpoint = None, optDataFormat = None, @@ -414,12 +414,12 @@ private[api] object Data extends LazyLogging { ) } -/** Enumeration of the different possible Schema tabs sent by the client. - * The tab sent indicates the API if the schema was sent in raw text, as a URL +/** Enumeration of the different possible Data sources sent by the client. + * The source sent indicates the API if the schema was sent in raw text, as a URL * to be fetched or as a text file containing the schema. - * In case the client submits the schema in several formats, the selected tab will indicate the preferred one. + * In case the client submits the data in several formats, the selected source will indicate the preferred one. 
*/ -private[logic] object DataTab extends Enumeration { +private[logic] object DataSource extends Enumeration { type DataTab = String val TEXT = "#dataTextArea" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala index 3bd144d2..cb08c8d9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala @@ -2,15 +2,15 @@ package es.weso.rdfshape.server.api.routes.endpoint.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQueryTab.{ +import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuerySource.{ SparqlQueryTab, defaultActiveQueryTab } import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ - ActiveQueryTabParameter, + ActiveQuerySourceParameter, QueryFileParameter, QueryParameter, - QueryURLParameter + QueryUrlParameter } import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents @@ -41,9 +41,9 @@ private[api] object SparqlQuery extends LazyLogging { ): IO[Either[String, SparqlQuery]] = for { queryStr <- partsMap.optPartValue(QueryParameter.name) - queryUrl <- partsMap.optPartValue(QueryURLParameter.name) + queryUrl <- partsMap.optPartValue(QueryUrlParameter.name) queryFile <- partsMap.optPartValue(QueryFileParameter.name) - activeQueryTab <- partsMap.optPartValue(ActiveQueryTabParameter.name) + activeQueryTab <- partsMap.optPartValue(ActiveQuerySourceParameter.name) _ = logger.debug( s"Getting SPARQL from params. 
Query tab: $activeQueryTab" @@ -77,28 +77,28 @@ private[api] object SparqlQuery extends LazyLogging { val maybeQuery: Either[String, SparqlQuery] = activeQueryTab.getOrElse( defaultActiveQueryTab ) match { - case SparqlQueryTab.TEXT => + case SparqlQuerySource.TEXT => queryStr match { case None => Left("No value for the query string") case Some(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQueryTab.TEXT)) + Right(SparqlQuery(queryRaw, SparqlQuerySource.TEXT)) } - case SparqlQueryTab.URL => + case SparqlQuerySource.URL => queryUrl match { case None => Left(s"No value for the query URL") case Some(queryUrl) => getUrlContents(queryUrl) match { case Right(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQueryTab.URL)) + Right(SparqlQuery(queryRaw, SparqlQuerySource.URL)) case Left(err) => Left(err) } } - case SparqlQueryTab.FILE => + case SparqlQuerySource.FILE => queryFile match { case None => Left(s"No value for the query file") case Some(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQueryTab.FILE)) + Right(SparqlQuery(queryRaw, SparqlQuerySource.FILE)) } case other => @@ -113,12 +113,12 @@ private[api] object SparqlQuery extends LazyLogging { } -/** Enumeration of the different possible QueryTabs sent by the client. - * The tab sent indicates the API if the Query was sent in raw text, as a URL +/** Enumeration of the different possible Query sources by the client. + * The source sent indicates the API if the Query was sent in raw text, as a URL * to be fetched or as a text file containing the query. - * In case the client submits the query in several formats, the selected tab will indicate the one format. + * In case the client submits the query in several formats, the selected source will indicate the one format. 
*/ -private[logic] object SparqlQueryTab extends Enumeration { +private[logic] object SparqlQuerySource extends Enumeration { type SparqlQueryTab = String val TEXT = "#queryTextArea" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala index 82bef377..c96d3632 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala @@ -16,7 +16,7 @@ import scala.util.Try sealed case class Schema( schema: Option[String], - schemaURL: Option[String], + schemaUrl: Option[String], schemaFile: Option[String], schemaFormat: SchemaFormat, schemaEngine: Option[String], @@ -34,7 +34,7 @@ sealed case class Schema( val inputType = activeSchemaTab match { case Some(a) => parseSchemaTab(a) case None if schema.isDefined => Right(SchemaTextAreaType) - case None if schemaURL.isDefined => Right(SchemaUrlType) + case None if schemaUrl.isDefined => Right(SchemaUrlType) case None if schemaFile.isDefined => Right(SchemaFileType) case None => Right(SchemaTextAreaType) } @@ -43,8 +43,8 @@ sealed case class Schema( inputType match { case Right(`SchemaUrlType`) => logger.debug("Schema input type - SchemaUrlType") - schemaURL match { - case None => IO((None, Left(s"Non value for schemaURL"))) + schemaUrl match { + case None => IO((None, Left(s"Non value for schemaUrl"))) case Some(schemaUrl) => val e: IO[(String, SchemaW)] = for { str <- IO.fromEither( @@ -210,7 +210,7 @@ object Schema extends LazyLogging { private[api] def mkSchema(partsMap: PartsMap): IO[Schema] = for { schema <- partsMap.optPartValue(SchemaParameter.name) - schemaURL <- partsMap.optPartValue(SchemaURLParameter.name) + schemaUrl <- partsMap.optPartValue(SchemaUrlParameter.name) schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) optSchemaFormat 
<- SchemaFormat.fromRequestParams( SchemaFormatParameter.name, @@ -223,11 +223,11 @@ object Schema extends LazyLogging { targetSchemaFormat <- partsMap.optPartValue( TargetSchemaFormatParameter.name ) - activeSchemaTab <- partsMap.optPartValue(ActiveSchemaTabParameter.name) + activeSchemaTab <- partsMap.optPartValue(ActiveSchemaSourceParameter.name) } yield { Schema( schema = schema, - schemaURL = schemaURL, + schemaUrl = schemaUrl, schemaFile = schemaFile, schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), schemaEngine = schemaEngine, @@ -240,7 +240,7 @@ object Schema extends LazyLogging { private[api] def empty: Schema = Schema( schema = None, - schemaURL = None, + schemaUrl = None, schemaFile = None, schemaFormat = SchemaFormat.defaultFormat, schemaEngine = None, @@ -251,12 +251,12 @@ object Schema extends LazyLogging { } -/** Enumeration of the different possible Schema tabs sent by the client. - * The tab sent indicates the API if the schema was sent in raw text, as a URL +/** Enumeration of the different possible Schema sources sent by the client. + * The source sent indicates to the API whether the schema was sent in raw text, as a URL * to be fetched or as a text file containing the schema. - * In case the client submits the schema in several formats, the selected tab will indicate the preferred one. + * In case the client submits the schema in several formats, the selected source will indicate the preferred one. 
*/ -private[logic] object SchemaTab extends Enumeration { +private[logic] object SchemaSource extends Enumeration { type SchemaTab = String val TEXT = "#schemaTextArea" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala index 0c407b58..28067572 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala @@ -12,7 +12,7 @@ import es.weso.shapemaps.{ShapeMap => ShapeMapW} /** Data class representing a TriggerMode and its current source. * * @param triggerModeStr Trigger mode name - * @param shapeMap Inner shapemap associated to the TriggerMode + * @param shapeMap Inner shapemap associated to the TriggerMode */ sealed case class TriggerMode private ( triggerModeStr: String, @@ -33,7 +33,7 @@ private[api] object TriggerMode extends LazyLogging { * @param partsMap Request's parameters * @return Either the trigger mode or an error message */ - def getTriggerModeParam( + def mkTriggerMode( partsMap: PartsMap ): IO[Either[String, TriggerMode]] = { for { @@ -48,7 +48,7 @@ private[api] object TriggerMode extends LazyLogging { partsMap ) activeShapeMapTab <- partsMap.optPartValue( - ActiveShapeMapTabParameter.name + ActiveShapeSourceTabParameter.name ) // Get companion shapemap @@ -62,15 +62,28 @@ private[api] object TriggerMode extends LazyLogging { ) } yield { - maybeShapeMap.map(shapeMap => - TriggerMode( - triggerModeStr = - triggerMode.getOrElse(ApiDefaults.defaultTriggerMode), - shapeMap = shapeMap - ) - ) + maybeShapeMap.flatMap(sm => mkTriggerMode(triggerMode, sm)) } } + /** Create a TriggerMode instance, given its mode and shapemap + * + * @param triggerMode Optionally, the trigger mode name + * @param shapeMap Optionally, the inner shapemap associated to the 
TriggerMode + * @return A new TriggerMode based on the given parameters + */ + def mkTriggerMode( + triggerMode: Option[String], + shapeMap: ShapeMap + ): Either[String, TriggerMode] = { + Right( + TriggerMode( + triggerModeStr = triggerMode.getOrElse(ApiDefaults.defaultTriggerMode), + shapeMap = shapeMap + ) + ) + + } + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index d1b944df..1cfb7381 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -292,7 +292,7 @@ class SchemaService(client: Client[IO]) for { schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) (schema, _) = schemaPair - maybeTriggerMode <- TriggerMode.getTriggerModeParam(partsMap) + maybeTriggerMode <- TriggerMode.mkTriggerMode(partsMap) newRdf <- applyInference(rdf, dp.inference) ret <- maybeTriggerMode match { case Left(err) => diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index 08aa56e1..b62b5c85 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -3,7 +3,7 @@ package es.weso.rdfshape.server.api.routes.shapemap.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.format.dataFormats.{Compact, ShapeMapFormat} -import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapTab.ShapeMapTab +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.ShapeMapTab import 
es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.error.exceptions.JsonConversionException @@ -98,7 +98,7 @@ private[api] object ShapeMap extends LazyLogging { partsMap ) activeShapeMapTab <- partsMap.optPartValue( - ActiveShapeMapTabParameter.name + ActiveShapeSourceTabParameter.name ) _ = logger.debug( @@ -126,7 +126,7 @@ private[api] object ShapeMap extends LazyLogging { * @param shapeMapFormat Optionally, the format of the shapemap * @param targetShapeMapFormat Optionally, the target format of the shapemap (for conversions) * @param activeShapeMapTab Optionally, the indicator of the shapemap source (raw, url or file) - * @return + * @return A new ShapeMap based on the given parameters */ def mkShapeMap( shapeMapStr: Option[String], @@ -144,9 +144,9 @@ private[api] object ShapeMap extends LazyLogging { // Create the shapemap depending on the client's selected method val maybeShapeMap: Either[String, ShapeMap] = activeShapeMapTab.getOrElse( - ShapeMapTab.defaultActiveShapeMapTab + ShapeMapSource.defaultActiveShapeMapTab ) match { - case ShapeMapTab.TEXT => + case ShapeMapSource.TEXT => shapeMapStr match { case None => Left("No value for the ShapeMap string") case Some(shapeMapRaw) => @@ -155,12 +155,12 @@ private[api] object ShapeMap extends LazyLogging { shapeMapRaw, format, targetFormat, - ShapeMapTab.TEXT + ShapeMapSource.TEXT ) ) } - case ShapeMapTab.URL => + case ShapeMapSource.URL => shapeMapUrl match { case None => Left(s"No value for the shapemap URL") case Some(url) => @@ -171,13 +171,13 @@ private[api] object ShapeMap extends LazyLogging { shapeMapRaw, format, targetFormat, - ShapeMapTab.URL + ShapeMapSource.URL ) ) case Left(err) => Left(err) } } - case ShapeMapTab.FILE => + case ShapeMapSource.FILE => shapeMapFile match { case None => Left(s"No value for the shapemap file") case Some(shapeMapRaw) => @@ -186,7 +186,7 @@ private[api] 
object ShapeMap extends LazyLogging { shapeMapRaw, format, targetFormat, - ShapeMapTab.FILE + ShapeMapSource.FILE ) ) } @@ -206,17 +206,17 @@ object ShapeMap extends LazyLogging { shapeMapRaw = emptyShapeMapValue, shapeMapFormat = defaultShapeMapFormat, targetShapeMapFormat = defaultShapeMapFormat, - activeShapeMapTab = ShapeMapTab.defaultActiveShapeMapTab + activeShapeMapTab = ShapeMapSource.defaultActiveShapeMapTab ) } -/** Enumeration of the different possible ShapeMap tabs sent by the client. - * The tab sent indicates the API if the shapemap was sent in raw text, as a URL +/** Enumeration of the different possible ShapeMap sources sent by the client. + * The source sent indicates to the API whether the shapemap was sent in raw text, as a URL * to be fetched or as a text file containing the shapemap. - * In case the client submits the shapemap in several formats, the selected tab will indicate the preferred one. + * In case the client submits the shapemap in several formats, the selected source will indicate the preferred one. 
*/ -private[api] object ShapeMapTab extends Enumeration { +private[api] object ShapeMapSource extends Enumeration { type ShapeMapTab = String val TEXT = "#shapeMapTextArea" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala index aedcd7ff..a2ef823b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -12,13 +12,13 @@ object IncomingRequestParameters { // String constants representing each parameter name expected by the server lazy val data = "data" lazy val compoundData = "compoundData" - lazy val dataURL = "dataURL" + lazy val dataUrl = "dataUrl" lazy val dataFile = "dataFile" lazy val dataFormat = "dataFormat" lazy val targetDataFormat = "targetDataFormat" lazy val schema = "schema" - lazy val schemaURL = "schemaURL" + lazy val schemaUrl = "schemaUrl" lazy val schemaFile = "schemaFile" lazy val schemaFormat = "schemaFormat" lazy val schemaEngine = "schemaEngine" @@ -34,21 +34,21 @@ object IncomingRequestParameters { lazy val shapeMap = "shapeMap" lazy val shape_map = "shape-map" - lazy val shapeMapURL = "shapeMapURL" + lazy val shapeMapUrl = "shapeMapUrl" lazy val shapeMapFile = "shapeMapFile" lazy val shapeMapFormat = "shapeMapFormat" lazy val targetShapeMapFormat = "targetShapeMapFormat" lazy val query = "query" - lazy val queryURL = "queryURL" + lazy val queryUrl = "queryUrl" lazy val queryFile = "queryFile" lazy val endpoint = "endpoint" - lazy val activeDataTab = "activeDataTab" - lazy val activeSchemaTab = "activeSchemaTab" - lazy val activeShapeMapTab = "activeShapeMapTab" - lazy val activeQueryTab = "activeQueryTab" + lazy val activeDataSource = "activeDataSource" + lazy val activeSchemaSource = 
"activeSchemaSource" + lazy val activeShapeMapSource = "activeShapeMapSource" + lazy val activeQuerySource = "activeQuerySource" lazy val wdEntity = "wdEntity" lazy val wdSchema = "wdSchema" @@ -58,7 +58,7 @@ object IncomingRequestParameters { lazy val hostname = "hostname" lazy val view = "view" lazy val examples = "examples" - lazy val manifestURL = "manifestURL" + lazy val manifestUrl = "manifestUrl" lazy val language = "language" lazy val label = "label" lazy val limit = "limit" @@ -80,9 +80,9 @@ object IncomingRequestParameters { /** Parameter expected to contain a URL where RDF data is located */ - object DataURLParameter - extends OptionalQueryParamDecoderMatcher[String](dataURL) { - val name: String = dataURL + object DataUrlParameter + extends OptionalQueryParamDecoderMatcher[String](dataUrl) { + val name: String = dataUrl } /** Parameter expected to contain a file where RDF data is located @@ -115,9 +115,9 @@ object IncomingRequestParameters { /** Parameter expected to contain a URL where a validation schema is located */ - object SchemaURLParameter - extends OptionalQueryParamDecoderMatcher[String](schemaURL) { - val name: String = schemaURL + object SchemaUrlParameter + extends OptionalQueryParamDecoderMatcher[String](schemaUrl) { + val name: String = schemaUrl } /** Parameter expected to contain the contents a file where a validation schema is located @@ -213,8 +213,8 @@ object IncomingRequestParameters { /** Parameter expected to contain a URL where a shapemap is located */ object ShapeMapUrlParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMapURL) { - val name: String = shapeMapURL + extends OptionalQueryParamDecoderMatcher[String](shapeMapUrl) { + val name: String = shapeMapUrl } /** Parameter expected to contain a file where a shapemap is located @@ -247,9 +247,9 @@ object IncomingRequestParameters { /** Parameter expected to contain a URL where a SPARQL query is located */ - object QueryURLParameter - extends 
OptionalQueryParamDecoderMatcher[String](queryURL) { - val name: String = queryURL + object QueryUrlParameter + extends OptionalQueryParamDecoderMatcher[String](queryUrl) { + val name: String = queryUrl } /** Parameter expected to contain a file where a SPARQL query is located @@ -269,33 +269,33 @@ object IncomingRequestParameters { /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in data-related operations */ - object ActiveDataTabParameter - extends OptionalQueryParamDecoderMatcher[String](activeDataTab) { - val name: String = activeDataTab + object ActiveDataSourceParameter + extends OptionalQueryParamDecoderMatcher[String](activeDataSource) { + val name: String = activeDataSource } /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in schema-related operations */ - object ActiveSchemaTabParameter - extends OptionalQueryParamDecoderMatcher[String](activeSchemaTab) { - val name: String = activeSchemaTab + object ActiveSchemaSourceParameter + extends OptionalQueryParamDecoderMatcher[String](activeSchemaSource) { + val name: String = activeSchemaSource } /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in shapemap-related operations */ - object ActiveShapeMapTabParameter - extends OptionalQueryParamDecoderMatcher[String](activeShapeMapTab) { - val name: String = activeShapeMapTab + object ActiveShapeSourceTabParameter + extends OptionalQueryParamDecoderMatcher[String](activeShapeMapSource) { + val name: String = activeShapeMapSource } /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in query-related operations */ - object ActiveQueryTabParameter - extends OptionalQueryParamDecoderMatcher[String](activeQueryTab) { - 
val name: String = activeQueryTab + object ActiveQuerySourceParameter + extends OptionalQueryParamDecoderMatcher[String](activeQuerySource) { + val name: String = activeQuerySource } /** Parameter expected to contain a valid identifier/name/label of a wikidata entity From f75e13a0c44c335d386741f4b86735239bc01a6b Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Tue, 28 Sep 2021 18:54:50 +0200 Subject: [PATCH 23/32] Refactor datatypes. --- .../rdfshape/server/api/format/Format.scala | 20 + .../server/api/merged/ActiveDataTab.scala | 69 --- .../server/api/merged/CompoundData.scala | 66 --- .../server/api/merged/DataElement.scala | 181 -------- .../server/api/routes/data/logic/Data.scala | 432 ------------------ .../routes/data/logic/DataOperations.scala | 30 -- .../api/routes/data/logic/DataSource.scala | 3 + .../{ => operations}/DataConversion.scala | 6 +- .../logic/{ => operations}/DataExtract.scala | 0 .../logic/{ => operations}/DataInfo.scala | 0 .../logic/operations/DataOperations.scala | 3 + .../api/routes/data/logic/types/Data.scala | 88 ++++ .../data/logic/types/EndpointData.scala | 119 +++++ .../routes/data/logic/types/SimpleData.scala | 310 +++++++++++++ .../logic/types/merged/CompoundData.scala | 122 +++++ .../logic/types}/merged/MergedModels.scala | 6 +- .../api/routes/data/service/DataService.scala | 21 +- .../logic/{ => query}/SparqlQuery.scala | 41 +- .../logic/query/SparqlQuerySource.scala | 16 + .../endpoint/service/EndpointService.scala | 9 +- .../api/routes/schema/logic/Schema.scala | 18 +- .../schema/logic/SchemaOperations.scala | 4 +- .../routes/schema/service/SchemaService.scala | 4 +- .../api/routes/shapemap/logic/ShapeMap.scala | 142 +++--- .../shapemap/logic/ShapeMapSource.scala | 16 + .../rdfshape/server/html2rdf/HTML2RDF.scala | 4 +- 26 files changed, 809 insertions(+), 921 deletions(-) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala delete mode 100644 
modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/{ => operations}/DataConversion.scala (97%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/{ => operations}/DataExtract.scala (100%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/{ => operations}/DataInfo.scala (100%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/EndpointData.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/SimpleData.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/CompoundData.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/{ => routes/data/logic/types}/merged/MergedModels.scala (99%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/{ => query}/SparqlQuery.scala (68%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala 
diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index cf8035c7..b23cf9c1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -4,6 +4,7 @@ import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.format.dataFormats.DataFormat import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import io.circe.{Decoder, Encoder, HCursor, Json} import org.http4s.MediaType /** Generic interface for any format any data transmitted to/from the API may have @@ -33,6 +34,7 @@ object Format extends FormatCompanion[Format] { } /** Static utilities to be used with formats + * * @tparam F Specific format type that we are handling */ trait FormatCompanion[F <: Format] extends LazyLogging { @@ -45,6 +47,24 @@ trait FormatCompanion[F <: Format] extends LazyLogging { */ val availableFormats: List[F] + implicit val encodeFormat: Encoder[F] = (format: F) => { + Json.obj( + ("name", Json.fromString(format.name)), + ( + "mimeType", + Json.fromString( + s"${format.mimeType.mainType}/${format.mimeType.subType}" + ) + ) + ) + } + + implicit val decodeFormat: Decoder[F] = (cursor: HCursor) => + for { + formatStr <- cursor.downField("name").as[String] + format: F = fromString(formatStr).toOption.getOrElse(defaultFormat) + } yield format + /** Try to build a Format object from a request's parameters * * @param parameter Name of the parameter with the format name diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala deleted file mode 100644 index 623b754b..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/ActiveDataTab.scala +++ /dev/null @@ -1,69 +0,0 
@@ -package es.weso.rdfshape.server.api.merged - -import com.typesafe.scalalogging.LazyLogging - -/** Abstract representation of the active tab in the client that sent the request. - * Used to distinguish whether the submitted text, URL or file should be prioritized in case several are present. - */ -sealed abstract class ActiveDataTab { - - /** Unique identifier of the active tab - */ - val id: String -} - -/** Indicates the client uploaded RDF data as raw text - */ -case object DataTextArea extends ActiveDataTab { - override val id = "#dataTextArea" -} - -/** Indicates the client uploaded RDF data by indicating the URL where it lives - */ -case object DataUrl extends ActiveDataTab { - override val id = "#dataUrl" -} - -case object DataEndpoint extends ActiveDataTab { - override val id = "#dataEndpoint" -} - -/** Indicates the client uploaded RDF data by uploading a file containing it - */ -case object DataFile extends ActiveDataTab { - override val id = "#dataFile" -} - -object ActiveDataTab extends LazyLogging { - - /** Default value to use if none is present - */ - lazy val default: ActiveDataTab = dataTabValues.head - - /** All possible values the DataTab may acquire - */ - private val dataTabValues = - List(DataTextArea, DataUrl, DataFile, DataEndpoint) - - /** Given a tab identifier (name), find and returning the corresponding data tab with that id - * - * @param tabId Id of the Tab to be returned - * @return The corresponding ActiveDataTab if the tabName exists, otherwise an error message - */ - def fromString(tabId: String): Either[String, ActiveDataTab] = { - - dataTabValues.collectFirst { - case value if value.id == tabId => value - } match { - case Some(v) => Right(v) - case None => - val errorMsg = mkErrorMessage(tabId) - logger.error(errorMsg) - Left(errorMsg) - } - } - - private def mkErrorMessage(id: String): String = { - s"Unknown value for activeDataTab: $id. 
Available values: ${dataTabValues.map(_.id).mkString(",")}" - } -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala deleted file mode 100644 index 6ef60b3d..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/CompoundData.scala +++ /dev/null @@ -1,66 +0,0 @@ -package es.weso.rdfshape.server.api.merged - -import cats.effect._ -import cats.implicits._ -import es.weso.rdf.RDFReasoner -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdfshape.server.api.merged.DataElement._ -import io.circe._ -import io.circe.parser._ -import io.circe.syntax._ - -/** Data class representing the merge of several RDF data into a single compound - * - * @param elements List of the individual DataElements conforming a CompoundData instance - */ -case class CompoundData(elements: List[DataElement]) { - def toRDF: IO[Resource[IO, RDFReasoner]] = { - val rs = elements.map(_.toRDF).sequence - - def combine( - ls: List[Resource[IO, RDFAsJenaModel]] - ): Resource[IO, List[RDFAsJenaModel]] = ls.sequence - - /** Whole compound value resulting from merging the individual elements - */ - val value = rs.flatMap(lsRs => - IO(combine(lsRs).evalMap(ls => MergedModels.fromList(ls))) - ) - value - } -} - -object CompoundData { - - def fromString(str: String): Either[String, CompoundData] = for { - json <- parse(str).leftMap(pe => - s"CompoundData.fromString: error parsing $str as JSON: $pe" - ) - cd <- json - .as[CompoundData] - .leftMap(de => - s"CompoundData.fromString: error decoding json to compoundData: $de\nJSON obtained: \n${json.spaces2}" - ) - } yield cd - - /** Encoder used to transform CompoundData instances to JSON values - */ - implicit val encodeCompoundData: Encoder[CompoundData] = - (a: CompoundData) => Json.fromValues(a.elements.map(_.asJson)) - - /** Decoder used to extract CompoundData instances from JSON values - */ - 
implicit val decodeCompoundData: Decoder[CompoundData] = - (cursor: HCursor) => { - cursor.values match { - case None => - DecodingFailure("Empty list for compound data", List()) - .asLeft[CompoundData] - case Some(vs) => - val xs: Decoder.Result[List[DataElement]] = - vs.toList.map(_.as[DataElement]).sequence - xs.map(CompoundData(_)) - } - } - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala deleted file mode 100644 index 88752ec9..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/DataElement.scala +++ /dev/null @@ -1,181 +0,0 @@ -package es.weso.rdfshape.server.api.merged - -import cats.effect._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.format.dataFormats.DataFormat -import io.circe._ - -/** Represent each chunk of RDF data submitted (mainly on RDF-merging operations) - * - * @param data Raw RDF data (plain text) - * @param dataUrl URL containing the RDF data - * @param endpoint RDF data endpoint to use - * @param dataFile File containing the RDF data - * @param dataFormat Format of the RDF data - * @param activeDataTab Active tab in the client's view, used to choose which RDF source should be read - */ -case class DataElement( - data: Option[String], - dataUrl: Option[String], - dataFile: Option[String], - endpoint: Option[String], - dataFormat: DataFormat, - activeDataTab: ActiveDataTab -) extends LazyLogging { - - /** Given an RDF source of data sent by a client, try to parse it and get the RDF model representation - * - * @return RDF (Jena) model of RDF data received from a client - * @note Iteratively compares the different possible values of activeDataTab against the one the client attached to decide an extracting strategy - */ - 
def toRDF: IO[Resource[IO, RDFAsJenaModel]] = activeDataTab match { - - case DataTextArea => - for { - rdf <- RDFAsJenaModel.fromString( - data.getOrElse(""), - dataFormat.name, - None, - useBNodeLabels = false - ) - } yield rdf - - case DataUrl => - for { - rdf <- RDFAsJenaModel.fromURI( - dataUrl.getOrElse(""), - dataFormat.name, - None - ) - } yield rdf - - case _ => - logger.error(s"Data element error") - IO.raiseError( - new RuntimeException( - s"Not implemented yet compound with activeTab: $activeDataTab" - ) - ) - } -} - -object DataElement extends LazyLogging { - - /** Empty and most basic data element - */ - val empty: DataElement = DataElement( - data = None, - dataUrl = None, - dataFile = None, - endpoint = None, - ApiDefaults.defaultDataFormat, - ActiveDataTab.default - ) - - /** Encoder used to transform DataElement instances to JSON values - */ - implicit val encodeDataElement: Encoder[DataElement] = - (a: DataElement) => - a.activeDataTab match { - case DataTextArea => - Json.obj( - ("data", Json.fromString(a.data.getOrElse(""))), - ("activeTab", Json.fromString(a.activeDataTab.id)), - ("dataFormat", Json.fromString(a.dataFormat.name)) - ) - case DataUrl => - Json.obj( - ("dataUrl", Json.fromString(a.dataUrl.getOrElse(""))), - ("activeTab", Json.fromString(a.activeDataTab.id)), - ("dataFormat", Json.fromString(a.dataFormat.name)) - ) - case DataFile => - Json.obj( - ("dataFile", Json.fromString(a.dataFile.getOrElse(""))), - ("activeTab", Json.fromString(a.activeDataTab.id)), - ("dataFormat", Json.fromString(a.dataFormat.name)) - ) - case DataEndpoint => - Json.obj( - ("endpoint", Json.fromString(a.endpoint.getOrElse(""))), - ("activeTab", Json.fromString(a.activeDataTab.id)), - ("dataFormat", Json.fromString(a.dataFormat.name)) - ) - } - - /** Decoder used to extract DataElement instances from JSON values - */ - implicit val decodeDataElement: Decoder[DataElement] = - new Decoder[DataElement] { - final def apply(cursor: HCursor): 
Decoder.Result[DataElement] = { - for { - dataActiveTab <- parseActiveTab(cursor) - dataFormat <- parseDataFormat(cursor) - base = DataElement.empty.copy( - dataFormat = dataFormat, - activeDataTab = dataActiveTab - ) - rest <- dataActiveTab match { - case DataTextArea => - logger.debug("Data element decoder - DataTextArea") - for { - data <- cursor.downField("data").as[String] - } yield base.copy(data = Some(data)) - case DataFile => - logger.debug("Data element decoder - DataFile") - /* TODO: either send the file text through the request (bad idea) - * or decode the file appropriately */ - logger.debug(cursor.downField("dataFile").toString) - for { - dataFile <- cursor.downField("dataFile").as[String] - } yield base.copy(dataFile = Some(dataFile)) - case DataUrl => - logger.debug("Data element decoder - DataUrl") - for { - dataUrl <- cursor.downField("dataUrl").as[String] - } yield base.copy(dataUrl = Some(dataUrl)) - case DataEndpoint => - logger.debug("Data element decoder - DataEndpoint") - for { - endpoint <- cursor.downField("endpoint").as[String] - } yield base.copy(endpoint = Some(endpoint)) - } - } yield rest - } - - /** @param cursor Cursor to operate JSON abstractions - * @return The ActiveDataTab specified in a JSON encoded DataElement - */ - private def parseActiveTab( - cursor: HCursor - ): Decoder.Result[ActiveDataTab] = { - for { - activeTabId <- cursor.downField("activeTab").as[String] orElse Right( - ActiveDataTab.default.id - ) - a <- ActiveDataTab - .fromString(activeTabId) - .leftMap(DecodingFailure(_, List())) - } yield a - } - - /** @param cursor Cursor to operate JSON abstractions - * @return The DataFormat specified in a JSON encoded DataElement - */ - private def parseDataFormat(cursor: HCursor): Decoder.Result[DataFormat] = - for { - dataFormatStr <- cursor - .downField("dataFormat") - .as[String] - .orElse(Right(ApiDefaults.defaultDataFormat.name)) - dataFormat <- DataFormat - .fromString(dataFormatStr) - .leftMap(s => - 
DecodingFailure(s"Non supported dataFormat: $s", List()) - ) - } yield dataFormat - } -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala deleted file mode 100644 index 0127f118..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/Data.scala +++ /dev/null @@ -1,432 +0,0 @@ -package es.weso.rdfshape.server.api.routes.data.logic - -import cats.effect._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena._ -import es.weso.rdf.nodes.IRI -import es.weso.rdf.{InferenceEngine, RDFReasoner} -import es.weso.rdfshape.server.api.format._ -import es.weso.rdfshape.server.api.format.dataFormats.DataFormat -import es.weso.rdfshape.server.api.merged.CompoundData -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.rdfshape.server.html2rdf.HTML2RDF -import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents -import es.weso.utils.IOUtils.err - -import java.net.URI -import scala.util.matching.Regex - -/** Data class representing RDF data and its current source - * - * @param data - * @param dataUrl - * @param dataFile - * @param optEndpoint - * @param optDataFormat Data format - * @param inference Data inference - * @param targetDataFormat Data target format (only for conversion operations) - * @param activeDataTab Active tab, used to know which source the data comes from - * @param compoundData - */ -sealed case class Data( - data: Option[String], - dataUrl: Option[String], - dataFile: Option[String], - optEndpoint: Option[String], - optDataFormat: Option[DataFormat], - inference: Option[String], - targetDataFormat: Option[DataFormat], - activeDataTab: Option[String], - compoundData: Option[String] -) extends LazyLogging { - val dataFormat: 
DataFormat = optDataFormat.getOrElse( - DataFormat.defaultFormat - ) - - /** get RDF data from data parameters - * - * @return a pair where the first value can be Some(string) - * if it has string representation and the second parameter - * is the resource with the RDF data - */ - def getData( - relativeBase: Option[IRI] - ): IO[(Option[String], Resource[IO, RDFReasoner])] = { - val base = relativeBase.map(_.str) - logger.debug(s"ActiveDataTab: $activeDataTab") - val inputType = activeDataTab match { - case Some(a) => parseDataTab(a) - case None if compoundData.isDefined => Right(compoundDataType) - case None if data.isDefined => Right(dataTextAreaType) - case None if dataUrl.isDefined => Right(dataUrlType) - case None if dataFile.isDefined => Right(dataFileType) - case None if optEndpoint.isDefined => Right(dataEndpointType) - case None => Right(dataTextAreaType) - } - logger.debug(s"Input type: $inputType") - val x: IO[(Option[String], Resource[IO, RDFReasoner])] = inputType match { - - case Right(`compoundDataType`) => - logger.debug(s"Input - compoundDataType: $data") - for { - cd <- IO.fromEither( - CompoundData - .fromString(compoundData.getOrElse("")) - .leftMap(s => new RuntimeException(s)) - ) - res <- cd.toRDF - } yield (None, res) - - case Right(`dataUrlType`) => - logger.debug(s"Input - dataUrlType: $data") - dataUrl match { - case None => err(s"Non value for dataUrl") - case Some(dataUrl) => - for { - rdf <- rdfFromUri( - new URI(dataUrl), - dataFormat, - base - ) - } yield (None, rdf) - } - case Right(`dataFileType`) => - logger.debug(s"Input - dataFileType: $data") - dataFile match { - case None => err(s"No value for dataFile") - case Some(dataStr) => - for { - iriBase <- mkBase(base) - res <- RDFAsJenaModel.fromString( - dataStr, - dataFormat.name, - iriBase - ) - res2 = extendWithInference(res, inference) - } yield (None, res2) - } - - case Right(`dataEndpointType`) => - logger.debug(s"Input - dataEndpointType: $data") - optEndpoint match { - case 
None => err(s"No value for endpoint") - case Some(endpointUrl) => - for { - endpoint <- Endpoint.fromString(endpointUrl) - // newRdf <- extendWithInference(endpoint, inference) - } yield (None, Resource.pure[IO, RDFReasoner](endpoint)) - } - case Right(`dataTextAreaType`) => - logger.debug(s"Input - dataTextAreaType: $data") - data match { - case None => RDFAsJenaModel.empty.flatMap(e => IO((None, e))) - case d @ Some(data) => - val x: IO[(Option[String], Resource[IO, RDFReasoner])] = for { - res <- rdfFromString(data, dataFormat, base) - res2 = extendWithInference( - res.onFinalize(showFinalize), - inference - ) - } yield (d, res2) - x - } - - case Right(other) => - val msg = s"Unknown value for activeDataTab: $other" - logger.error(msg) - err(msg) - - case Left(msg) => - logger.error(msg) - err(msg) - } - x - } - - def parseDataTab(tab: String): Either[String, DataInputType] = { - logger.debug(s"parseDataTab: tab = $tab") - val inputTypes = - List(dataUrlType, dataFileType, dataEndpointType, dataTextAreaType) - inputTypes.find(_.id == tab) match { - case Some(x) => Right(x) - case None => - Left( - s"Wrong value of tab: $tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" - ) - } - } - - private def showFinalize: IO[Unit] = IO { - logger.debug("Closing RDF data") - } - - private def rdfFromString( - str: String, - format: Format, - base: Option[String] - ): IO[Resource[IO, RDFReasoner]] = { - logger.debug(s"RDF from string with format: $format") - format.name match { - case formatName if HTML2RDF.availableExtractorNames contains formatName => - IO( - HTML2RDF.extractFromString(str, formatName) - ) /*for { - eitherRdf <- - } yield eitherRdf */ - case _ => - for { - baseIri <- mkBase(base) - res <- RDFAsJenaModel.fromChars(str, format.name, baseIri) - } yield res - } - } - - private def mkBase(base: Option[String]): IO[Option[IRI]] = base match { - case None => IO(None) - case Some(str) => - IRI - .fromString(str) - .fold( - e => IO.raiseError(new 
RuntimeException(s"Cannot get IRI from $str")), - (iri: IRI) => IO(Some(iri)) - ) - } - - private def rdfFromUri( - uri: URI, - format: Format, - base: Option[String] - ): IO[Resource[IO, RDFReasoner]] = { - - getUrlContents(uri.toString) match { - case Left(errMsg) => IO.raiseError(new RuntimeException(errMsg)) - case _ => - format.name.toLowerCase match { - case formatName - if HTML2RDF.availableExtractorNames contains formatName => - IO( - HTML2RDF.extractFromUrl( - uri.toString, - formatName - ) - ) - case _ => - for { - baseIri <- mkBase(base) - res <- RDFAsJenaModel.fromURI(uri.toString, format.name, baseIri) - } yield res - } - - } - - } - - private def applyInference( - rdf: Resource[IO, RDFReasoner], - inference: Option[String], - dataFormat: Format - ): Resource[IO, RDFReasoner] = - extendWithInference(rdf, inference) - - private def extendWithInference( - resourceRdf: Resource[IO, RDFReasoner], - optInference: Option[String] - ): Resource[IO, RDFReasoner] = { - logger.debug(s"Applying inference $optInference") - optInference match { - case None => resourceRdf - case Some(str) => - InferenceEngine.fromString(str) match { - case Right(engine) => - resourceRdf.evalMap(rdf => rdf.applyInference(engine)) - case Left(err) => - // TODO: Check how to invoke using Resource.raiseError... 
- throw new RuntimeException( - s"Error parsing inference engine param ($str): $err" - ) - } - - } - } - - private def mkBaseIri( - maybeBase: Option[String] - ): Either[String, Option[IRI]] = maybeBase match { - case None => Right(None) - case Some(str) => IRI.fromString(str).map(Some(_)) - } - - sealed abstract class DataInputType { - val id: String - } - - case object dataUrlType extends DataInputType { - override val id = "#dataUrl" - } - - case object dataFileType extends DataInputType { - override val id = "#dataFile" - } - - case object dataEndpointType extends DataInputType { - override val id = "#dataEndpoint" - } - - case object dataTextAreaType extends DataInputType { - override val id = "#dataTextArea" - } - - case object compoundDataType extends DataInputType { - override val id = "#compoundData" - } - -} - -private[api] object Data extends LazyLogging { - - /** Regular expressions used for identifying if a custom endpoint was given for this data sample - */ - private val endpointRegex: Regex = "Endpoint: (.+)".r - - def mkData( - partsMap: PartsMap, - relativeBase: Option[IRI] - ): IO[(Resource[IO, RDFReasoner], Data)] = { - - val r: IO[(Resource[IO, RDFReasoner], Data)] = for { - data <- mkData(partsMap) - pair <- data.getData(relativeBase) - } yield { - val (optStr, rdf) = pair - (rdf, data.copy(data = optStr)) - } - r - } - - def mkData(partsMap: PartsMap): IO[Data] = for { - data <- partsMap.optPartValue(DataParameter.name) - dataUrl <- partsMap.optPartValue(DataUrlParameter.name) - dataFile <- partsMap.optPartValue(DataFileParameter.name) - compoundData <- partsMap.optPartValue(CompoundDataParameter.name) - endpoint <- partsMap.optPartValue(EndpointParameter.name) - dataFormat <- DataFormat.fromRequestParams( - DataFormatParameter.name, - partsMap - ) - inference <- partsMap.optPartValue(InferenceParameter.name) - targetDataFormat <- DataFormat.fromRequestParams( - TargetDataFormatParameter.name, - partsMap - ) - activeDataTab <- 
partsMap.optPartValue(ActiveDataSourceParameter.name) - } yield { - - val finalEndpoint = getEndpoint(endpoint) - - val finalActiveDataTab = activeDataTab - logger.debug(s"Final endpoint: $finalEndpoint") - - val dp = Data( - data, - dataUrl, - dataFile, - finalEndpoint, - dataFormat, - inference, - targetDataFormat, - finalActiveDataTab, - compoundData - ) - dp - } - - // def mkData(partsMap: PartsMap): IO[Data] = for { - // data <- partsMap.optPartValue(DataParameter.name) - // compoundData <- partsMap.optPartValue(CompoundDataParameter.name) - // dataUrl <- partsMap.optPartValue(DataURLParameter.name) - // dataFile <- partsMap.optPartValue(DataFileParameter.name) - // endpoint <- partsMap.optPartValue(EndpointParameter.name) - // dataFormat <- DataFormat.fromRequestParams( - // DataFormatParameter.name, - // partsMap - // ) - // inference <- partsMap.optPartValue(InferenceParameter.name) - // targetDataFormat <- DataFormat.fromRequestParams( - // TargetDataFormatParameter.name, - // partsMap - // ) - // activeDataTab <- partsMap.optPartValue(ActiveDataTabParameter.name) - // } yield { - // - // val finalEndpoint = getEndpoint(endpoint) - // - // val finalActiveDataTab = activeDataTab - // logger.debug(s"Final endpoint: $finalEndpoint") - // - // val dp = Data( - // data, - // dataUrl, - // dataFile, - // finalEndpoint, - // dataFormat, - // inference, - // targetDataFormat, - // finalActiveDataTab, - // compoundData - // ) - // dp - // } - - /** @param endpointStr String containing the endpoint - * @param endpointRegex Regex used to look for the endpoint in the string - * @return Optionally, the endpoint contained in a given data string - */ - private def getEndpoint( - endpointStr: Option[String], - endpointRegex: Regex = endpointRegex - ): Option[String] = { - endpointStr match { - case None => None - case Some(endpoint) => - endpoint match { - case endpointRegex(endpoint) => Some(endpoint) - case _ => None - } - - } - } - - /** @return Empty data 
representation, with no inner data and all defaults to none - */ - def empty: Data = - Data( - data = None, - dataUrl = None, - dataFile = None, - optEndpoint = None, - optDataFormat = None, - inference = None, - targetDataFormat = None, - activeDataTab = None, - compoundData = None - ) -} - -/** Enumeration of the different possible Data sources sent by the client. - * The source sent indicates the API if the schema was sent in raw text, as a URL - * to be fetched or as a text file containing the schema. - * In case the client submits the data in several formats, the selected source will indicate the preferred one. - */ -private[logic] object DataSource extends Enumeration { - type DataTab = String - - val TEXT = "#dataTextArea" - val URL = "#dataUrl" - val FILE = "#dataFile" - val COMPOUND = "#compoundData" - val ENDPOINT = "#dataEndpoint" - - val defaultActiveShapeMapTab: DataTab = TEXT -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala deleted file mode 100644 index f7700791..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataOperations.scala +++ /dev/null @@ -1,30 +0,0 @@ -package es.weso.rdfshape.server.api.routes.data.logic - -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.PrefixMap -import es.weso.schema.DataFormats -import io.circe.Json - -/** Static utilities used by the {@link es.weso.rdfshape.server.api.routes.data.service.DataService} - * to operate on RDF data - */ -private[api] object DataOperations extends LazyLogging { - - /** @param df Data format - * @return The given data format or the default one in case none was provided - */ - def dataFormatOrDefault(df: Option[String]): String = - df.getOrElse(DataFormats.defaultFormatName) - - /** Convert a given prefix map to JSON format for API operations - * - * @param prefixMap Input 
prefix map - * @return JSON representation of the prefix map - */ - private[api] def prefixMap2Json(prefixMap: PrefixMap): Json = { - Json.fromFields(prefixMap.pm.map { case (prefix, iri) => - (prefix.str, Json.fromString(iri.getLexicalForm)) - }) - } - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala new file mode 100644 index 00000000..24d209c7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala @@ -0,0 +1,3 @@ +package es.weso.rdfshape.server.api.routes.data.logic object DataSource { + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala similarity index 97% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala index b7f649e0..ecd43f3a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataConversion.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala @@ -6,7 +6,7 @@ import es.weso.rdf.RDFReasoner import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} import es.weso.rdfshape.server.api.format.dataFormats.DataFormat -import es.weso.rdfshape.server.api.merged.CompoundData +import es.weso.rdfshape.server.api.routes.data.logic.data.CompoundData import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.utils.IOUtils.{either2io, err} import guru.nidi.graphviz.engine.{Format, Graphviz} @@ -83,9 +83,9 @@ private[api] object DataConversion extends 
LazyLogging { err(s"dataConvert: no data and no compoundData parameters") case Some(compoundDataStr) => for { - ecd <- either2io(CompoundData.fromString(compoundDataStr)) + ecd <- either2io(CompoundData.fromJsonString(compoundDataStr)) cd <- cnvEither(ecd, str => s"dataConvert: Error: $str") - result <- cd.toRDF.flatMap( + result <- cd.toRdf.flatMap( _.use(rdf => rdfConvert(rdf, None, dataFormat, targetFormat).attempt.map( _.fold(exc => Left(exc.getMessage), dc => Right(dc)) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala similarity index 100% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataExtract.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala similarity index 100% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataInfo.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala new file mode 100644 index 00000000..387bff38 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala @@ -0,0 +1,3 @@ +package es.weso.rdfshape.server.api.routes.data.logic.data.operations object DataOperations { + +} diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala new file mode 100644 index 00000000..4be85149 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala @@ -0,0 +1,88 @@ +package es.weso.rdfshape.server.api.routes.data.logic.data + +import cats.effect.{IO, Resource} +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.RDFReasoner +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ + CompoundDataParameter, + EndpointParameter +} +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import io.circe.{Decoder, Encoder, HCursor, Json} + +/** Common trait to all data, whichever its nature (single, compound, endpoint...) + */ +trait Data { + + /** Source where the data comes from + */ + val dataSource: DataSource + + /** Given an RDF source of data, try to parse it and get the RDF model representation + * + * @return RDF logical model of the data contained + */ + def toRdf(relativeBase: Option[IRI] = None): IO[Resource[IO, RDFReasoner]] +} + +object Data extends DataCompanion[Data] { + + /** Dummy implementation meant to be overridden + */ + override val emptyData: Data = SimpleData.emptyData + + /** Dummy implementation meant to be overridden + */ + override implicit val encodeData: Encoder[Data] = _ => Json.fromString("") + + /** Dummy implementation meant to be overridden + */ + override implicit val decodeData: Decoder[Data] = (_: HCursor) => + Right(emptyData) + + /** General implementation delegating on subclasses + */ + override def mkData(partsMap: PartsMap): IO[Either[String, Data]] = for { + compoundData <- partsMap.optPartValue(CompoundDataParameter.name) + paramEndpoint <- 
partsMap.optPartValue(EndpointParameter.name) + + maybeData <- { + // Create one of: Simple Data, Compound Data or Endpoint Data + // 1. Compound data + if(compoundData.isDefined) CompoundData.mkData(partsMap) + // 2. Endpoint data + else if(paramEndpoint.isDefined) EndpointData.mkData(partsMap) + // 3. Simple data or unknown + else SimpleData.mkData(partsMap) + } + + } yield maybeData +} + +/** Static utilities to be used with Data representations + * + * @tparam D Specific data representation to be handled + */ +trait DataCompanion[D <: Data] extends LazyLogging { + + /** Empty instance of the data representation in use + */ + val emptyData: D + + /** Encoder used to transform Data instances to JSON values + */ + implicit val encodeData: Encoder[D] + + /** Decoder used to extract Data instances from JSON values + */ + implicit val decodeData: Decoder[D] + + /** Given a request's parameters, try to extract an instance of Data (type D) from them + * + * @param partsMap Request's parameters + * @return Either the Data instance or an error message + */ + def mkData(partsMap: PartsMap): IO[Either[String, D]] +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/EndpointData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/EndpointData.scala new file mode 100644 index 00000000..2c0a9897 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/EndpointData.scala @@ -0,0 +1,119 @@ +package es.weso.rdfshape.server.api.routes.data.logic.data + +import cats.effect.{IO, Resource} +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource +import 
es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ + DataFormatParameter, + EndpointParameter +} +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import io.circe.syntax.EncoderOps +import io.circe.{Decoder, Encoder, HCursor, Json} + +import scala.util.matching.Regex + +/** RDF data obtained from a given endpoint + * + * @param endpoint IRI with the RDF data + * @param dataFormat Data format + */ +case class EndpointData( + endpoint: IRI, + dataFormat: DataFormat = DataFormat.defaultFormat +) extends Data + with LazyLogging { + + override val dataSource: DataSource = DataSource.ENDPOINT + + override def toRdf( + relativeBase: Option[IRI] + ): IO[Resource[IO, RDFAsJenaModel]] = { + RDFAsJenaModel.fromIRI(this.endpoint, dataFormat.name, relativeBase) + } +} + +private[api] object EndpointData extends DataCompanion[EndpointData] { + override lazy val emptyData: EndpointData = EndpointData(IRI(defaultIri)) + private val defaultIri = "http://www.example.org" + + /** Regular expressions used for identifying if a custom endpoint was given for this data sample + */ + private val endpointRegex: Regex = "Endpoint: (.+)".r + + override implicit val encodeData: Encoder[EndpointData] = + (data: EndpointData) => + Json.obj( + ("endpoint", Json.fromString(data.endpoint.str)), + ("source", Json.fromString(DataSource.ENDPOINT)), + ("format", data.dataFormat.asJson) + ) + + override implicit val decodeData: Decoder[EndpointData] = (cursor: HCursor) => + { + for { + endpoint <- cursor.downField("endpoint").as[String] + + dataFormat <- cursor + .downField("format") + .as[DataFormat] + .orElse(Right(ApiDefaults.defaultDataFormat)) + + base = EndpointData.emptyData.copy( + endpoint = IRI.fromString(endpoint).getOrElse(defaultIri), + dataFormat = dataFormat + ) + + } yield base + } + + override def mkData(partsMap: PartsMap): IO[Either[String, EndpointData]] = + for { + // Parse params + paramEndpoint <- partsMap.optPartValue(EndpointParameter.name) + 
paramFormat <- DataFormat.fromRequestParams( + DataFormatParameter.name, + partsMap + ) + + // Get final endpoint and format + endpoint = getEndpoint(paramEndpoint) + format = paramFormat.getOrElse(ApiDefaults.defaultDataFormat) + + // Try to create data + maybeData: Either[String, EndpointData] = // 2. Endpoint data + if(endpoint.isDefined) { + logger.debug(s"RDF Data received - Endpoint Data: ${endpoint.get}") + IRI + .fromString(endpoint.get) + .fold( + err => Left(s"Could not read endpoint data: $err"), + iri => Right(EndpointData(iri, format)) + ) + + } else Left("No endpoint provided") + } yield maybeData + + /** @param endpointStr String containing the endpoint + * @param endpointRegex Regex used to look for the endpoint in the string + * @return Optionally, the endpoint contained in a given data string + */ + private def getEndpoint( + endpointStr: Option[String], + endpointRegex: Regex = endpointRegex + ): Option[String] = { + endpointStr match { + case None => None + case Some(endpoint) => + endpoint match { + case endpointRegex(endpoint) => Some(endpoint) + case _ => None + } + + } + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/SimpleData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/SimpleData.scala new file mode 100644 index 00000000..8ed45eaf --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/SimpleData.scala @@ -0,0 +1,310 @@ +package es.weso.rdfshape.server.api.routes.data.logic.data + +import cats.effect._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.jena._ +import es.weso.rdf.nodes.IRI +import es.weso.rdf.{InferenceEngine, NONE, RDFReasoner} +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource +import 
es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.html2rdf.HTML2RDF +import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents +import es.weso.utils.IOUtils.err +import io.circe._ +import io.circe.syntax.EncoderOps + +/** Data class representing a single RDF data instance with its current format and source + * + * @note Invalid initial data is accepted, but may cause errors when operating with it. + * @param dataRaw RDF data raw text + * @param optEndpoint TODO: remove eventually. Optionally, a data endpoint + * @param dataFormat Data format + * @param inference Data inference + * @param targetDataFormat Data target format (only for conversion operations) + * @param activeDataSource Active source, used to know which source the data comes from + */ +sealed case class SimpleData( + dataRaw: String, + optEndpoint: Option[String], + dataFormat: DataFormat, + inference: InferenceEngine, + targetDataFormat: Option[DataFormat], + activeDataSource: DataSource +) extends Data + with LazyLogging { + + override val dataSource: DataSource = activeDataSource + + /** Given an RDF source of data, try to get the RDF model representation + * + * @return RDF (Jena) model of RDF data received from a client + * @note Iteratively compares the different possible values of activeDataTab against the one the client attached to decide an extracting strategy + */ + override def toRdf( + relativeBase: Option[IRI] = None + ): IO[Resource[IO, RDFAsJenaModel]] = { + + if(dataRaw.isBlank) + RDFAsJenaModel.empty.flatMap(e => IO(e)) + else + for { + rdf <- rdfFromString(dataRaw, dataFormat, relativeBase.map(_.str)) + result = rdf.evalMap(rdf => rdf.applyInference(inference)) + } yield result + } + + /** @param data RDF data as a raw string + * @param format RDF data format + * @param base Base + * @return An RDF model extracted from the input data + */ + private 
def rdfFromString( + data: String, + format: DataFormat, + base: Option[String] + ): IO[Resource[IO, RDFAsJenaModel]] = { + logger.debug(s"RDF from string with format: $format") + val formatName = format.name + if(HTML2RDF.availableExtractorNames contains formatName) + IO(HTML2RDF.extractFromString(data, formatName)) + else + for { + baseIri <- mkBase(base) + res <- RDFAsJenaModel.fromChars(data, format.name, baseIri) + } yield res + + } + + /** @param base RDF base + * @return For a given base, an IRI representing it + */ + private def mkBase(base: Option[String]): IO[Option[IRI]] = base match { + case None => IO(None) + case Some(str) => + IRI + .fromString(str) + .fold( + _ => + IO.raiseError(new RuntimeException(s"Could not get IRI from $str")), + (iri: IRI) => IO(Some(iri)) + ) + } + + // def toRdfJena: IO[Resource[IO, RDFAsJenaModel]] = { + // for { + // rdf <- RDFAsJenaModel.fromString( + // dataRaw, + // dataFormat.name, + // None, + /* useBNodeLabels = if(activeDataSource != DataSource.URL) false else true */ + // ) + // } yield rdf + // } + + /** Get RDF data from data parameters + * + * @return The resource capable of reading the RDF data + */ + def getRdfResource( + relativeBase: Option[IRI] + ): IO[Resource[IO, RDFReasoner]] = { + val base = relativeBase.map(_.str) + + val x: IO[Resource[IO, RDFReasoner]] = + activeDataSource match { + + case DataSource.TEXT | DataSource.URL | DataSource.FILE => + logger.debug(s"Input - $activeDataSource: $dataRaw") + if(dataRaw.isBlank) + RDFAsJenaModel.empty.flatMap(e => IO(e)) + else + for { + rdf <- rdfFromString(dataRaw, dataFormat, base) + result = rdf.evalMap(rdf => rdf.applyInference(inference)) + } yield result + + case other => + val msg = s"Unknown value for data source: $other" + logger.error(msg) + err(msg) + } + x + } + +} + +private[api] object SimpleData + extends DataCompanion[SimpleData] + with LazyLogging { + + /** Empty data representation, with no inner data and all defaults to none + */ + override 
lazy val emptyData: SimpleData = + SimpleData( + dataRaw = emptyDataValue, + optEndpoint = None, + dataFormat = DataFormat.defaultFormat, + inference = NONE, + targetDataFormat = None, + activeDataSource = DataSource.defaultActiveDataSource + ) + + /** Placeholder value used for the raw data whenever an empty data is issued/needed. + */ + val emptyDataValue = "" + + /** Auxiliar encoder for data inference + */ + private implicit val encodeInference: Encoder[InferenceEngine] = + (inference: InferenceEngine) => { + Json.obj(("name", Json.fromString(inference.name))) + } + + /** Auxiliar decoder for data inference + */ + private implicit val decodeInference: Decoder[InferenceEngine] = + (cursor: HCursor) => + for { + inferenceName <- cursor.downField("name").as[String] + inference = InferenceEngine + .fromString(inferenceName) + .toOption + .getOrElse(NONE) + } yield inference + + override implicit val encodeData: Encoder[SimpleData] = (data: SimpleData) => + { + Json.obj( + ("data", Json.fromString(data.dataRaw)), + ("source", Json.fromString(data.activeDataSource)), + ("format", data.dataFormat.asJson), + ("targetFormat", data.targetDataFormat.asJson), + ("inference", data.inference.asJson) + ) + + } + override implicit val decodeData: Decoder[SimpleData] = + (cursor: HCursor) => { + for { + data <- cursor.downField("data").as[String] + + dataFormat <- cursor + .downField("format") + .as[DataFormat] + .orElse(Right(ApiDefaults.defaultDataFormat)) + + targetDataFormat <- cursor + .downField("targetFormat") + .as[Option[DataFormat]] + + dataInference <- + cursor + .downField("inference") + .as[Option[InferenceEngine]] + + dataSource <- cursor + .downField("source") + .as[DataSource] + .orElse(Right(DataSource.defaultActiveDataSource)) + + base = SimpleData.emptyData.copy( + dataRaw = data, + dataFormat = dataFormat, + targetDataFormat = targetDataFormat, + activeDataSource = dataSource, + inference = dataInference.getOrElse(NONE) + ) + + } yield base + } + + override 
def mkData(partsMap: PartsMap): IO[Either[String, SimpleData]] = + for { + dataStr <- partsMap.optPartValue(DataParameter.name) + dataUrl <- partsMap.optPartValue(DataUrlParameter.name) + dataFile <- partsMap.optPartValue(DataFileParameter.name) + paramFormat <- DataFormat.fromRequestParams( + DataFormatParameter.name, + partsMap + ) + paramInference <- partsMap.optPartValue(InferenceParameter.name) + targetDataFormat <- DataFormat.fromRequestParams( + TargetDataFormatParameter.name, + partsMap + ) + paramDataSource <- partsMap.optPartValue(ActiveDataSourceParameter.name) + + // Confirm final format and inference + inference = getInference(paramInference).getOrElse(NONE) + format = paramFormat.getOrElse(ApiDefaults.defaultDataFormat) + + // Check the client's selected source + dataSource = paramDataSource.getOrElse(DataSource.defaultActiveDataSource) + _ = logger.debug(s"RDF Data received - Source: $dataSource") + + // Base for the result + base = SimpleData.emptyData.copy( + dataFormat = format, + inference = inference, + targetDataFormat = targetDataFormat + ) + + // Create the data + maybeData: Either[String, SimpleData] = dataSource match { + case DataSource.TEXT => + dataStr match { + case None => Left("No value for the data string") + case Some(dataRaw) => + Right( + base.copy( + dataRaw = dataRaw.trim, + activeDataSource = DataSource.TEXT + ) + ) + } + case DataSource.URL => + dataUrl match { + case None => Left("No value for the data url") + case Some(url) => + getUrlContents(url) match { + case Right(dataRaw) => + Right( + base.copy( + dataRaw = dataRaw.trim, + activeDataSource = DataSource.URL + ) + ) + case Left(err) => Left(s"Could not read data: $err") + } + } + case DataSource.FILE => + dataFile match { + case None => Left("No value for the data file") + case Some(dataRaw) => + Right( + base.copy( + dataRaw = dataRaw.trim, + activeDataSource = DataSource.FILE + ) + ) + } + case other => + val msg = s"Unknown data source: $other" + logger.warn(msg) + 
Left(msg) + } + } yield maybeData + + /** @param inferenceStr String representing the inference value + * @return Optionally, the inference contained in a given data string + */ + private def getInference( + inferenceStr: Option[String] + ): Option[InferenceEngine] = { + inferenceStr.flatMap(InferenceEngine.fromString(_).toOption) + } + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/CompoundData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/CompoundData.scala new file mode 100644 index 00000000..1b196a15 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/CompoundData.scala @@ -0,0 +1,122 @@ +package es.weso.rdfshape.server.api.routes.data.logic.data.merged + +import cats.effect._ +import cats.implicits._ +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.RDFReasoner +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.data._ +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.CompoundDataParameter +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import io.circe._ +import io.circe.parser._ +import io.circe.syntax._ + +/** Data class representing the merge of several RDF data into a single compound + * + * @param elements List of the individual({@linkplain es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData SimpleData}) conforming a CompoundData instance + */ +case class CompoundData(elements: List[Data]) extends Data with LazyLogging { + + override val dataSource: DataSource = DataSource.COMPOUND + + /** @return RDF logical model of the data contained in the compound + */ + override def toRdf( + relativeBase: Option[IRI] = None + ): IO[Resource[IO, RDFReasoner]] = { + val jenaModels = 
getJenaModels.sequence + + // Whole compound value resulting from merging the individual elements + val value = jenaModels.flatMap(lsRs => + IO(lsRs.sequence.evalMap(ls => MergedModels.fromList(ls))) + ) + value + } + + /** Recursively process the data in the compound to extract all individual RDF Jena models to a single list + * + * @return List of RDF Jena models in each of the elements of the compound + */ + private def getJenaModels: List[IO[Resource[IO, RDFAsJenaModel]]] = { + elements.flatMap { + // Single data: straight extraction + case sd: SimpleData => List(sd.toRdf()) + case ed: EndpointData => List(ed.toRdf()) + case cd: CompoundData => + cd.getJenaModels // Compound data: recursive extraction + } + } + +} + +private[api] object CompoundData + extends DataCompanion[CompoundData] + with LazyLogging { + + override lazy val emptyData: CompoundData = CompoundData(List()) + + override def mkData(partsMap: PartsMap): IO[Either[String, CompoundData]] = + for { + // Parse params + compoundData <- partsMap.optPartValue(CompoundDataParameter.name) + + // Try to create data + maybeData: Either[String, CompoundData] = + if(compoundData.isDefined) { + logger.debug( + s"RDF Data received - Compound Data: ${compoundData.get}" + ) + CompoundData + .fromJsonString(compoundData.get) + .leftMap(err => s"Could not read compound data: $err") + } else Left("No compound data provided") + } yield maybeData + + /** Encoder used to transform CompoundData instances to JSON values + */ + override implicit val encodeData: Encoder[CompoundData] = + (a: CompoundData) => Json.fromValues(a.elements.map(_.asJson)) + + /** Decoder used to extract CompoundData instances from JSON values + */ + override implicit val decodeData: Decoder[CompoundData] = + (cursor: HCursor) => { + cursor.values match { + case None => + DecodingFailure("Empty list for compound data", List()) + .asLeft[CompoundData] + case Some(vs) => + val xs: Decoder.Result[List[Data]] = + vs.toList.map(_.as[Data]).sequence + 
xs.map(CompoundData(_)) + } + } + + /** Try to build a CompoundData instance from a JSON string + * + * @param jsonStr String in JSON format containing the information to build the CompoundData + * @return Either a new CompoundData instance or an error message + * @note Internally resorts to the decoding method in this class + */ + def fromJsonString(jsonStr: String): Either[String, CompoundData] = for { + json <- parse(jsonStr).leftMap(parseError => + s"CompoundData.fromString: error parsing $jsonStr as JSON: $parseError" + ) + cd <- json + .as[CompoundData] + .leftMap(decodeError => + s"Error decoding json to compoundData: $decodeError\nJSON obtained: \n${json.spaces2}" + ) + } yield cd + + // 1. Compound data + // if(compoundData.isDefined) { + // logger.debug(s"RDF Data received - Compound Data: ${compoundData.get}") + // CompoundData + // .fromJsonString(compoundData.get) + // .leftMap(err => s"Could not read compound data: $err") + // } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/MergedModels.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala similarity index 99% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/MergedModels.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala index f1466e1a..a1819ab6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/merged/MergedModels.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.merged +package es.weso.rdfshape.server.api.routes.data.logic.data.merged import _root_.es.weso.rdf._ import _root_.es.weso.rdf.jena.RDFAsJenaModel @@ -141,6 +141,8 @@ case class MergedModels( override def asRDFBuilder: RDFRead[RDFBuilder] = getModel.flatMap(_.asRDFBuilder) + def getModel: 
IO[RDFAsJenaModel] = mergedModel.get + override def rdfReaderName: String = s"MergedModels" override def sourceIRI: Option[IRI] = None @@ -148,8 +150,6 @@ case class MergedModels( override def hasPredicateWithSubject(n: RDFNode, p: IRI): RDFRead[Boolean] = getModel.flatMap(_.hasPredicateWithSubject(n, p)) - def getModel: IO[RDFAsJenaModel] = mergedModel.get - } /** Static utilities to work with several RDF models diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index 58287489..1917380d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -11,14 +11,15 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} import es.weso.rdfshape.server.api.routes.ApiService +import es.weso.rdfshape.server.api.routes.data.logic.DataConversion import es.weso.rdfshape.server.api.routes.data.logic.DataExtract.dataExtract import es.weso.rdfshape.server.api.routes.data.logic.DataInfo.{ dataInfoFromRdf, dataInfoFromString } import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.dataFormatOrDefault -import es.weso.rdfshape.server.api.routes.data.logic.{Data, DataConversion} -import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuery +import es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData +import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson @@ -110,10 +111,10 @@ class 
DataService(client: Client[IO]) req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { - dataParam <- Data.mkData(partsMap, relativeBase) - (resourceRdf, dp) = dataParam - dataFormat = dataFormatOrDefault(dp.optDataFormat.map(_.name)) - response <- dp.data match { + dataTuple <- SimpleData.getData(partsMap, relativeBase) + (resourceRdf, simpleData) = dataTuple + dataFormat = dataFormatOrDefault(simpleData.optDataFormat.map(_.name)) + response <- simpleData.data match { case Some(data) => for { result <- dataInfoFromString(data, dataFormat) @@ -126,7 +127,7 @@ class DataService(client: Client[IO]) for { maybeData <- resourceRdf.use(rdf => - dataInfoFromRdf(rdf, None, dp.optDataFormat) + dataInfoFromRdf(rdf, None, simpleData.optDataFormat) ) response <- maybeData match { case Left(err) => errorResponseJson(err, InternalServerError) @@ -160,7 +161,7 @@ class DataService(client: Client[IO]) req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { - dataParam <- Data.mkData(partsMap, relativeBase) + dataParam <- SimpleData.getData(partsMap, relativeBase) (resourceRdf, dp) = dataParam targetFormat = dp.targetDataFormat.getOrElse(defaultDataFormat).name dataFormat = dp.optDataFormat.getOrElse(defaultDataFormat) @@ -214,7 +215,7 @@ class DataService(client: Client[IO]) for { /* TODO: an error is thrown on bad query URLs (IO.raise...), but it is * not controlled */ - dataParam <- Data.mkData(partsMap, relativeBase) + dataParam <- SimpleData.getData(partsMap, relativeBase) (resourceRdf, dp) = dataParam maybeQuery <- SparqlQuery.getSparqlQuery(partsMap) @@ -261,7 +262,7 @@ class DataService(client: Client[IO]) req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { - maybeData <- Data.mkData(partsMap, relativeBase).attempt + maybeData <- SimpleData.getData(partsMap, relativeBase).attempt schemaEngine <- partsMap.optPartValue("schemaEngine") optSchemaFormatStr <- partsMap.optPartValue("schemaFormat") inference <- 
partsMap.optPartValue("inference") diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala similarity index 68% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala index cb08c8d9..b4c6df91 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/SparqlQuery.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala @@ -1,10 +1,10 @@ -package es.weso.rdfshape.server.api.routes.endpoint.logic +package es.weso.rdfshape.server.api.routes.endpoint.logic.query import cats.effect.IO import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuerySource.{ - SparqlQueryTab, - defaultActiveQueryTab +import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuerySource.{ + SparqlQuerySource, + defaultActiveQuerySource } import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ ActiveQuerySourceParameter, @@ -18,11 +18,11 @@ import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents /** Data class representing a SPARQL query and its current source * * @param queryRaw Query raw text - * @param activeQueryTab Active tab, used to know which source the query comes from + * @param activeQuerySource Active source, used to know which source the query comes from */ sealed case class SparqlQuery private ( queryRaw: String, - activeQueryTab: SparqlQueryTab + activeQuerySource: SparqlQuerySource ) private[api] object SparqlQuery extends LazyLogging { @@ -60,22 +60,22 @@ private[api] object SparqlQuery extends LazyLogging { /** Create a SparqlQuery instance, given its 
source and data * - * @param queryStr Optionally, the raw contents of the query - * @param queryUrl Optionally, the URL with the contents of the query - * @param queryFile Optionally, the file with the contents of the query - * @param activeQueryTab Optionally, the indicator of the query source (raw, url or file) + * @param queryStr Optionally, the raw contents of the query + * @param queryUrl Optionally, the URL with the contents of the query + * @param queryFile Optionally, the file with the contents of the query + * @param activeQuerySource Optionally, the indicator of the query source (raw, url or file) * @return */ def mkSparqlQuery( queryStr: Option[String], queryUrl: Option[String], queryFile: Option[String], - activeQueryTab: Option[SparqlQueryTab] + activeQuerySource: Option[SparqlQuerySource] ): Either[String, SparqlQuery] = { // Create the query depending on the client's selected method - val maybeQuery: Either[String, SparqlQuery] = activeQueryTab.getOrElse( - defaultActiveQueryTab + val maybeQuery: Either[String, SparqlQuery] = activeQuerySource.getOrElse( + defaultActiveQuerySource ) match { case SparqlQuerySource.TEXT => queryStr match { @@ -112,18 +112,3 @@ private[api] object SparqlQuery extends LazyLogging { } } - -/** Enumeration of the different possible Query sources by the client. - * The source sent indicates the API if the Query was sent in raw text, as a URL - * to be fetched or as a text file containing the query. - * In case the client submits the query in several formats, the selected source will indicate the one format. 
- */ -private[logic] object SparqlQuerySource extends Enumeration { - type SparqlQueryTab = String - - val TEXT = "#queryTextArea" - val URL = "#queryUrl" - val FILE = "#queryFile" - - val defaultActiveQueryTab: SparqlQueryTab = TEXT -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala new file mode 100644 index 00000000..ada122c7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala @@ -0,0 +1,16 @@ +package es.weso.rdfshape.server.api.routes.endpoint.logic.query + +/** Enumeration of the different possible Query sources by the client. + * The source sent indicates the API if the Query was sent in raw text, as a URL + * to be fetched or as a text file containing the query. + * In case the client submits the query in several formats, the selected source will indicate the one format. 
+ */ +private[logic] object SparqlQuerySource extends Enumeration { + type SparqlQuerySource = String + + val TEXT = "#queryTextArea" + val URL = "#queryUrl" + val FILE = "#queryFile" + + val defaultActiveQuerySource: SparqlQuerySource = TEXT +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index e1815a81..d97bfa05 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -12,12 +12,9 @@ import es.weso.rdfshape.server.api.routes.endpoint.logic.Endpoint.{ } import es.weso.rdfshape.server.api.routes.endpoint.logic.EndpointStatus._ import es.weso.rdfshape.server.api.routes.endpoint.logic.Outgoing.getOutgoing -import es.weso.rdfshape.server.api.routes.endpoint.logic.SparqlQuery.getSparqlQuery -import es.weso.rdfshape.server.api.routes.endpoint.logic.{ - Endpoint, - Outgoing, - SparqlQuery -} +import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery +import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery.getSparqlQuery +import es.weso.rdfshape.server.api.routes.endpoint.logic.{Endpoint, Outgoing} import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ EndpointParameter, LimitParameter, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala index c96d3632..8dcafbb7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala @@ -22,16 +22,16 @@ sealed case class Schema( schemaEngine: 
Option[String], targetSchemaEngine: Option[String], targetSchemaFormat: Option[String], - activeSchemaTab: Option[String] + activeSchemaSource: Option[String] ) extends LazyLogging { def getSchema( data: Option[RDFReasoner] ): IO[(Option[String], Either[String, SchemaW])] = { - logger.debug(s"activeSchemaTab: $activeSchemaTab") + logger.debug(s"activeSchemaTab: $activeSchemaSource") logger.debug(s"schemaEngine: $schemaEngine") - val inputType = activeSchemaTab match { + val inputType = activeSchemaSource match { case Some(a) => parseSchemaTab(a) case None if schema.isDefined => Right(SchemaTextAreaType) case None if schemaUrl.isDefined => Right(SchemaUrlType) @@ -223,7 +223,9 @@ object Schema extends LazyLogging { targetSchemaFormat <- partsMap.optPartValue( TargetSchemaFormatParameter.name ) - activeSchemaTab <- partsMap.optPartValue(ActiveSchemaSourceParameter.name) + activeSchemaSource <- partsMap.optPartValue( + ActiveSchemaSourceParameter.name + ) } yield { Schema( schema = schema, @@ -233,7 +235,7 @@ object Schema extends LazyLogging { schemaEngine = schemaEngine, targetSchemaEngine = targetSchemaEngine, targetSchemaFormat = targetSchemaFormat, - activeSchemaTab = activeSchemaTab + activeSchemaSource = activeSchemaSource ) } @@ -246,7 +248,7 @@ object Schema extends LazyLogging { schemaEngine = None, targetSchemaEngine = None, targetSchemaFormat = None, - activeSchemaTab = None + activeSchemaSource = None ) } @@ -257,11 +259,11 @@ object Schema extends LazyLogging { * In case the client submits the schema in several formats, the selected source will indicate the preferred one. 
*/ private[logic] object SchemaSource extends Enumeration { - type SchemaTab = String + type SchemaSource = String val TEXT = "#schemaTextArea" val URL = "#schemaUrl" val FILE = "#schemaFile" - val defaultActiveShapeMapTab: SchemaTab = TEXT + val defaultActiveSchemaSource: SchemaSource = TEXT } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index 2d80d9f9..4d1bdea1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -9,7 +9,7 @@ import es.weso.rdf.{InferenceEngine, RDFBuilder, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.api.routes.data.logic.Data +import es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData import es.weso.schema.{Result, Schema, ShaclexSchema, ValidationTrigger} import es.weso.shacl.converter.Shacl2ShEx import es.weso.shapemaps.ShapeMap @@ -146,7 +146,7 @@ private[api] object SchemaOperations extends LazyLogging { relativeBase: Option[IRI], builder: RDFBuilder ): IO[(Result, Option[ValidationTrigger], Long)] = { - val dp = Data.empty.copy( + val dp = SimpleData.empty.copy( data = Some(data), optDataFormat = optDataFormat, inference = optInference diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 1cfb7381..318a0c07 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala 
+++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -9,7 +9,7 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.routes.data.logic.Data +import es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ import es.weso.rdfshape.server.api.routes.schema.logic.{Schema, TriggerMode} import es.weso.rdfshape.server.api.utils.OptEitherF._ @@ -284,7 +284,7 @@ class SchemaService(client: Client[IO]) { val partsMap = PartsMap(m.parts) val r = for { - dataPair <- Data.mkData(partsMap, relativeBase) + dataPair <- SimpleData.getData(partsMap, relativeBase) (resourceRdf, dp) = dataPair res <- for { emptyRes <- RDFAsJenaModel.empty diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index b62b5c85..703eeda7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -3,7 +3,7 @@ package es.weso.rdfshape.server.api.routes.shapemap.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.format.dataFormats.{Compact, ShapeMapFormat} -import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.ShapeMapTab +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.ShapeMapSource import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import 
es.weso.rdfshape.server.utils.error.exceptions.JsonConversionException @@ -17,14 +17,14 @@ import io.circe.Json * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). * @param shapeMapRaw Shapemap raw text * @param shapeMapFormat Shapemap format - * @param targetShapeMapFormat Shapemap target format (only for conversion operations) - * @param activeShapeMapTab Active tab, used to know which source the shapemap comes from + * @param targetShapeMapFormat Optionally, the shapemap target format (only for conversion operations) + * @param activeShapeMapSource Active source, used to know which source the shapemap comes from */ sealed case class ShapeMap private ( shapeMapRaw: String, shapeMapFormat: ShapeMapFormat, - targetShapeMapFormat: ShapeMapFormat, - activeShapeMapTab: String + targetShapeMapFormat: Option[ShapeMapFormat], + activeShapeMapSource: String ) { /** Inner shapemap structure of the data in this instance @@ -97,12 +97,12 @@ private[api] object ShapeMap extends LazyLogging { TargetShapeMapFormatParameter.name, partsMap ) - activeShapeMapTab <- partsMap.optPartValue( + activeShapeMapSource <- partsMap.optPartValue( ActiveShapeSourceTabParameter.name ) _ = logger.debug( - s"Getting ShapeMap from params. ShapeMap tab: $activeShapeMapTab" + s"Getting ShapeMap from params. 
ShapeMap tab: $activeShapeMapSource" ) // Create the shapemap depending on the client's selected method @@ -112,7 +112,7 @@ private[api] object ShapeMap extends LazyLogging { shapeMapFile, shapeMapFormat, targetShapeMapFormat, - activeShapeMapTab + activeShapeMapSource ) } yield maybeShapeMap @@ -125,7 +125,7 @@ private[api] object ShapeMap extends LazyLogging { * @param shapeMapFile Optionally, the file with the contents of the shapemap * @param shapeMapFormat Optionally, the format of the shapemap * @param targetShapeMapFormat Optionally, the target format of the shapemap (for conversions) - * @param activeShapeMapTab Optionally, the indicator of the shapemap source (raw, url or file) + * @param activeShapeMapSource Optionally, the indicator of the shapemap source (raw, url or file) * @return A new ShapeMap based on the given parameters */ def mkShapeMap( @@ -134,67 +134,66 @@ private[api] object ShapeMap extends LazyLogging { shapeMapFile: Option[String], shapeMapFormat: Option[ShapeMapFormat], targetShapeMapFormat: Option[ShapeMapFormat], - activeShapeMapTab: Option[ShapeMapTab] + activeShapeMapSource: Option[ShapeMapSource] ): Either[String, ShapeMap] = { // Confirm chosen formats val format = shapeMapFormat.getOrElse(ShapeMapFormat.defaultFormat) - val targetFormat = - targetShapeMapFormat.getOrElse(ShapeMapFormat.defaultFormat) - - // Create the shapemap depending on the client's selected method - val maybeShapeMap: Either[String, ShapeMap] = activeShapeMapTab.getOrElse( - ShapeMapSource.defaultActiveShapeMapTab - ) match { - case ShapeMapSource.TEXT => - shapeMapStr match { - case None => Left("No value for the ShapeMap string") - case Some(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw, - format, - targetFormat, - ShapeMapSource.TEXT + + // Create the shapemap depending on the client's selected source + val maybeShapeMap: Either[String, ShapeMap] = + activeShapeMapSource.getOrElse( + ShapeMapSource.defaultActiveShapeMapSource + ) match { + case 
ShapeMapSource.TEXT => + shapeMapStr match { + case None => Left("No value for the ShapeMap string") + case Some(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw = shapeMapRaw, + shapeMapFormat = format, + targetShapeMapFormat = targetShapeMapFormat, + activeShapeMapSource = ShapeMapSource.TEXT + ) ) - ) - } - - case ShapeMapSource.URL => - shapeMapUrl match { - case None => Left(s"No value for the shapemap URL") - case Some(url) => - getUrlContents(url) match { - case Right(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw, - format, - targetFormat, - ShapeMapSource.URL + } + + case ShapeMapSource.URL => + shapeMapUrl match { + case None => Left(s"No value for the shapemap URL") + case Some(url) => + getUrlContents(url) match { + case Right(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw = shapeMapRaw, + shapeMapFormat = format, + targetShapeMapFormat = targetShapeMapFormat, + activeShapeMapSource = ShapeMapSource.URL + ) ) + case Left(err) => Left(err) + } + } + case ShapeMapSource.FILE => + shapeMapFile match { + case None => Left(s"No value for the shapemap file") + case Some(shapeMapRaw) => + Right( + ShapeMap( + shapeMapRaw = shapeMapRaw, + shapeMapFormat = format, + targetShapeMapFormat = targetShapeMapFormat, + activeShapeMapSource = ShapeMapSource.FILE ) - case Left(err) => Left(err) - } - } - case ShapeMapSource.FILE => - shapeMapFile match { - case None => Left(s"No value for the shapemap file") - case Some(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw, - format, - targetFormat, - ShapeMapSource.FILE ) - ) - } - case other => - val msg = s"Unknown value for activeShapemapTab: $other" - logger.warn(msg) - Left(msg) - } + } + case other => + val msg = s"Unknown shapemap source: $other" + logger.warn(msg) + Left(msg) + } maybeShapeMap } @@ -205,23 +204,8 @@ private[api] object ShapeMap extends LazyLogging { ShapeMap( shapeMapRaw = emptyShapeMapValue, shapeMapFormat = defaultShapeMapFormat, - targetShapeMapFormat = defaultShapeMapFormat, - 
activeShapeMapTab = ShapeMapSource.defaultActiveShapeMapTab + targetShapeMapFormat = None, + activeShapeMapSource = ShapeMapSource.defaultActiveShapeMapSource ) } - -/** Enumeration of the different possible ShapeMap sources sent by the client. - * The source sent indicates the API if the shapemap was sent in raw text, as a URL - * to be fetched or as a text file containing the shapemap. - * In case the client submits the shapemap in several formats, the selected source will indicate the preferred one. - */ -private[api] object ShapeMapSource extends Enumeration { - type ShapeMapTab = String - - val TEXT = "#shapeMapTextArea" - val URL = "#shapeMapUrl" - val FILE = "#shapeMapFile" - - val defaultActiveShapeMapTab: ShapeMapTab = TEXT -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala new file mode 100644 index 00000000..00c83130 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala @@ -0,0 +1,16 @@ +package es.weso.rdfshape.server.api.routes.shapemap.logic + +/** Enumeration of the different possible ShapeMap sources sent by the client. + * The source sent indicates the API if the shapemap was sent in raw text, as a URL + * to be fetched or as a text file containing the shapemap. + * In case the client submits the shapemap in several formats, the selected source will indicate the preferred one. 
+ */ +private[api] object ShapeMapSource extends Enumeration { + type ShapeMapSource = String + + val TEXT = "#shapeMapTextArea" + val URL = "#shapeMapUrl" + val FILE = "#shapeMapFile" + + val defaultActiveShapeMapSource: ShapeMapSource = TEXT +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala index e9992c72..95d002f5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala @@ -51,7 +51,7 @@ object HTML2RDF extends LazyLogging { def extractFromString( htmlStr: String, extractorName: String - ): CatsResource[IO, RDFReasoner] = { + ): CatsResource[IO, RDFAsJenaModel] = { extractFromSource(RdfSourceTypes.STRING, htmlStr, extractorName) } @@ -77,7 +77,7 @@ object HTML2RDF extends LazyLogging { sourceType: RdfSourceTypes.Value, rdfData: String, extractorName: String - ): CatsResource[IO, RDFReasoner] = { + ): CatsResource[IO, RDFAsJenaModel] = { Try { logger.debug( s"Extracting RDF from ${sourceType.toString} with extractor $extractorName" From f9e731fa60035dcaa73ed75880d987aadea6ddbd Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Thu, 21 Oct 2021 17:51:11 +0200 Subject: [PATCH 24/32] Updated dependencies and started refactoring endpoints --- build.sbt | 40 +- .../es/weso/rdfshape/server/Server.scala | 16 +- .../server/api/definitions/ApiDefaults.scala | 25 +- .../rdfshape/server/api/format/Format.scala | 4 +- .../api/format/dataFormats/DataFormat.scala | 22 +- .../format/dataFormats/GraphicFormat.scala | 39 ++ .../api/format/dataFormats/RdfFormat.scala | 12 +- .../api/format/dataFormats/SchemaFormat.scala | 2 +- .../format/dataFormats/ShapeMapFormat.scala | 2 +- .../api/routes/data/logic/DataSource.scala | 17 +- .../logic/operations/DataConversion.scala | 325 ++++++++-------- .../data/logic/operations/DataExtract.scala | 279 
+++++++------- .../data/logic/operations/DataInfo.scala | 185 +++------- .../data/logic/operations/DataOperation.scala | 21 ++ .../logic/operations/DataOperations.scala | 3 - .../api/routes/data/logic/types/Data.scala | 43 ++- ...{EndpointData.scala => DataEndpoint.scala} | 70 ++-- .../{SimpleData.scala => DataSingle.scala} | 183 ++++----- ...{CompoundData.scala => DataCompound.scala} | 77 ++-- .../logic/types/merged/MergedModels.scala | 6 +- .../api/routes/data/service/DataService.scala | 348 +++++++++--------- .../api/routes/schema/logic/Schema.scala | 25 +- .../schema/logic/SchemaOperations.scala | 87 +++-- .../routes/schema/logic/SchemaSource.scala | 16 + .../api/routes/schema/logic/TriggerMode.scala | 2 +- .../routes/schema/service/SchemaService.scala | 111 +++--- .../server/api/values/EndpointValue.scala | 1 - .../server/utils/json/JsonUtils.scala | 27 +- 28 files changed, 1027 insertions(+), 961 deletions(-) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/{EndpointData.scala => DataEndpoint.scala} (71%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/{SimpleData.scala => DataSingle.scala} (82%) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/{CompoundData.scala => DataCompound.scala} (60%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala diff --git a/build.sbt b/build.sbt index 14453b68..ae819a74 100644 --- a/build.sbt +++ b/build.sbt @@ -80,8 +80,6 @@ lazy val scaladocSettings: Seq[Def.Setting[_]] = 
Seq( // Need to generate docs to publish to oss Compile / packageDoc / publishArtifact := true ) -ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) -ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) // Setup Mdoc + Docusaurus settings lazy val mdocSettings = Seq( mdocVariables := Map( @@ -141,6 +139,9 @@ lazy val unidocSettings: Seq[Def.Setting[_]] = Seq( "-private" ) ) + +ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) +ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) // Shared publish settings for all modules. lazy val publishSettings = Seq( organization := "es.weso", @@ -258,6 +259,7 @@ lazy val server = project http4sCirce, scalatags, umlShaclex, + shexs, shaclex, any23_core, any23_api, @@ -288,25 +290,26 @@ lazy val docs = project ) lazy val MUnitFramework = new TestFramework("munit.Framework") /* DEPENDENCY versions */ -lazy val http4sVersion = "1.0.0-M21" -lazy val catsVersion = "2.5.0" +lazy val http4sVersion = "1.0.0-M23" +lazy val catsVersion = "2.6.1" /* ------------------------------------------------------------------------- */ -lazy val mongodbVersion = "4.1.1" -lazy val any23Version = "2.2" -lazy val rdf4jVersion = "2.2.4" -lazy val graphvizJavaVersion = "0.5.2" -lazy val logbackVersion = "1.2.3" -lazy val loggingVersion = "3.9.3" +lazy val mongodbVersion = "4.3.2" +lazy val any23Version = "2.4" +lazy val rdf4jVersion = "3.7.3" +lazy val graphvizJavaVersion = "0.18.1" +lazy val logbackVersion = "1.2.6" +lazy val loggingVersion = "3.9.4" lazy val groovyVersion = "3.0.8" -lazy val munitVersion = "0.7.23" -lazy val munitEffectVersion = "1.0.2" -lazy val plantumlVersion = "1.2021.5" +lazy val munitVersion = "0.7.27" +lazy val munitEffectVersion = "1.0.6" +lazy val plantumlVersion = "8059" lazy val scalajVersion = "2.4.2" -lazy val scalatagsVersion = "0.7.0" +lazy val scalatagsVersion = "0.9.4" // WESO dependencies -lazy val shaclexVersion = "0.1.91" +lazy val shaclexVersion = "0.1.103-SNAPSHOT" +lazy val 
shexsVersion = "0.1.97" lazy val umlShaclexVersion = "0.0.82" -lazy val wesoUtilsVersion = "0.1.98" +lazy val wesoUtilsVersion = "0.1.99" // Dependency modules lazy val http4sDsl = "org.http4s" %% "http4s-dsl" % http4sVersion lazy val http4sBlazeServer = @@ -322,7 +325,7 @@ lazy val mongodb = "org.mongodb.scala" %% "mongo-scala-driver" % mongodbVers lazy val any23_core = "org.apache.any23" % "apache-any23-core" % any23Version lazy val any23_api = "org.apache.any23" % "apache-any23-api" % any23Version lazy val any23_scraper = - "org.apache.any23.plugins" % "apache-any23-html-scraper" % "2.2" + "org.apache.any23.plugins" % "apache-any23-html-scraper" % "2.3" lazy val rdf4j_runtime = "org.eclipse.rdf4j" % "rdf4j-runtime" % rdf4jVersion lazy val graphvizJava = "guru.nidi" % "graphviz-java" % graphvizJavaVersion lazy val plantuml = "net.sourceforge.plantuml" % "plantuml" % plantumlVersion @@ -336,7 +339,8 @@ lazy val munitEffect = lazy val scalaj = "org.scalaj" %% "scalaj-http" % scalajVersion lazy val scalatags = "com.lihaoyi" %% "scalatags" % scalatagsVersion // WESO dependencies -lazy val shaclex = "es.weso" %% "shexs" % shaclexVersion +lazy val shexs = "es.weso" %% "shexs" % shexsVersion +lazy val shaclex = "es.weso" %% "shaclex" % shaclexVersion lazy val umlShaclex = "es.weso" %% "umlshaclex" % umlShaclexVersion lazy val wesoUtils = "es.weso" %% "utilstest" % wesoUtilsVersion // "sbt-github-actions" plugin settings diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index e078db75..758aad2b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -17,13 +17,14 @@ import es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationExceptio import es.weso.rdfshape.server.utils.error.{ExitCodes, SysUtils} import es.weso.rdfshape.server.utils.secure.SSLHelper import 
fs2.Stream +import org.http4s.blaze.client.BlazeClientBuilder +import org.http4s.blaze.server.BlazeServerBuilder import org.http4s.client.Client -import org.http4s.client.blaze.BlazeClientBuilder import org.http4s.implicits.http4sKleisliResponseSyntaxOptionT -import org.http4s.server.blaze.BlazeServerBuilder import org.http4s.server.middleware.{CORS, CORSConfig, Logger} import org.http4s.{HttpApp, HttpRoutes} +import java.util.concurrent.TimeUnit import javax.net.ssl.SSLContext import scala.concurrent.ExecutionContext.global import scala.concurrent.duration._ @@ -166,12 +167,11 @@ object Server { /** Application's CORS configuration */ - private val corsConfiguration = CORSConfig( - anyOrigin = true, - anyMethod = true, - allowCredentials = true, - maxAge = 1.day.toSeconds - ) + private val corsConfiguration = CORSConfig.default + .withAnyOrigin(true) + .withAnyMethod(true) + .withAllowCredentials(true) + .withMaxAge(new FiniteDuration(1, TimeUnit.DAYS)) // Act as a server factory diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index 1a3f2be5..f5aa1d8a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -1,12 +1,19 @@ package es.weso.rdfshape.server.api.definitions import es.weso.rdf.nodes.IRI +import es.weso.rdf.{InferenceEngine, NONE} import es.weso.rdfshape.server.api.format.dataFormats.{ DataFormat, SchemaFormat, ShapeMapFormat } -import es.weso.schema.{Schemas, ShapeMapTrigger} +import es.weso.rdfshape.server.api.routes.data.logic.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource.SchemaSource +import 
es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.ShapeMapSource +import es.weso.schema.{Schema, Schemas, ShapeMapTrigger} import es.weso.shapemaps.ShapeMap /** Application-wide defaults @@ -16,8 +23,10 @@ case object ApiDefaults { val defaultDataFormat: DataFormat = DataFormat.defaultFormat val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats val defaultSchemaFormat: SchemaFormat = SchemaFormat.defaultFormat + val defaultSchemaFormatName: String = defaultSchemaFormat.name val availableSchemaEngines: List[String] = Schemas.availableSchemaNames - val defaultSchemaEngine: String = Schemas.defaultSchemaName + val defaultSchemaEngine: Schema = Schemas.defaultSchema + val defaultSchemaEngineName: String = defaultSchemaEngine.name val availableTriggerModes: List[String] = Schemas.availableTriggerModes val defaultTriggerMode: String = ShapeMapTrigger(ShapeMap.empty).name val availableInferenceEngines = List( @@ -25,10 +34,14 @@ case object ApiDefaults { "RDFS", "OWL" ) // TODO: Obtain from RDFAsJenaModel.empty.map(_.availableInferenceEngines).unsafeRunSync - val defaultSchemaEmbedded = false - val defaultInference: String = availableInferenceEngines.head - val defaultActiveDataTab = "#dataTextArea" - val defaultActiveSchemaTab = "#schemaTextArea" + val defaultSchemaEmbedded = false + val defaultInferenceEngine: InferenceEngine = NONE + val defaultInferenceEngineName: String = defaultInferenceEngine.name + val defaultActiveDataSource: DataSource = DataSource.defaultActiveDataSource + val defaultActiveSchemaSource: SchemaSource = + SchemaSource.defaultActiveSchemaSource + val defaultActiveShapeMapSource: ShapeMapSource = + ShapeMapSource.defaultActiveShapeMapSource val defaultShapeMapFormat: ShapeMapFormat = ShapeMapFormat.defaultFormat val availableShapeMapFormats: List[String] = ShapeMap.formats val defaultActiveShapeMapTab = "#shapeMapTextArea" diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index b23cf9c1..043a230a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -19,7 +19,7 @@ trait Format { */ val mimeType: MediaType - override def toString: String = s"Format $name" + override def toString: String = name } @@ -62,7 +62,7 @@ trait FormatCompanion[F <: Format] extends LazyLogging { implicit val decodeFormat: Decoder[F] = (cursor: HCursor) => for { formatStr <- cursor.downField("name").as[String] - format: F = fromString(formatStr).toOption.getOrElse(defaultFormat) + format = fromString(formatStr).toOption.getOrElse(defaultFormat) } yield format /** Try to build a Format object from a request's parameters diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala index 49faa7f8..90fa464c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala @@ -37,30 +37,14 @@ object DataFormat extends FormatCompanion[DataFormat] { */ case object Json extends DataFormat( - formatName = "json", + formatName = "JSON", formatMimeType = new MediaType("application", "json") ) -/** Represents the mime-type "text/vnd.graphviz" +/** Represents the mime-type "text/vnd.graphviz", used by graphviz */ case object Dot extends DataFormat( - formatName = "dot", + formatName = "DOT", formatMimeType = new MediaType("text", "vnd.graphviz") ) - -/** Represents the mime-type "image/svg+xml" - */ -case object Svg - extends DataFormat( - formatName = "svg", - formatMimeType = MediaType.image.`svg+xml` - ) - -/** 
Represents the mime-type "image/png" - */ -case object Png - extends DataFormat( - formatName = "png", - formatMimeType = MediaType.image.png - ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala new file mode 100644 index 00000000..4f64c0d9 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala @@ -0,0 +1,39 @@ +package es.weso.rdfshape.server.api.format.dataFormats + +import es.weso.rdfshape.server.api.format.FormatCompanion +import org.http4s.MediaType + +/** Dummy class to differentiate formats used for graphical representations from the more generic DataFormat + * + * @see {@link DataFormat} + */ +sealed class GraphicFormat(formatName: String, formatMimeType: MediaType) + extends DataFormat(formatName, formatMimeType) {} + +/** Companion object with all RDFFormat static utilities + */ +object GraphicFormat extends FormatCompanion[GraphicFormat] { + + override lazy val availableFormats: List[GraphicFormat] = + List( + Svg, + Png + ) + override val defaultFormat: GraphicFormat = Svg +} + +/** Represents the mime-type "image/svg+xml" + */ +case object Svg + extends GraphicFormat( + formatName = "SVG", + formatMimeType = MediaType.image.`svg+xml` + ) + +/** Represents the mime-type "image/png" + */ +case object Png + extends GraphicFormat( + formatName = "PNG", + formatMimeType = MediaType.image.png + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala index 0c194c24..d981c95f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala @@ -30,7 +30,7 @@ 
object RDFFormat extends FormatCompanion[RDFFormat] { */ case object Turtle extends RDFFormat( - formatName = "turtle", + formatName = "Turtle", formatMimeType = new MediaType("text", "turtle") ) @@ -38,7 +38,7 @@ case object Turtle */ case object NTriples extends RDFFormat( - formatName = "n-triples", + formatName = "N-Triples", formatMimeType = new MediaType("application", "n-triples") ) @@ -46,7 +46,7 @@ case object NTriples */ case object Trig extends RDFFormat( - formatName = "trig", + formatName = "TriG", formatMimeType = new MediaType("application", "trig") ) @@ -54,7 +54,7 @@ case object Trig */ case object JsonLd extends RDFFormat( - formatName = "json-ld", + formatName = "JSON-LD", formatMimeType = new MediaType("application", "ld+json") ) @@ -62,7 +62,7 @@ case object JsonLd */ case object RdfXml extends RDFFormat( - formatName = "rdf/xml", + formatName = "RDF/XML", formatMimeType = new MediaType("application", "rdf+xml") ) @@ -70,6 +70,6 @@ case object RdfXml */ case object RdfJson extends RDFFormat( - formatName = "rdf/json", + formatName = "RDF/JSON", formatMimeType = MediaType.application.json ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala index 2bc282a6..c32829b8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala @@ -34,6 +34,6 @@ object SchemaFormat extends FormatCompanion[SchemaFormat] { */ case object ShExC extends SchemaFormat( - formatName = "shexc", + formatName = "ShExC", formatMimeType = new MediaType("text", "shex") ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/ShapeMapFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/ShapeMapFormat.scala 
index 77a83713..495df76e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/ShapeMapFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/ShapeMapFormat.scala @@ -29,6 +29,6 @@ object ShapeMapFormat extends FormatCompanion[ShapeMapFormat] { */ case object Compact extends ShapeMapFormat( - formatName = "compact", + formatName = "Compact", formatMimeType = new MediaType("text", "shex") ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala index 24d209c7..0b56c8b5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala @@ -1,3 +1,18 @@ -package es.weso.rdfshape.server.api.routes.data.logic object DataSource { +package es.weso.rdfshape.server.api.routes.data.logic +/** Enumeration of the different possible Data sources sent by the client. + * The source sent indicates the API if the schema was sent in raw text, as a URL + * to be fetched or as a text file containing the schema. + * In case the client submits the data in several formats, the selected source will indicate the preferred one. 
+ */ +private[api] object DataSource extends Enumeration { + type DataSource = String + + val TEXT = "#dataTextArea" + val URL = "#dataUrl" + val FILE = "#dataFile" + val COMPOUND = "#compoundData" + val ENDPOINT = "#dataEndpoint" + + val defaultActiveDataSource: DataSource = TEXT } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala index ecd43f3a..116e6d32 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala @@ -1,18 +1,24 @@ -package es.weso.rdfshape.server.api.routes.data.logic +package es.weso.rdfshape.server.api.routes.data.logic.operations import cats.effect.IO import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.RDFReasoner import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} -import es.weso.rdfshape.server.api.format.dataFormats.DataFormat -import es.weso.rdfshape.server.api.routes.data.logic.data.CompoundData -import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField -import es.weso.utils.IOUtils.{either2io, err} +import es.weso.rdf.{InferenceEngine, NONE} +import es.weso.rdfshape.server.api.format.dataFormats.{ + DataFormat, + Dot, + Png, + Svg +} +import es.weso.rdfshape.server.api.routes.data.logic.operations.DataConversion.successMessage +import es.weso.rdfshape.server.api.routes.data.logic.types.{Data, DataSingle} +import es.weso.utils.IOUtils.either2io import guru.nidi.graphviz.engine.{Format, Graphviz} import guru.nidi.graphviz.model.MutableGraph import guru.nidi.graphviz.parse.Parser -import io.circe.Json +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} import java.io.ByteArrayOutputStream import java.util.Base64 
@@ -20,180 +26,208 @@ import javax.imageio.ImageIO import scala.collection.immutable import scala.util.Try -/** Data class representing the output of a conversion operation +/** Data class representing the output of a data-conversion operation * - * @param msg Output informational message after conversion - * @param data Data to be converted - * @param dataFormat Initial data format - * @param targetFormat Target data format - * @param result Data after conversion + * @param inputData Data before conversion + * @param targetFormat Target data format + * @param result Data after conversion */ -final case class DataConversion( - msg: String, - data: Option[String], - dataFormat: DataFormat, - targetFormat: String, - result: String -) { - - /** Convert a conversion result to its JSON representation - * - * @return JSON representation of the conversion result - */ - def toJson: Json = Json.fromFields( - List( - ("message", Json.fromString(msg)), - ("result", Json.fromString(result)), - ("dataFormat", Json.fromString(dataFormat.name)), - ("targetDataFormat", Json.fromString(targetFormat)) - ) ++ - maybeField("data", data, Json.fromString) - ) -} +final case class DataConversion private ( + override val inputData: Data, + targetFormat: DataFormat, + result: Data +) extends DataOperation(successMessage, inputData) /** Static utilities for data conversion */ private[api] object DataConversion extends LazyLogging { + /** List of graph format names + */ lazy val availableGraphFormatNames: immutable.Seq[String] = availableGraphFormats.map(_.name) - private lazy val rdfDataFormats = + + /** List of available RDF format names (uppercase) + */ + private lazy val rdfDataFormatNames: immutable.Seq[String] = RDFAsJenaModel.availableFormats.map(_.toUpperCase) + + /** List of available graph formats + */ private lazy val availableGraphFormats = List( GraphFormat("SVG", "application/svg", Format.SVG), GraphFormat("PNG", "application/png", Format.PNG), GraphFormat("PS", 
"application/ps", Format.PS) ) - val successMessage = "Conversion successful!" - - private[api] def dataConvert( - maybeData: Option[String], - dataFormat: DataFormat, - maybeCompoundData: Option[String], - targetFormat: String - ): IO[Either[String, DataConversion]] = { - logger.debug( - s"Converting $maybeData with format $dataFormat to $targetFormat. OptTargetFormat: $targetFormat" - ) - - maybeData match { - case None => - maybeCompoundData match { - case None => - err(s"dataConvert: no data and no compoundData parameters") - case Some(compoundDataStr) => - for { - ecd <- either2io(CompoundData.fromJsonString(compoundDataStr)) - cd <- cnvEither(ecd, str => s"dataConvert: Error: $str") - result <- cd.toRdf.flatMap( - _.use(rdf => - rdfConvert(rdf, None, dataFormat, targetFormat).attempt.map( - _.fold(exc => Left(exc.getMessage), dc => Right(dc)) - ) - ) - ) - } yield result - } - case Some(data) => - val maybeConversion = - RDFAsJenaModel - .fromChars(data, dataFormat.name, None) - .flatMap( - _.use(rdf => - rdfConvert(rdf, Some(data), dataFormat, targetFormat) - ) - ) + private val successMessage = "Conversion successful" - maybeConversion.attempt.map( - _.fold(exc => Left(exc.getMessage), dc => Right(dc)) + /** Convert a conversion result to its JSON representation + * + * @return JSON representation of the conversion result + */ + implicit val encodeResult: Encoder[DataConversion] = + (dataConversion: DataConversion) => { + Json.fromFields( + List( + ("message", Json.fromString(dataConversion.successMessage)), + ("data", dataConversion.inputData.asJson), + ("result", dataConversion.result.asJson), + ("inputDataFormat", dataConversion.inputData.format.asJson), + ("targetDataFormat", dataConversion.result.format.asJson) ) - + ) } - } + /** Perform the actual conversion operation between RDF text formats + * + * @param inputData Input conversion data + * @param targetFormat Target + * @return A new Data instance + */ + def dataConvert( + inputData: Data, + 
targetFormat: DataFormat + ): IO[DataConversion] = { + logger.info(s"Conversion target format: $targetFormat") - private def cnvEither[A](e: Either[String, A], cnv: String => String): IO[A] = - e.fold(s => IO.raiseError(new RuntimeException(cnv(s))), IO.pure) + for { + rdf <- inputData.toRdf() + sgraph <- rdf.use(rdfReasoner => RDF2SGraph.rdf2sgraph(rdfReasoner)) + targetInference = inputData match { + case ds: DataSingle => ds.inference + case _ => NONE + } - private[api] def rdfConvert( - rdf: RDFReasoner, - data: Option[String], - dataFormat: DataFormat, - targetFormat: String - ): IO[DataConversion] = { - val doConversion: IO[String] = { - logger.info(s"Conversion target format: $targetFormat") - targetFormat.toUpperCase match { + convertedData <- targetFormat.name.toUpperCase match { + // JSON: convert to JSON String and return a DataSingle with it case "JSON" => - for { - sgraph <- RDF2SGraph.rdf2sgraph(rdf) - } yield sgraph.toJson.spaces2 + IO { + DataSingle( + dataRaw = sgraph.toJson.spaces2, + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + } + case "DOT" => + IO { + DataSingle( + dataRaw = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs), + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + } + case tFormat if rdfDataFormatNames.contains(tFormat) => for { - sgraph <- RDF2SGraph.rdf2sgraph(rdf) - } yield sgraph.toDot(RDFDotPreferences.defaultRDFPrefs) - case t if rdfDataFormats.contains(t) => rdf.serialize(t) - case t if availableGraphFormatNames.contains(t) => - val doS: IO[String] = for { - sgraph <- RDF2SGraph.rdf2sgraph(rdf) - eitherFormat <- either2io(getTargetFormat(t)) + data <- rdf.use(_.serialize(tFormat)) + } yield DataSingle( + dataRaw = data, + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + case tFormat if availableGraphFormatNames.contains(tFormat) => + for { + eitherFormat 
<- either2io(getTargetFormat(tFormat)) dotStr = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs) - eitherConverted <- eitherFormat.fold( - e => IO.raiseError(new RuntimeException(e)), - format => either2io(dotConverter(dotStr, format)) + inputDataDot = DataSingle( + dataRaw = dotStr, + dataFormat = Dot, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + _ <- eitherFormat.fold( + err => IO.raiseError(new RuntimeException(err)), + format => dotConvert(inputDataDot, format, targetInference) ) - c <- eitherFormat.fold( - e => IO.raiseError(new RuntimeException(e)), + data <- eitherFormat.fold( + err => IO.raiseError(new RuntimeException(err)), _ => IO(dotStr) ) - } yield c - doS + } yield DataSingle( + dataRaw = data, + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) case t => IO.raiseError(new RuntimeException(s"Unsupported format: $t")) } - } + } yield DataConversion(inputData, targetFormat, convertedData) - for { - converted <- doConversion - } yield DataConversion( - "Conversion successful!", - data, - dataFormat, - targetFormat, - converted - ) } - private[api] def dotConverter( - dot: String, - targetFormat: Format - ): Either[String, String] = { - logger.debug(s"dotConverter to $targetFormat. 
dot\n$dot") - Try { - val g: MutableGraph = Parser.read(dot) - targetFormat match { - case Format.SVG => - val renderer = Graphviz - .fromGraph(g) //.width(200) - .render(targetFormat) - logger.info(s"SVG converted: ${renderer.toString}") - renderer.toString - case Format.PNG => - val renderer = Graphviz.fromGraph(g).render(Format.PNG) - val image = renderer.toImage - val baos = new ByteArrayOutputStream() - ImageIO.write(image, "png", baos) - val data = Base64.getEncoder.encodeToString(baos.toByteArray) - val imageString = "data:image/png;base64," + data - "" - case _ => s"Error converting to $targetFormat" - } + /** Perform a conversion from DOT data to another format + * + * @param inputData Input Data (DOT format) to be converted + * @param targetFormat Target format (graphviz) + * @return Data after conversion + */ + def dotConvert( + inputData: Data, + targetFormat: Format, + inference: InferenceEngine = NONE + ): IO[Data] = { + logger.debug(s"dotConverter to $targetFormat. dot\n$inputData") + if(inputData.format.isEmpty) + IO.raiseError(new RuntimeException("Unspecified input data format")) + else if(inputData.rawData.isEmpty) + IO.raiseError( + new RuntimeException("Empty or malformed input data contents") + ) + else if( + inputData.format.get != es.weso.rdfshape.server.api.format.dataFormats.Dot + ) IO.raiseError(new RuntimeException("Input format is not DOT")) + else { + Try { + val g: MutableGraph = new Parser().read(inputData.rawData.get) + targetFormat match { + case Format.SVG => + val renderer = Graphviz + .fromGraph(g) //.width(200) + .render(targetFormat) + logger.debug(s"SVG converted: ${renderer.toString}") + IO { + DataSingle( + dataRaw = renderer.toString, + dataFormat = Svg, + inference = inference, + activeDataSource = inputData.dataSource + ) + } + case Format.PNG => + val renderer = Graphviz.fromGraph(g).render(Format.PNG) + val image = renderer.toImage + val baos = new ByteArrayOutputStream() + ImageIO.write(image, "png", baos) + val data 
= Base64.getEncoder.encodeToString(baos.toByteArray) + val imageString = "data:image/png;base64," + data + + IO { + DataSingle( + dataRaw = + "", + dataFormat = Png, + inference = inference, + activeDataSource = inputData.dataSource + ) + } - }.fold( - e => Left(e.getMessage), - s => Right(s) - ) + case _ => + IO.raiseError( + new RuntimeException( + s"Error converting from DOT to $targetFormat" + ) + ) + } + }.fold( + err => IO.raiseError(err), + identity + ) + } } private def getTargetFormat(str: String): Either[String, Format] = @@ -205,5 +239,4 @@ private[api] object DataConversion extends LazyLogging { } private case class GraphFormat(name: String, mime: String, fmt: Format) - } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala index dee150b8..78bf3e9a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala @@ -1,185 +1,168 @@ -package es.weso.rdfshape.server.api.routes.data.logic +package es.weso.rdfshape.server.api.routes.data.logic.operations +import cats.data.EitherT import cats.effect.IO +import cats.effect.unsafe.implicits.global import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.RDFReasoner +import es.weso.rdf.InferenceEngine import es.weso.rdf.nodes.{IRI, Lang} -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - defaultSchemaEngine, - defaultSchemaFormat, - defaultShapeLabel -} -import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.utils.json.JsonUtils._ +import es.weso.rdfshape.server.api.definitions.ApiDefaults._ +import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat +import 
es.weso.rdfshape.server.api.routes.data.logic.operations.DataExtract.successMessage +import es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.schema.Schema import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} import es.weso.shapemaps.{NodeSelector, ResultShapeMap} -import es.weso.utils.IOUtils.{ESIO, either2es, io2es, run_es} -import io.circe.Json +import es.weso.utils.IOUtils.{either2es, io2es} +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} -/** Data class representing the output of an extraction operation (input RDF data => output schema) +/** Data class representing the output of a data-extraction operation (input RDF data => output schema) * - * @param msg Output informational message after conversion. Used in case of error. - * @param optData RDF input data from which ShEx may be extracted - * @param optDataFormat RDF input data format - * @param optSchemaFormat Target schema format - * @param optSchemaEngine Target schema engine - * @param optSchema Resulting schema - * @param optResultShapeMap Resulting shapemap + * @param inputData RDF input data from which ShEx may be extracted + * @param targetSchemaFormat Target schema format + * @param targetSchemaEngine Target schema engine + * @param schema Resulting schema + * @param shapeMap Resulting shapemap */ final case class DataExtract private ( - msg: String, - optData: Option[String], - optDataFormat: Option[DataFormat], - optSchemaFormat: Option[String], - optSchemaEngine: Option[String], - optSchema: Option[Schema], - optResultShapeMap: Option[ResultShapeMap] -) { + override val inputData: Data, + targetSchemaFormat: SchemaFormat, + targetSchemaEngine: Schema = defaultSchemaEngine, + schema: Schema, + shapeMap: ResultShapeMap +) extends DataOperation(successMessage, inputData) + +/** Static utilities to extract schemas from RDF data + */ +private[api] object DataExtract extends LazyLogging { + + private val successMessage = "Extraction 
successful" + + /** Common infer options to all extraction operations + */ + private val inferOptions: InferOptions = InferOptions( + inferTypePlainNode = true, + addLabelLang = Some(Lang("en")), + possiblePrefixMap = PossiblePrefixes.wikidataPrefixMap, + maxFollowOn = 1, + followOnLs = List(), + followOnThreshold = Some(1), + sortFunction = InferOptions.orderByIRI + ) /** Convert an extraction result to its JSON representation * * @return JSON representation of the extraction result */ - def toJson: IO[Json] = optSchema match { - case None => IO(Json.fromFields(List(("msg", Json.fromString(msg))))) - case Some(schema) => - val engine = optSchemaEngine.getOrElse(defaultSchemaEngine) - val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat.name) - for { - schemaStr <- schema.serialize(schemaFormat) - } yield Json.fromFields( + + implicit val encodeResult: Encoder[DataExtract] = + (dataExtract: DataExtract) => { + + val resultJson: Json = Json.fromFields( List( - ("message", Json.fromString(msg)), - ("inferredShape", Json.fromString(schemaStr)), - ("schemaFormat", Json.fromString(schemaFormat)), - ("schemaEngine", Json.fromString(engine)) - ) ++ - maybeField("data", optData, Json.fromString) ++ - maybeField( - "dataFormat", - optDataFormat, - (df: DataFormat) => Json.fromString(df.name) - ) ++ - maybeField( - "resultShapeMap", - optResultShapeMap, - (r: ResultShapeMap) => Json.fromString(r.toString) - ) + ( + "schema", + Json.fromString( + dataExtract.schema + .serialize(dataExtract.targetSchemaFormat.name) + .unsafeRunSync() + ) + ), + ("shapeMap", Json.fromString(dataExtract.shapeMap.toString)) + ) ) - } -} -/** Static utilities to extract schemas from RDF data - */ -object DataExtract extends LazyLogging { + Json.fromFields( + List( + ("message", Json.fromString(dataExtract.successMessage)), + ("data", dataExtract.inputData.asJson), + ("result", resultJson), + ("targetSchemaFormat", dataExtract.targetSchemaFormat.asJson), + ( + "targetSchemaEngine", + 
Json.fromString(dataExtract.targetSchemaEngine.name) + ) + ) + ) + } /** Extract Shex from a given RDF input * - * @param rdf Input RDF - * @param optData Input data (optional) - * @param optDataFormat Input data format (optional) - * @param optNodeSelector Node selector (optional) - * @param optInference Conversion inference (optional) - * @param optEngine Conversion engine (optional) - * @param optSchemaFormat Target schema format (optional) - * @param optLabelName Label name (optional) - * @param relativeBase Relative base + * @param inputData Input data for the extraction + * @param nodeSelector Node selector for the schema extraction + * @param inferenceEngine Inference engine + * @param targetSchemaEngine Target conversion engine + * @param targetSchemaFormat Target schema format + * @param optLabelName Label name (optional), will default to [[defaultShapeLabel]] + * @param relativeBase Relative base * @return */ def dataExtract( - rdf: RDFReasoner, - optData: Option[String], - optDataFormat: Option[DataFormat], - optNodeSelector: Option[String], - optInference: Option[String], - optEngine: Option[String], - optSchemaFormat: Option[SchemaFormat], + inputData: Data, + nodeSelector: String, + inferenceEngine: InferenceEngine, + targetSchemaEngine: Schema, + targetSchemaFormat: SchemaFormat, optLabelName: Option[String], relativeBase: Option[IRI] ): IO[DataExtract] = { - val base = relativeBase.map(_.str) - val engine = optEngine.getOrElse(defaultSchemaEngine) - val schemaFormat = optSchemaFormat.getOrElse(defaultSchemaFormat) - optNodeSelector match { - case None => - IO.pure( - DataExtract.fromMsg("DataExtract: Node selector not specified") - ) - case Some(nodeSelector) => - val es: ESIO[(Schema, ResultShapeMap)] = for { - pm <- io2es(rdf.getPrefixMap) - selector <- either2es(NodeSelector.fromString(nodeSelector, base, pm)) - eitherResult <- { - logger.debug(s"Node selector: $selector") - - val inferOptions: InferOptions = InferOptions( - inferTypePlainNode = 
true, - addLabelLang = Some(Lang("en")), - possiblePrefixMap = PossiblePrefixes.wikidataPrefixMap, - maxFollowOn = 1, - followOnLs = List(), - followOnThreshold = Some(1), - sortFunction = InferOptions.orderByIRI - ) - io2es( + + val base = relativeBase.map(_.str) + + for { + rdf <- inputData.toRdf() // Get rdf resource + eitherResult <- rdf.use(rdfReader => { + val results: EitherT[IO, String, (Schema, ResultShapeMap)] = for { + pm <- io2es(rdfReader.getPrefixMap) + ns <- either2es( + NodeSelector.fromString(nodeSelector, base, pm) + ) + + resultPair <- + EitherT( SchemaInfer.runInferSchema( - rdf, - selector, - engine, + rdfReader, + ns, + targetSchemaEngine.name, optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel), inferOptions ) ) - } - pair <- either2es(eitherResult) - str <- io2es(pair._1.serialize("ShExC")) - _ <- io2es(IO(logger.debug(s"Extracted; $str"))) - } yield { - pair - } - for { - either <- run_es(es) - } yield either.fold( - err => DataExtract.fromMsg(err), - pair => { - val (schema, resultShapeMap) = pair - DataExtract.fromExtraction( - optData, - optDataFormat, - schemaFormat.name, - engine, - schema, - resultShapeMap + + _ <- io2es(IO(logger.debug(s"Extracted schema"))) + } yield resultPair + + results.value + }) + +// finalResult = eitherResult match { +// case Left(err) => IO.raiseError(new RuntimeException(err)) +// case Right((resultSchema, resultShapemap)) => +// DataExtract( +// inputData = inputData, +// targetSchemaFormat = targetSchemaFormat, +// targetSchemaEngine = targetSchemaEngine, +// schema = resultSchema, +// shapeMap = resultShapemap +// ) +// } + + finalResult <- eitherResult.fold( + err => IO.raiseError(new RuntimeException(err)), + pair => { + val (resultSchema, resultShapemap) = pair + IO { + DataExtract( + inputData = inputData, + targetSchemaFormat = targetSchemaFormat, + targetSchemaEngine = targetSchemaEngine, + schema = resultSchema, + shapeMap = resultShapemap ) } - ) - } + } + ) + } yield finalResult } - - /** 
@param msg Error message contained in the result - * @return A DataExtractResult consisting of a single error message and no data - */ - def fromMsg(msg: String): DataExtract = - DataExtract(msg, None, None, None, None, None, None) - - /** @return A DataExtractResult, given all the parameters needed to build it (input, formats and results) - */ - def fromExtraction( - optData: Option[String], - optDataFormat: Option[DataFormat], - schemaFormat: String, - schemaEngine: String, - schema: Schema, - resultShapeMap: ResultShapeMap - ): DataExtract = - DataExtract( - "Shape extracted", - optData, - optDataFormat, - optSchemaFormat = Some(schemaFormat), - optSchemaEngine = Some(schemaEngine), - optSchema = Some(schema), - optResultShapeMap = Some(resultShapeMap) - ) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala index 3df9f8c7..04b8fdd8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala @@ -1,146 +1,83 @@ -package es.weso.rdfshape.server.api.routes.data.logic +package es.weso.rdfshape.server.api.routes.data.logic.operations -import cats.data.EitherT import cats.effect.IO -import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdf.PrefixMap import es.weso.rdf.nodes.IRI -import es.weso.rdf.{PrefixMap, RDFReasoner} -import es.weso.rdfshape.server.api.format.dataFormats.DataFormat -import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.prefixMap2Json +import es.weso.rdfshape.server.api.routes.data.logic.operations.DataInfo.successMessage +import es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.rdfshape.server.utils.json.JsonUtils._ -import es.weso.utils.IOUtils.{either2es, io2es} -import io.circe.Json 
+import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} -/** Data class representing the output of an "information" operation +/** Data class representing the output of a data-information operation * - * @param message Output informational message after processing. Used in case of error. - * @param data RDF input data - * @param dataFormat RDF input data format - * @param predicates List of predicates of the RDF input + * @param inputData RDF input data (contains content and format information) + * @param predicates List of predicates in the RDF input * @param numberOfStatements Number of statements in the RDF input - * @param prefixMap Prefix map of the RDF input + * @param prefixMap Prefix map in the RDF input */ + final case class DataInfo private ( - message: String, - data: Option[String], - dataFormat: Option[DataFormat], - predicates: Option[Set[IRI]], - numberOfStatements: Option[Int], - prefixMap: Option[PrefixMap] -) { + override val inputData: Data, + numberOfStatements: Int, + prefixMap: PrefixMap, + predicates: Set[IRI] +) extends DataOperation(successMessage, inputData) {} - /** Prefix map: defaults to empty. - */ - lazy val pm: PrefixMap = prefixMap.getOrElse(PrefixMap.empty) +/** Static utilities to obtain information about RDF data + */ +private[api] object DataInfo { - /** Convert an information result to its JSON representation + private val successMessage = "Well formed RDF" + + /** Given an input data, get information about it * - * @return JSON information of the extraction result + * @param data Input Data object of any type (Simple, Compound...) 
+ * @return Either a DataInfo object about the input data or an error message */ - def toJson: Json = { - Json.fromFields( - List(("message", Json.fromString(message))) ++ - maybeField("data", data, Json.fromString) ++ - maybeField( - "dataFormat", - dataFormat, - (df: DataFormat) => Json.fromString(df.name) - ) ++ - maybeField("numberOfStatements", numberOfStatements, Json.fromInt) ++ - maybeField("prefixMap", prefixMap, prefixMap2Json) ++ - maybeField( - "predicates", - predicates, - (preds: Set[IRI]) => Json.fromValues(preds.map(iri2Json)) - ) - ) - } - /** @param iri IRI to be converted - * @return JSON representation of the IRI - */ - private def iri2Json(iri: IRI): Json = { - Json.fromString(pm.qualifyIRI(iri)) - } + def dataInfo(data: Data): IO[DataInfo] = for { + rdf <- data.toRdf() + info <- rdf.use(rdf => + for { + nStatements <- rdf.getNumberOfStatements() + predicates <- rdf.predicates().compile.toList + prefixMap <- rdf.getPrefixMap + } yield (nStatements, predicates, prefixMap) + ) -} + (nStatements, predicates, prefixMap) = info -/** Static utilities to obtain information about RDF data - */ -object DataInfo { + } yield DataInfo( + inputData = data, + numberOfStatements = nStatements, + predicates = predicates.toSet, + prefixMap = prefixMap + ) - /** Message attached to the result when created successfully - */ - val successMessage = "Well formed RDF" + implicit val encodeResult: Encoder[DataInfo] = + (dataInfo: DataInfo) => { - /** For a given RDF input (plain text), return information about it - * - * @param data Input data string - * @param dataFormatStr Input data format - * @return Information about the input RDF: statements, well-formed, etc. 
- */ - def dataInfoFromString( - data: String, - dataFormatStr: String - ): IO[Either[String, DataInfo]] = { - val either: EitherT[IO, String, DataInfo] = for { - dataFormat <- either2es(DataFormat.fromString(dataFormatStr)) - json <- io2es( - RDFAsJenaModel - .fromChars(data, dataFormat.name) - .flatMap( - _.use(rdf => dataInfoFromRdf(rdf, Some(data), Some(dataFormat))) + val resultJson: Json = Json.fromFields( + List( + ("numberOfStatements", dataInfo.numberOfStatements.asJson), + ("format", dataInfo.inputData.format.asJson), + ("prefixMap", prefixMap2Json(dataInfo.prefixMap)), + ( + "predicates", + Json.fromValues( + dataInfo.predicates.map(iri2Json(_, Some(dataInfo.prefixMap))) + ) ) + ) ) - ret <- EitherT.fromEither[IO](json) - } yield ret - - either.fold(e => Left(e), d => Right(d)) - } - - /** For a given RDF input, return information about it - * - * @param rdf Input RDF - * @param data Input data string - * @param dataFormat Input data format - * @return Information about the input RDF: statements, well-formed, etc. - */ - def dataInfoFromRdf( - rdf: RDFReasoner, - data: Option[String], - dataFormat: Option[DataFormat] - ): IO[Either[String, DataInfo]] = { - val either: IO[Either[Throwable, DataInfo]] = (for { - numberOfStatements <- rdf.getNumberOfStatements() - predicates <- rdf.predicates().compile.toList - pm <- rdf.getPrefixMap - } yield DataInfo.fromData( - data, - dataFormat, - predicates.toSet, - numberOfStatements, - pm - )).attempt - either.map( - _.fold(e => Left(e.getMessage), r => Right(r)) - ) - } - /** @return A DataInfoResult, given all the parameters needed to build it (input, predicates, etc.) 
- */ - def fromData( - data: Option[String], - dataFormat: Option[DataFormat], - predicates: Set[IRI], - numberOfStatements: Int, - prefixMap: PrefixMap - ): DataInfo = - DataInfo( - successMessage, - data, - dataFormat, - Some(predicates), - Some(numberOfStatements), - Some(prefixMap) - ) + Json.fromFields( + List( + ("message", Json.fromString(dataInfo.successMessage)), + ("data", dataInfo.inputData.asJson), + ("result", resultJson) + ) + ) + } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala new file mode 100644 index 00000000..a5cff4bf --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala @@ -0,0 +1,21 @@ +package es.weso.rdfshape.server.api.routes.data.logic.operations + +import es.weso.rdfshape.server.api.routes.data.logic.operations.DataOperation.successMessage +import es.weso.rdfshape.server.api.routes.data.logic.types.Data + +/** General definition of operations that operate on Data + * + * @param successMessage Message attached to the result of the operation + * @param inputData Data operated on + */ +abstract class DataOperation( + val successMessage: String = successMessage, + val inputData: Data +) + +object DataOperation { + + /** Dummy success message + */ + private val successMessage = "Operation completed successfully" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala deleted file mode 100644 index 387bff38..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperations.scala +++ /dev/null @@ -1,3 +0,0 @@ -package 
es.weso.rdfshape.server.api.routes.data.logic.data.operations object DataOperations { - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala index 4be85149..3a4f0b3f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala @@ -1,16 +1,18 @@ -package es.weso.rdfshape.server.api.routes.data.logic.data +package es.weso.rdfshape.server.api.routes.data.logic.types import cats.effect.{IO, Resource} import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.types.merged.DataCompound import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ CompoundDataParameter, EndpointParameter } import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import io.circe.{Decoder, Encoder, HCursor, Json} +import io.circe.{Decoder, Encoder, HCursor} /** Common trait to all data, whichever its nature (single, compound, endpoint...) 
*/ @@ -20,6 +22,12 @@ trait Data { */ val dataSource: DataSource + val format: Option[DataFormat] + + /** Raw RDF content represented as a String + */ + val rawData: Option[String] + /** Given an RDF source of data, try to parse it and get the RDF model representation * * @return RDF logical model of the data contained @@ -31,16 +39,29 @@ object Data extends DataCompanion[Data] { /** Dummy implementation meant to be overridden */ - override val emptyData: Data = SimpleData.emptyData + override val emptyData: Data = DataSingle.emptyData - /** Dummy implementation meant to be overridden + /** Dummy implementation meant to be overridden. + * If called on a general [[Data]] instance, pattern match among the available data types to + * use the correct implementation */ - override implicit val encodeData: Encoder[Data] = _ => Json.fromString("") + implicit val encodeData: Encoder[Data] = { + case ds: DataSingle => DataSingle.encodeData(ds) + case de: DataEndpoint => DataEndpoint.encodeData(de) + case dc: DataCompound => DataCompound.encodeData(dc) + } /** Dummy implementation meant to be overridden + * If called on a general [[Data]] instance, pattern match among the available data types to + * use the correct implementation */ - override implicit val decodeData: Decoder[Data] = (_: HCursor) => - Right(emptyData) + implicit val decodeData: Decoder[Data] = (cursor: HCursor) => { + this.getClass match { + case ds if ds == classOf[DataSingle] => DataSingle.decodeData(cursor) + case de if de == classOf[DataEndpoint] => DataEndpoint.decodeData(cursor) + case dc if dc == classOf[DataCompound] => DataCompound.decodeData(cursor) + } + } /** General implementation delegating on subclasses */ @@ -51,11 +72,11 @@ object Data extends DataCompanion[Data] { maybeData <- { // Create one of: Simple Data, Compound Data or Endpoint Data // 1. Compound data - if(compoundData.isDefined) CompoundData.mkData(partsMap) + if(compoundData.isDefined) DataCompound.mkData(partsMap) // 2. 
Endpoint data - else if(paramEndpoint.isDefined) EndpointData.mkData(partsMap) + else if(paramEndpoint.isDefined) DataEndpoint.mkData(partsMap) // 3. Simple data or unknown - else SimpleData.mkData(partsMap) + else DataSingle.mkData(partsMap) } } yield maybeData diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/EndpointData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala similarity index 71% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/EndpointData.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala index 2c0a9897..82cb4f10 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/EndpointData.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.data.logic.data +package es.weso.rdfshape.server.api.routes.data.logic.types import cats.effect.{IO, Resource} import com.typesafe.scalalogging.LazyLogging @@ -6,12 +6,14 @@ import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.format.dataFormats.DataFormat -import es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ DataFormatParameter, EndpointParameter } import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import io.circe.syntax.EncoderOps import io.circe.{Decoder, Encoder, HCursor, Json} @@ -22,56 +24,46 @@ import 
scala.util.matching.Regex * @param endpoint IRI with the RDF data * @param dataFormat Data format */ -case class EndpointData( +case class DataEndpoint( endpoint: IRI, dataFormat: DataFormat = DataFormat.defaultFormat ) extends Data with LazyLogging { - override val dataSource: DataSource = DataSource.ENDPOINT + override lazy val rawData: Option[String] = getUrlContents( + endpoint.uri.toString + ).toOption + override val dataSource: DataSource = DataSource.ENDPOINT + override val format: Option[DataFormat] = Some(dataFormat) override def toRdf( relativeBase: Option[IRI] ): IO[Resource[IO, RDFAsJenaModel]] = { RDFAsJenaModel.fromIRI(this.endpoint, dataFormat.name, relativeBase) } + + override def toString: String = { + rawData.getOrElse("") + } } -private[api] object EndpointData extends DataCompanion[EndpointData] { - override lazy val emptyData: EndpointData = EndpointData(IRI(defaultIri)) - private val defaultIri = "http://www.example.org" +private[api] object DataEndpoint extends DataCompanion[DataEndpoint] { + override lazy val emptyData: DataEndpoint = DataEndpoint(defaultIri) + private val defaultIri = IRI("http://www.example.org") /** Regular expressions used for identifying if a custom endpoint was given for this data sample */ private val endpointRegex: Regex = "Endpoint: (.+)".r - override implicit val encodeData: Encoder[EndpointData] = - (data: EndpointData) => + override implicit val encodeData: Encoder[DataEndpoint] = + (data: DataEndpoint) => Json.obj( ("endpoint", Json.fromString(data.endpoint.str)), ("source", Json.fromString(DataSource.ENDPOINT)), ("format", data.dataFormat.asJson) ) - override implicit val decodeData: Decoder[EndpointData] = (cursor: HCursor) => - { - for { - endpoint <- cursor.downField("endpoint").as[String] - - dataFormat <- cursor - .downField("format") - .as[DataFormat] - .orElse(Right(ApiDefaults.defaultDataFormat)) - - base = EndpointData.emptyData.copy( - endpoint = IRI.fromString(endpoint).getOrElse(defaultIri), - 
dataFormat = dataFormat - ) - - } yield base - } - - override def mkData(partsMap: PartsMap): IO[Either[String, EndpointData]] = + override def mkData(partsMap: PartsMap): IO[Either[String, DataEndpoint]] = for { // Parse params paramEndpoint <- partsMap.optPartValue(EndpointParameter.name) @@ -85,19 +77,37 @@ private[api] object EndpointData extends DataCompanion[EndpointData] { format = paramFormat.getOrElse(ApiDefaults.defaultDataFormat) // Try to create data - maybeData: Either[String, EndpointData] = // 2. Endpoint data + maybeData: Either[String, DataEndpoint] = // 2. Endpoint data if(endpoint.isDefined) { logger.debug(s"RDF Data received - Endpoint Data: ${endpoint.get}") IRI .fromString(endpoint.get) .fold( err => Left(s"Could not read endpoint data: $err"), - iri => Right(EndpointData(iri, format)) + iri => Right(DataEndpoint(iri, format)) ) } else Left("No endpoint provided") } yield maybeData + override implicit val decodeData: Decoder[DataEndpoint] = (cursor: HCursor) => + { + for { + endpoint <- cursor.downField("endpoint").as[String] + + dataFormat <- cursor + .downField("format") + .as[DataFormat] + .orElse(Right(ApiDefaults.defaultDataFormat)) + + base = DataEndpoint.emptyData.copy( + endpoint = IRI.fromString(endpoint).getOrElse(defaultIri), + dataFormat = dataFormat + ) + + } yield base + } + /** @param endpointStr String containing the endpoint * @param endpointRegex Regex used to look for the endpoint in the string * @return Optionally, the endpoint contained in a given data string diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/SimpleData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala similarity index 82% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/SimpleData.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala index 
8ed45eaf..ed63534b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/SimpleData.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.data.logic.data +package es.weso.rdfshape.server.api.routes.data.logic.types import cats.effect._ import com.typesafe.scalalogging.LazyLogging @@ -7,7 +7,8 @@ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, NONE, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.format.dataFormats.DataFormat -import es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.html2rdf.HTML2RDF @@ -20,23 +21,21 @@ import io.circe.syntax.EncoderOps * * @note Invalid initial data is accepted, but may cause errors when operating with it. * @param dataRaw RDF data raw text - * @param optEndpoint TODO: remove eventually. 
Optionally, a data endpoint * @param dataFormat Data format * @param inference Data inference - * @param targetDataFormat Data target format (only for conversion operations) * @param activeDataSource Active source, used to know which source the data comes from */ -sealed case class SimpleData( +sealed case class DataSingle( dataRaw: String, - optEndpoint: Option[String], dataFormat: DataFormat, inference: InferenceEngine, - targetDataFormat: Option[DataFormat], activeDataSource: DataSource ) extends Data with LazyLogging { - override val dataSource: DataSource = activeDataSource + override lazy val rawData: Option[String] = Some(dataRaw) + override val dataSource: DataSource = activeDataSource + override val format: Option[DataFormat] = Some(dataFormat) /** Given an RDF source of data, try to get the RDF model representation * @@ -56,6 +55,36 @@ sealed case class SimpleData( } yield result } + /** Get RDF data from data parameters + * + * @return The resource capable of reading the RDF data + */ + def getRdfResource( + relativeBase: Option[IRI] + ): IO[Resource[IO, RDFReasoner]] = { + val base = relativeBase.map(_.str) + + val x: IO[Resource[IO, RDFReasoner]] = + activeDataSource match { + + case DataSource.TEXT | DataSource.URL | DataSource.FILE => + logger.debug(s"Input - $activeDataSource: $dataRaw") + if(dataRaw.isBlank) + RDFAsJenaModel.empty.flatMap(e => IO(e)) + else + for { + rdf <- rdfFromString(dataRaw, dataFormat, base) + result = rdf.evalMap(rdf => rdf.applyInference(inference)) + } yield result + + case other => + val msg = s"Unknown value for data source: $other" + logger.error(msg) + err(msg) + } + x + } + /** @param data RDF data as a raw string * @param format RDF data format * @param base Base @@ -93,62 +122,20 @@ sealed case class SimpleData( ) } - // def toRdfJena: IO[Resource[IO, RDFAsJenaModel]] = { - // for { - // rdf <- RDFAsJenaModel.fromString( - // dataRaw, - // dataFormat.name, - // None, - /* useBNodeLabels = if(activeDataSource != 
DataSource.URL) false else true */ - // ) - // } yield rdf - // } - - /** Get RDF data from data parameters - * - * @return The resource capable of reading the RDF data - */ - def getRdfResource( - relativeBase: Option[IRI] - ): IO[Resource[IO, RDFReasoner]] = { - val base = relativeBase.map(_.str) - - val x: IO[Resource[IO, RDFReasoner]] = - activeDataSource match { - - case DataSource.TEXT | DataSource.URL | DataSource.FILE => - logger.debug(s"Input - $activeDataSource: $dataRaw") - if(dataRaw.isBlank) - RDFAsJenaModel.empty.flatMap(e => IO(e)) - else - for { - rdf <- rdfFromString(dataRaw, dataFormat, base) - result = rdf.evalMap(rdf => rdf.applyInference(inference)) - } yield result - - case other => - val msg = s"Unknown value for data source: $other" - logger.error(msg) - err(msg) - } - x - } - + override def toString: String = dataRaw } -private[api] object SimpleData - extends DataCompanion[SimpleData] +private[api] object DataSingle + extends DataCompanion[DataSingle] with LazyLogging { /** Empty data representation, with no inner data and all defaults to none */ - override lazy val emptyData: SimpleData = - SimpleData( + override lazy val emptyData: DataSingle = + DataSingle( dataRaw = emptyDataValue, - optEndpoint = None, dataFormat = DataFormat.defaultFormat, inference = NONE, - targetDataFormat = None, activeDataSource = DataSource.defaultActiveDataSource ) @@ -175,53 +162,16 @@ private[api] object SimpleData .getOrElse(NONE) } yield inference - override implicit val encodeData: Encoder[SimpleData] = (data: SimpleData) => - { + override implicit val encodeData: Encoder[DataSingle] = + (data: DataSingle) => Json.obj( ("data", Json.fromString(data.dataRaw)), ("source", Json.fromString(data.activeDataSource)), ("format", data.dataFormat.asJson), - ("targetFormat", data.targetDataFormat.asJson), ("inference", data.inference.asJson) ) - } - override implicit val decodeData: Decoder[SimpleData] = - (cursor: HCursor) => { - for { - data <- 
cursor.downField("data").as[String] - - dataFormat <- cursor - .downField("format") - .as[DataFormat] - .orElse(Right(ApiDefaults.defaultDataFormat)) - - targetDataFormat <- cursor - .downField("targetFormat") - .as[Option[DataFormat]] - - dataInference <- - cursor - .downField("inference") - .as[Option[InferenceEngine]] - - dataSource <- cursor - .downField("source") - .as[DataSource] - .orElse(Right(DataSource.defaultActiveDataSource)) - - base = SimpleData.emptyData.copy( - dataRaw = data, - dataFormat = dataFormat, - targetDataFormat = targetDataFormat, - activeDataSource = dataSource, - inference = dataInference.getOrElse(NONE) - ) - - } yield base - } - - override def mkData(partsMap: PartsMap): IO[Either[String, SimpleData]] = + override def mkData(partsMap: PartsMap): IO[Either[String, DataSingle]] = for { dataStr <- partsMap.optPartValue(DataParameter.name) dataUrl <- partsMap.optPartValue(DataUrlParameter.name) @@ -231,10 +181,7 @@ private[api] object SimpleData partsMap ) paramInference <- partsMap.optPartValue(InferenceParameter.name) - targetDataFormat <- DataFormat.fromRequestParams( - TargetDataFormatParameter.name, - partsMap - ) + paramDataSource <- partsMap.optPartValue(ActiveDataSourceParameter.name) // Confirm final format and inference @@ -246,14 +193,13 @@ private[api] object SimpleData _ = logger.debug(s"RDF Data received - Source: $dataSource") // Base for the result - base = SimpleData.emptyData.copy( + base = DataSingle.emptyData.copy( dataFormat = format, - inference = inference, - targetDataFormat = targetDataFormat + inference = inference ) // Create the data - maybeData: Either[String, SimpleData] = dataSource match { + maybeData: Either[String, DataSingle] = dataSource match { case DataSource.TEXT => dataStr match { case None => Left("No value for the data string") @@ -298,6 +244,36 @@ private[api] object SimpleData } } yield maybeData + override implicit val decodeData: Decoder[DataSingle] = + (cursor: HCursor) => { + for { + data <- 
cursor.downField("data").as[String] + + dataFormat <- cursor + .downField("format") + .as[DataFormat] + .orElse(Right(ApiDefaults.defaultDataFormat)) + + dataInference <- + cursor + .downField("inference") + .as[Option[InferenceEngine]] + + dataSource <- cursor + .downField("source") + .as[DataSource] + .orElse(Right(DataSource.defaultActiveDataSource)) + + base = DataSingle.emptyData.copy( + dataRaw = data, + dataFormat = dataFormat, + activeDataSource = dataSource, + inference = dataInference.getOrElse(NONE) + ) + + } yield base + } + /** @param inferenceStr String representing the inference value * @return Optionally, the inference contained in a given data string */ @@ -306,5 +282,4 @@ private[api] object SimpleData ): Option[InferenceEngine] = { inferenceStr.flatMap(InferenceEngine.fromString(_).toOption) } - } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/CompoundData.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala similarity index 60% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/CompoundData.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala index 1b196a15..20130c64 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/CompoundData.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.data.logic.data.merged +package es.weso.rdfshape.server.api.routes.data.logic.types.merged import cats.effect._ import cats.implicits._ @@ -6,8 +6,15 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI -import 
es.weso.rdfshape.server.api.routes.data.logic.data.DataSource.DataSource -import es.weso.rdfshape.server.api.routes.data.logic.data._ +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.routes.data.logic.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.types.{ + Data, + DataCompanion, + DataEndpoint, + DataSingle +} import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.CompoundDataParameter import es.weso.rdfshape.server.api.utils.parameters.PartsMap import io.circe._ @@ -16,11 +23,26 @@ import io.circe.syntax._ /** Data class representing the merge of several RDF data into a single compound * - * @param elements List of the individual({@linkplain es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData SimpleData}) conforming a CompoundData instance + * @param elements List of the individual({@linkplain DataSingle SimpleData}) conforming a CompoundData instance */ -case class CompoundData(elements: List[Data]) extends Data with LazyLogging { +case class DataCompound(elements: List[Data]) extends Data with LazyLogging { + + /** Return the compound of all the inner element's data appended to each other. + * + * @note If one element's data cannot be computed, returns none. 
+ */ + override lazy val rawData: Option[String] = { + val definedElements = elements.map(_.rawData).filter(_.isDefined).map(_.get) + + // All elements' raw data was computed + if(elements.length == definedElements.length) + Some(definedElements.mkString("\n")) + else None + } override val dataSource: DataSource = DataSource.COMPOUND + override val format: Option[DataFormat] = + None // Each element may have its own format /** @return RDF logical model of the data contained in the compound */ @@ -36,6 +58,10 @@ case class CompoundData(elements: List[Data]) extends Data with LazyLogging { value } + override def toString: String = { + elements.flatMap(_.toString).mkString("\n") + } + /** Recursively process the data in the compound to extract all individual RDF Jena models to a single list * * @return List of RDF Jena models in each of the elements of the compound @@ -43,33 +69,32 @@ case class CompoundData(elements: List[Data]) extends Data with LazyLogging { private def getJenaModels: List[IO[Resource[IO, RDFAsJenaModel]]] = { elements.flatMap { // Single data: straight extraction - case sd: SimpleData => List(sd.toRdf()) - case ed: EndpointData => List(ed.toRdf()) - case cd: CompoundData => + case sd: DataSingle => List(sd.toRdf()) + case ed: DataEndpoint => List(ed.toRdf()) + case cd: DataCompound => cd.getJenaModels // Compound data: recursive extraction } } - } -private[api] object CompoundData - extends DataCompanion[CompoundData] +private[api] object DataCompound + extends DataCompanion[DataCompound] with LazyLogging { - override lazy val emptyData: CompoundData = CompoundData(List()) + override lazy val emptyData: DataCompound = DataCompound(List()) - override def mkData(partsMap: PartsMap): IO[Either[String, CompoundData]] = + override def mkData(partsMap: PartsMap): IO[Either[String, DataCompound]] = for { // Parse params compoundData <- partsMap.optPartValue(CompoundDataParameter.name) // Try to create data - maybeData: Either[String, CompoundData] = + 
maybeData: Either[String, DataCompound] = if(compoundData.isDefined) { logger.debug( s"RDF Data received - Compound Data: ${compoundData.get}" ) - CompoundData + DataCompound .fromJsonString(compoundData.get) .leftMap(err => s"Could not read compound data: $err") } else Left("No compound data provided") @@ -77,21 +102,21 @@ private[api] object CompoundData /** Encoder used to transform CompoundData instances to JSON values */ - override implicit val encodeData: Encoder[CompoundData] = - (a: CompoundData) => Json.fromValues(a.elements.map(_.asJson)) + override implicit val encodeData: Encoder[DataCompound] = + (data: DataCompound) => Json.fromValues(data.elements.map(_.asJson)) /** Decoder used to extract CompoundData instances from JSON values */ - override implicit val decodeData: Decoder[CompoundData] = + override implicit val decodeData: Decoder[DataCompound] = (cursor: HCursor) => { cursor.values match { case None => DecodingFailure("Empty list for compound data", List()) - .asLeft[CompoundData] + .asLeft[DataCompound] case Some(vs) => val xs: Decoder.Result[List[Data]] = vs.toList.map(_.as[Data]).sequence - xs.map(CompoundData(_)) + xs.map(DataCompound(_)) } } @@ -101,22 +126,14 @@ private[api] object CompoundData * @return Either a new CompoundData instance or an error message * @note Internally resorts to the decoding method in this class */ - def fromJsonString(jsonStr: String): Either[String, CompoundData] = for { + def fromJsonString(jsonStr: String): Either[String, DataCompound] = for { json <- parse(jsonStr).leftMap(parseError => s"CompoundData.fromString: error parsing $jsonStr as JSON: $parseError" ) cd <- json - .as[CompoundData] + .as[DataCompound] .leftMap(decodeError => s"Error decoding json to compoundData: $decodeError\nJSON obtained: \n${json.spaces2}" ) } yield cd - - // 1. 
Compound data - // if(compoundData.isDefined) { - // logger.debug(s"RDF Data received - Compound Data: ${compoundData.get}") - // CompoundData - // .fromJsonString(compoundData.get) - // .leftMap(err => s"Could not read compound data: $err") - // } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala index a1819ab6..63411bce 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala @@ -1,4 +1,4 @@ -package es.weso.rdfshape.server.api.routes.data.logic.data.merged +package es.weso.rdfshape.server.api.routes.data.logic.types.merged import _root_.es.weso.rdf._ import _root_.es.weso.rdf.jena.RDFAsJenaModel @@ -138,11 +138,11 @@ case class MergedModels( override def isIsomorphicWith(other: RDFReader): RDFRead[Boolean] = getModel.flatMap(_.isIsomorphicWith(other)) + def getModel: IO[RDFAsJenaModel] = mergedModel.get + override def asRDFBuilder: RDFRead[RDFBuilder] = getModel.flatMap(_.asRDFBuilder) - def getModel: IO[RDFAsJenaModel] = mergedModel.get - override def rdfReaderName: String = s"MergedModels" override def sourceIRI: Option[IRI] = None diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index 1917380d..3327d6a0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -5,27 +5,24 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefaults import 
es.weso.rdfshape.server.api.definitions.ApiDefaults.{ availableInferenceEngines, - defaultDataFormat, - defaultInference + defaultInferenceEngineName } import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} +import es.weso.rdfshape.server.api.format.dataFormats.{ + DataFormat, + GraphicFormat, + RDFFormat +} import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.routes.data.logic.DataConversion -import es.weso.rdfshape.server.api.routes.data.logic.DataExtract.dataExtract -import es.weso.rdfshape.server.api.routes.data.logic.DataInfo.{ - dataInfoFromRdf, - dataInfoFromString +import es.weso.rdfshape.server.api.routes.data.logic.operations.{ + DataConversion, + DataInfo } -import es.weso.rdfshape.server.api.routes.data.logic.DataOperations.dataFormatOrDefault -import es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData -import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery -import es.weso.rdfshape.server.api.utils.OptEitherF._ +import es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson -import es.weso.schema._ -import es.weso.utils.IOUtils._ import io.circe.Json +import io.circe.syntax.EncoderOps import org.http4s._ import org.http4s.circe._ import org.http4s.client.Client @@ -36,6 +33,7 @@ import org.http4s.multipart.Multipart * * @param client HTTP4S client object */ +//noinspection DuplicatedCode class DataService(client: Client[IO]) extends Http4sDsl[IO] with ApiService @@ -47,17 +45,24 @@ class DataService(client: Client[IO]) */ val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - /** Returns a JSON array with the accepted input RDF data formats + /** Returns a JSON array with the accepted input or output RDF data formats */ case GET -> Root / `api` / `verb` / "formats" / 
"input" => - val formats = DataFormat.availableFormats.map(_.name) - val json = Json.fromValues(formats.map(Json.fromString)) + val formatNames = RDFFormat.availableFormats.map(_.name) + val json = Json.fromValues(formatNames.map(Json.fromString)) Ok(json) /** Returns a JSON array with the available output RDF data formats */ case GET -> Root / `api` / `verb` / "formats" / "output" => - val formats = DataFormats.availableFormats.map(_.name) + val formatNames = RDFFormat.availableFormats.map(_.name) + val json = Json.fromValues(formatNames.map(Json.fromString)) + Ok(json) + + /** Returns a JSON array with the available visualization formats + */ + case GET -> Root / `api` / `verb` / "formats" / "visual" => + val formats = GraphicFormat.availableFormats.map(_.name) val json = Json.fromValues(formats.map(Json.fromString)) Ok(json) @@ -77,20 +82,9 @@ class DataService(client: Client[IO]) /** Returns the default inference engine used as a raw string */ case GET -> Root / `api` / `verb` / "inferenceEngines" / "default" => - val defaultInferenceEngine = defaultInference + val defaultInferenceEngine = defaultInferenceEngineName Ok(Json.fromString(defaultInferenceEngine)) - /** Returns a JSON array with the available visualization formats - */ - case GET -> Root / `api` / `verb` / "visualize" / "formats" => - val formats = DataConversion.availableGraphFormatNames ++ - List( - "DOT", // DOT is not a visual format but can be used to debug - "JSON" // JSON is the format that can be used by Cytoscape - ) - val json = Json.fromValues(formats.map(Json.fromString)) - Ok(json) - /** Obtain information about an RDF source. 
* Receives a JSON object with the input RDF information: * - data [String]: RDF data @@ -98,43 +92,41 @@ class DataService(client: Client[IO]) * - dataFile [File Object]: File containing RDF data * - dataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied - * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) - * Returns a JSON object with the RDF data information: - * - message [String]: Informational message - * - data [String]: RDF data sent back (originally sent by the client) - * - dataFormat [String]: Data format of the data - * - numberOfStatements [String]: Data format of the data - * - prefixMap [Object]: Dictionary with the prefix map of the data - * - predicates [Array]: Array of the predicates present in the data + * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) + * Returns a JSON object with the operation results. See [[DataInfo.encodeResult]] */ case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) for { - dataTuple <- SimpleData.getData(partsMap, relativeBase) - (resourceRdf, simpleData) = dataTuple - dataFormat = dataFormatOrDefault(simpleData.optDataFormat.map(_.name)) - response <- simpleData.data match { - case Some(data) => + // Get the data from the partsMap + eitherData <- Data.mkData(partsMap) + response <- eitherData.fold( + // If there was an error, return it + err => errorResponseJson(err, InternalServerError), + // Else, try and compute the data info + data => for { - result <- dataInfoFromString(data, dataFormat) - response <- result match { - case Left(err) => errorResponseJson(err, InternalServerError) - case Right(res) => Ok(res.toJson) - } + // Check for exceptions when getting the data info + maybeResult <- DataInfo.dataInfo(data).attempt + response <- maybeResult.fold( + // Error: return it + err => + // Legacy code may return exceptions with "null" messages + 
err.getMessage match { + case errorMessage: String => + errorResponseJson(errorMessage, InternalServerError) + case _ => // null exception message, return a general error message + errorResponseJson( + DataServiceError.couldNotParseData, + InternalServerError + ) + }, + // Success: build successful response + dataInfo => Ok(dataInfo.asJson) + ) } yield response - case None => - for { - maybeData <- - resourceRdf.use(rdf => - dataInfoFromRdf(rdf, None, simpleData.optDataFormat) - ) - response <- maybeData match { - case Left(err) => errorResponseJson(err, InternalServerError) - case Right(res) => Ok(res.toJson) - } - } yield response - } + ) } yield response } @@ -146,45 +138,42 @@ class DataService(client: Client[IO]) * - dataFormat [String]: Format of the RDF data * - targetDataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied - * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) - * Returns a JSON object with the RDF data information: + * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) + * Returns a JSON object with the operation results. See [[DataConversion.encodeResult]]: * - message [String]: Informational message * - data [String]: RDF data sent back (originally sent by the client) - * - result [String]: RDF resulting from the conversion - * - dataFormat [String]: Data format of the input data + * - inputDataFormat [String]: Data format of the input data * - targetDataFormat [String]: Data format of the output data - * - numberOfStatements [String]: Data format of the data - * - prefixMap [Object]: Dictionary with the prefix map of the data - * - predicates [Array]: Array of the predicates present in the data + * - result[Object]: JSON representation of the resulting data. 
See [[Data.encodeData]] */ - case req @ POST -> Root / `api` / `verb` / "convert" => - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - for { - dataParam <- SimpleData.getData(partsMap, relativeBase) - (resourceRdf, dp) = dataParam - targetFormat = dp.targetDataFormat.getOrElse(defaultDataFormat).name - dataFormat = dp.optDataFormat.getOrElse(defaultDataFormat) - - result <- io2f( - resourceRdf.use(rdf => { - logger.debug(s"Attempting data conversion") - DataConversion - .rdfConvert(rdf, dp.data, dataFormat, targetFormat) - - }) - ).attempt - .map( - _.fold(exc => Left(exc.getMessage), dc => Right(dc)) - ) - - response <- result match { - case Left(err) => errorResponseJson(err, InternalServerError) - case Right(result) => Ok(result.toJson) - } - - } yield response - } + // case req @ POST -> Root / `api` / `verb` / "convert" => + // req.decode[Multipart[IO]] { m => + // val partsMap = PartsMap(m.parts) + // for { + // dataParam <- DataSingle.getData(partsMap, relativeBase) + // (resourceRdf, dp) = dataParam + /* targetFormat = dp.targetDataFormat.getOrElse(defaultDataFormat).name */ + /* dataFormat = dp.optDataFormat.getOrElse(defaultDataFormat) */ + // + // result <- io2f( + // resourceRdf.use(rdf => { + // logger.debug(s"Attempting data conversion") + // DataConversion + // .dataConvert(rdf, dp.data, dataFormat, targetFormat) + // + // }) + // ).attempt + // .map( + // _.fold(exc => Left(exc.getMessage), dc => Right(dc)) + // ) + // + // response <- result match { + /* case Left(err) => errorResponseJson(err, InternalServerError) */ + // case Right(result) => Ok(result.toJson) + // } + // + // } yield response + // } /** Perform a SPARQL query on RDF data. 
* Receives a JSON object with the input RDF and query information: @@ -196,7 +185,7 @@ class DataService(client: Client[IO]) * - query [String]: Raw SPARQL query * - queryUrl [String]: Url containing the SPARQL query * - queryFile [String]: File containing the SPARQL query - * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) + * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) * - activeQueryTab [String]: Identifies the source of the query (raw, URL, file...) * Returns a JSON object with the RDF data information: * - message [String]: Informational message @@ -208,38 +197,38 @@ class DataService(client: Client[IO]) * - prefixMap [Object]: Dictionary with the prefix map of the data * - predicates [Array]: Array of the predicates present in the data */ - case req @ POST -> Root / `api` / `verb` / "query" => - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - logger.debug(s"Data query params map: $partsMap") - for { - /* TODO: an error is thrown on bad query URLs (IO.raise...), but it is - * not controlled */ - dataParam <- SimpleData.getData(partsMap, relativeBase) - - (resourceRdf, dp) = dataParam - maybeQuery <- SparqlQuery.getSparqlQuery(partsMap) - resp <- maybeQuery match { - case Left(err) => - // Query could not be even parsed from user data - errorResponseJson(s"Error obtaining query data: $err", BadRequest) - case Right(query) => - // Query was parsed, but may be invalid still - val optQueryStr = query.queryRaw - logger.debug(s"Data query with querystring: $optQueryStr") - for { - result <- io2f( - resourceRdf.use(rdf => rdf.queryAsJson(optQueryStr)) - ).attempt - .map(_.fold(exc => Left(exc.getMessage), dc => Right(dc))) - response <- result match { - case Left(err) => errorResponseJson(err, InternalServerError) - case Right(json) => Ok(json) - } - } yield response - } - } yield resp - } + // case req @ POST -> Root / `api` / `verb` / "query" => + // 
req.decode[Multipart[IO]] { m => + // val partsMap = PartsMap(m.parts) + // logger.debug(s"Data query params map: $partsMap") + // for { + /* /* TODO: an error is thrown on bad query URLs (IO.raise...), but it is */ + // * not controlled */ + // dataParam <- DataSingle.getData(partsMap, relativeBase) + // + // (resourceRdf, dp) = dataParam + // maybeQuery <- SparqlQuery.getSparqlQuery(partsMap) + // resp <- maybeQuery match { + // case Left(err) => + // // Query could not be even parsed from user data + /* errorResponseJson(s"Error obtaining query data: $err", BadRequest) */ + // case Right(query) => + // // Query was parsed, but may be invalid still + // val optQueryStr = query.queryRaw + // logger.debug(s"Data query with querystring: $optQueryStr") + // for { + // result <- io2f( + // resourceRdf.use(rdf => rdf.queryAsJson(optQueryStr)) + // ).attempt + /* .map(_.fold(exc => Left(exc.getMessage), dc => Right(dc))) */ + // response <- result match { + /* case Left(err) => errorResponseJson(err, InternalServerError) */ + // case Right(json) => Ok(json) + // } + // } yield response + // } + // } yield resp + // } /** Attempt to extract a schema from an RDF source. * Receives a JSON object with the input RDF information: @@ -248,7 +237,7 @@ class DataService(client: Client[IO]) * - dataFile [File Object]: File containing RDF data * - dataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied - * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) + * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) 
* Returns a JSON object with the extraction information: * - message [String]: Informational message * - data [String]: Input RDF data @@ -258,53 +247,54 @@ class DataService(client: Client[IO]) * - schemaEngine [String]: Engine of the extracted schema * - resultShapeMap [String]: Shapemap of the extracted schema */ - case req @ POST -> Root / `api` / `verb` / "extract" => - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - for { - maybeData <- SimpleData.getData(partsMap, relativeBase).attempt - schemaEngine <- partsMap.optPartValue("schemaEngine") - optSchemaFormatStr <- partsMap.optPartValue("schemaFormat") - inference <- partsMap.optPartValue("inference") - label <- partsMap.optPartValue("labelName") - optBaseStr <- partsMap.optPartValue("base") - nodeSelector <- partsMap.optPartValue("nodeSelector") - schemaFormat <- optEither2f( - optSchemaFormatStr, - SchemaFormat.fromString - ) - response <- maybeData match { - // No data received - case Left(err) => - errorResponseJson(err.getMessage, BadRequest) - // Data received, try to extract - case Right((resourceRdf, dp)) => - for { - result <- io2f( - resourceRdf.use(rdf => - dataExtract( - rdf, - dp.data, - dp.optDataFormat, - nodeSelector, - inference, - schemaEngine, - schemaFormat, - label, - None - ) - ) - ).attempt - .map(_.fold(exc => Left(exc.getMessage), res => Right(res))) - response <- result match { - case Left(err) => errorResponseJson(err, InternalServerError) - case Right(result) => Ok(result.toJson) - } - - } yield response - } - } yield response - } + // case req @ POST -> Root / `api` / `verb` / "extract" => + // req.decode[Multipart[IO]] { m => + // val partsMap = PartsMap(m.parts) + // for { + /* maybeData <- DataSingle.getData(partsMap, relativeBase).attempt */ + // schemaEngine <- partsMap.optPartValue("schemaEngine") + // optSchemaFormatStr <- partsMap.optPartValue("schemaFormat") + // inference <- partsMap.optPartValue("inference") + // label <- 
partsMap.optPartValue("labelName") + // optBaseStr <- partsMap.optPartValue("base") + // nodeSelector <- partsMap.optPartValue("nodeSelector") + // schemaFormat <- optEither2f( + // optSchemaFormatStr, + // SchemaFormat.fromString + // ) + // response <- maybeData match { + // // No data received + // case Left(err) => + // errorResponseJson(err.getMessage, BadRequest) + // // Data received, try to extract + // case Right((resourceRdf, dp)) => + // for { + // result <- io2f( + // // Raise IO error if no node selector, + // resourceRdf.use(rdf => + // dataExtract( + // rdf, + // dp.data, + // dp.optDataFormat, + // nodeSelector, + // inference, + // schemaEngine, + // schemaFormat, + // label, + // None + // ) + // ) + // ).attempt + /* .map(_.fold(exc => Left(exc.getMessage), res => Right(res))) */ + // response <- result match { + /* case Left(err) => errorResponseJson(err, InternalServerError) */ + // case Right(result) => Ok(result.toJson) + // } + // + // } yield response + // } + // } yield response + // } } private val relativeBase = ApiDefaults.relativeBase @@ -321,3 +311,9 @@ object DataService { def apply(client: Client[IO]): DataService = new DataService(client) } + +private object DataServiceError extends Enumeration { + type DataServiceError = String + val couldNotParseData: DataServiceError = + "Unknown error parsing the data provided. Check the input and the selected format." 
+} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala index 8dcafbb7..3d2e184e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala @@ -4,7 +4,7 @@ import cats.effect._ import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngineName import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ @@ -53,7 +53,7 @@ sealed case class Schema( schema <- Schemas.fromString( str, schemaFormat.name, - schemaEngine.getOrElse(defaultSchemaEngine), + schemaEngine.getOrElse(defaultSchemaEngineName), getBase ) _ <- IO { @@ -78,7 +78,7 @@ sealed case class Schema( val schemaFormatStr = schemaFormat.name val schemaEngineStr = - schemaEngine.getOrElse(defaultSchemaEngine) + schemaEngine.getOrElse(defaultSchemaEngineName) Schemas .fromString( schemaStr, @@ -102,7 +102,7 @@ sealed case class Schema( .fromString( schemaStr, schemaFormat.name, - schemaEngine.getOrElse(defaultSchemaEngine), + schemaEngine.getOrElse(defaultSchemaEngineName), getBase ) .attempt @@ -125,7 +125,7 @@ sealed case class Schema( logger.debug(s"nameSchema: $nameSchema") } foundSchema <- Schemas.lookupSchema( - schemaEngine.getOrElse(defaultSchemaEngine) + schemaEngine.getOrElse(defaultSchemaEngineName) ) _ <- IO { logger.debug(s"foundSchema: ${foundSchema.name}") @@ -252,18 +252,3 @@ object Schema extends LazyLogging { ) } - -/** Enumeration of the different possible 
Schema sources sent by the client. - * The source sent indicates the API if the schema was sent in raw text, as a URL - * to be fetched or as a text file containing the schema. - * In case the client submits the schema in several formats, the selected source will indicate the preferred one. - */ -private[logic] object SchemaSource extends Enumeration { - type SchemaSource = String - - val TEXT = "#schemaTextArea" - val URL = "#schemaUrl" - val FILE = "#schemaFile" - - val defaultActiveSchemaSource: SchemaSource = TEXT -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index 4d1bdea1..ff7ecf16 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -1,15 +1,13 @@ package es.weso.rdfshape.server.api.routes.schema.logic import cats.effect.IO -import cats.syntax.either._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, RDFBuilder, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions -import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, SchemaFormat} -import es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData +import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.schema.{Result, Schema, ShaclexSchema, ValidationTrigger} import es.weso.shacl.converter.Shacl2ShEx import es.weso.shapemaps.ShapeMap @@ -135,47 +133,48 @@ private[api] object SchemaOperations extends LazyLogging { * @param builder RDF builder * @return */ - private[api] def schemaValidateStr( - data: String, - optDataFormat: 
Option[DataFormat], - optSchema: Option[String], - optSchemaFormat: Option[SchemaFormat], - optSchemaEngine: Option[String], - tp: TriggerMode, - optInference: Option[String], - relativeBase: Option[IRI], - builder: RDFBuilder - ): IO[(Result, Option[ValidationTrigger], Long)] = { - val dp = SimpleData.empty.copy( - data = Some(data), - optDataFormat = optDataFormat, - inference = optInference - ) - val sp = Schema.empty.copy( - schema = optSchema, - schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), - schemaEngine = optSchemaEngine - ) - - val result: IO[(Result, Option[ValidationTrigger], Long)] = for { - pair <- dp.getData(relativeBase) - (_, resourceRdf) = pair - result <- resourceRdf.use(rdf => - for { - pairSchema <- sp.getSchema(Some(rdf)) - (_, eitherSchema) = pairSchema - schema <- IO.fromEither( - eitherSchema.leftMap(s => - new RuntimeException(s"Error obtaining schema: $s") - ) - ) - res <- schemaValidate(rdf, schema, tp, relativeBase, builder) - } yield res - ) - } yield result - - result.attempt.flatMap(_.fold(e => schemaErr(e.getMessage), IO.pure)) - } + // TODO: redo +// private[api] def schemaValidateStr( +// data: String, +// optDataFormat: Option[DataFormat], +// optSchema: Option[String], +// optSchemaFormat: Option[SchemaFormat], +// optSchemaEngine: Option[String], +// tp: TriggerMode, +// optInference: Option[String], +// relativeBase: Option[IRI], +// builder: RDFBuilder +// ): IO[(Result, Option[ValidationTrigger], Long)] = { +// val dp = DataSingle.empty.copy( +// data = Some(data), +// optDataFormat = optDataFormat, +// inference = optInference +// ) +// val sp = Schema.empty.copy( +// schema = optSchema, +// schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), +// schemaEngine = optSchemaEngine +// ) +// +// val result: IO[(Result, Option[ValidationTrigger], Long)] = for { +// pair <- dp.getData(relativeBase) +// (_, resourceRdf) = pair +// result <- resourceRdf.use(rdf => +// for { +// pairSchema <- 
sp.getSchema(Some(rdf)) +// (_, eitherSchema) = pairSchema +// schema <- IO.fromEither( +// eitherSchema.leftMap(s => +// new RuntimeException(s"Error obtaining schema: $s") +// ) +// ) +// res <- schemaValidate(rdf, schema, tp, relativeBase, builder) +// } yield res +// ) +// } yield result +// +// result.attempt.flatMap(_.fold(e => schemaErr(e.getMessage), IO.pure)) +// } /** For a given data and schema, attempt to validate it with WESO libraries * diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala new file mode 100644 index 00000000..db30874a --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala @@ -0,0 +1,16 @@ +package es.weso.rdfshape.server.api.routes.schema.logic + +/** Enumeration of the different possible Schema sources sent by the client. + * The source sent indicates the API if the schema was sent in raw text, as a URL + * to be fetched or as a text file containing the schema. + * In case the client submits the schema in several formats, the selected source will indicate the preferred one. 
+ */ +private[api] object SchemaSource extends Enumeration { + type SchemaSource = String + + val TEXT = "#schemaTextArea" + val URL = "#schemaUrl" + val FILE = "#schemaFile" + + val defaultActiveSchemaSource: SchemaSource = TEXT +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala index 28067572..2d7b7a4f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala @@ -73,7 +73,7 @@ private[api] object TriggerMode extends LazyLogging { * @param shapeMap Optionally, the inner shapemap associated to the TriggerMode * @return A new TriggerMode based on the given parameters */ - def mkTriggerMode( + private def mkTriggerMode( triggerMode: Option[String], shapeMap: ShapeMap ): Either[String, TriggerMode] = { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 318a0c07..8d196fa1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -1,23 +1,19 @@ package es.weso.rdfshape.server.api.routes.schema.service import cats.effect._ -import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngine +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngineName import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import 
es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.routes.data.logic.data.SimpleData +import es.weso.rdfshape.server.api.routes.schema.logic.Schema import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ -import es.weso.rdfshape.server.api.routes.schema.logic.{Schema, TriggerMode} import es.weso.rdfshape.server.api.utils.OptEitherF._ import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.schema._ -import es.weso.utils.IOUtils.io2f import io.circe.Json import org.http4s._ import org.http4s.circe._ @@ -175,7 +171,7 @@ class SchemaService(client: Client[IO]) schema, sp.schema, sp.schemaFormat, - sp.schemaEngine.getOrElse(defaultSchemaEngine), + sp.schemaEngine.getOrElse(defaultSchemaEngineName), targetSchemaFormat, sp.targetSchemaEngine ) @@ -279,56 +275,57 @@ class SchemaService(client: Client[IO]) * - appInfo [Object]: Additional information on why the node conforms or not * - errors [Array]: Array of errors in the validation */ - case req @ POST -> Root / `api` / `verb` / "validate" => - req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - val r = for { - dataPair <- SimpleData.getData(partsMap, relativeBase) - (resourceRdf, dp) = dataPair - res <- for { - emptyRes <- RDFAsJenaModel.empty - vv <- (resourceRdf, emptyRes).tupled.use { case (rdf, builder) => - for { - schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) - (schema, _) = schemaPair - maybeTriggerMode <- TriggerMode.mkTriggerMode(partsMap) - newRdf <- applyInference(rdf, dp.inference) - ret <- maybeTriggerMode match { - case Left(err) => - IO.raiseError( - new RuntimeException( - s"Could not obtain validation trigger: $err" - ) - ) - case Right(triggerMode) => - for { - r <- io2f( - schemaValidate( - 
newRdf, - schema, - triggerMode, - relativeBase, - builder - ) - ) - json <- io2f(schemaResult2json(r._1)) - } yield json - } - } yield ret - } - } yield vv - } yield res - - for { - e <- r.attempt - res <- e.fold( - exc => errorResponseJson(exc.getMessage, BadRequest), - json => Ok(json) - ) - } yield res - } - } + /* TODO: redo */ + // case req @ POST -> Root / `api` / `verb` / "validate" => + // req.decode[Multipart[IO]] { m => + // { + // val partsMap = PartsMap(m.parts) + // val r = for { + // dataPair <- DataSingle.getData(partsMap, relativeBase) + // (resourceRdf, dp) = dataPair + // res <- for { + // emptyRes <- RDFAsJenaModel.empty + /* vv <- (resourceRdf, emptyRes).tupled.use { case (rdf, builder) => */ + // for { + // schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) + // (schema, _) = schemaPair + // maybeTriggerMode <- TriggerMode.mkTriggerMode(partsMap) + // newRdf <- applyInference(rdf, dp.inference) + // ret <- maybeTriggerMode match { + // case Left(err) => + // IO.raiseError( + // new RuntimeException( + // s"Could not obtain validation trigger: $err" + // ) + // ) + // case Right(triggerMode) => + // for { + // r <- io2f( + // schemaValidate( + // newRdf, + // schema, + // triggerMode, + // relativeBase, + // builder + // ) + // ) + // json <- io2f(schemaResult2json(r._1)) + // } yield json + // } + // } yield ret + // } + // } yield vv + // } yield res + // + // for { + // e <- r.attempt + // res <- e.fold( + // exc => errorResponseJson(exc.getMessage, BadRequest), + // json => Ok(json) + // ) + // } yield res + // } + // } } private val relativeBase = ApiDefaults.relativeBase diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala index fd206a16..0b408230 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala @@ -3,6 +3,5 @@ package es.weso.rdfshape.server.api.values /** Data class representing any endpoint from where information is fetched or that identifies RDF data * @param endpoint Base endpoint * @param node Specific information node - * TODO */ case class EndpointValue(endpoint: Option[String], node: Option[String]) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala index 2db57f0d..2c7fcc83 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala @@ -1,6 +1,8 @@ package es.weso.rdfshape.server.utils.json import cats.effect.IO +import es.weso.rdf.PrefixMap +import es.weso.rdf.nodes.IRI import io.circe.Json import org.http4s.circe._ import org.http4s.dsl.Http4sDsl @@ -30,8 +32,8 @@ object JsonUtils extends Http4sDsl[IO] { /** Converts some object to JSON, given a converter function. * - * @param data Data to be converted to JSON * @param name Name given to the data + * @param data Data to be converted to JSON * @param converter Converter function from A to Json * @tparam A Type of the data to be converted to JSON * @return A list with containing a single tuple: the name given to the data and the JSON representation of "A" itself. 
@@ -95,4 +97,27 @@ object JsonUtils extends Http4sDsl[IO] { private def mkJson(msg: String): Json = Json.fromFields(List(("error", Json.fromString(msg)))) + /** Convert a given prefix map to JSON format for API operations + * + * @param prefixMap Input prefix map + * @return JSON representation of the prefix map + */ + def prefixMap2Json(prefixMap: PrefixMap): Json = { + Json.fromFields(prefixMap.pm.map { case (prefix, iri) => + (prefix.str, Json.fromString(iri.getLexicalForm)) + }) + } + + /** @param iri IRI to be converted + * @param prefixMap Optionally, the prefix map with the IRI to be converted + * @return JSON representation of the IRI + */ + def iri2Json(iri: IRI, prefixMap: Option[PrefixMap]): Json = { + prefixMap match { + case Some(pm) => Json.fromString(pm.qualifyIRI(iri)) + case None => Json.fromString(iri.toString) + } + + } + } From 7e73dcd87fdfeef08c69cb6723f2aa82eb085553 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Mon, 25 Oct 2021 20:22:31 +0200 Subject: [PATCH 25/32] Refactored data conversion. 
--- .../server/api/definitions/ApiDefaults.scala | 2 + .../rdfshape/server/api/format/Format.scala | 16 +- .../data/logic/aux/InferenceCodecs.scala | 29 ++++ .../logic/operations/DataConversion.scala | 156 +++++++++--------- .../api/routes/data/logic/types/Data.scala | 4 +- .../routes/data/logic/types/DataSingle.scala | 27 +-- .../logic/types/merged/DataCompound.scala | 17 +- .../logic/types/merged/MergedModels.scala | 4 +- .../api/routes/data/service/DataService.scala | 96 +++++++---- .../server/utils/json/JsonUtils.scala | 4 +- 10 files changed, 206 insertions(+), 149 deletions(-) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index f5aa1d8a..d6de518e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -4,6 +4,7 @@ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, NONE} import es.weso.rdfshape.server.api.format.dataFormats.{ DataFormat, + RDFFormat, SchemaFormat, ShapeMapFormat } @@ -21,6 +22,7 @@ import es.weso.shapemaps.ShapeMap case object ApiDefaults { val availableDataFormats: List[DataFormat] = DataFormat.availableFormats val defaultDataFormat: DataFormat = DataFormat.defaultFormat + val defaultRdfFormat: RDFFormat = RDFFormat.defaultFormat val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats val defaultSchemaFormat: SchemaFormat = SchemaFormat.defaultFormat val defaultSchemaFormatName: String = defaultSchemaFormat.name diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index 043a230a..a080551d 
100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -21,6 +21,13 @@ trait Format { override def toString: String = name + override def equals(otherFormat: Any): Boolean = { + otherFormat match { + case other: Format => name == other.name && mimeType == other.mimeType + case _ => false + } + } + } object Format extends FormatCompanion[Format] { @@ -47,6 +54,8 @@ trait FormatCompanion[F <: Format] extends LazyLogging { */ val availableFormats: List[F] + /** Format encoder. Forms a JSON object with the formats name and mimetype + */ implicit val encodeFormat: Encoder[F] = (format: F) => { Json.obj( ("name", Json.fromString(format.name)), @@ -59,9 +68,14 @@ trait FormatCompanion[F <: Format] extends LazyLogging { ) } + /** Format decoder. Forms a Format instance from a given String, if the format name is valid. + * + * @note The decoder is simplified because the client normally sends the format name only, like: + * "format": "turtle" + */ implicit val decodeFormat: Decoder[F] = (cursor: HCursor) => for { - formatStr <- cursor.downField("name").as[String] + formatStr <- cursor.value.as[String] format = fromString(formatStr).toOption.getOrElse(defaultFormat) } yield format diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala new file mode 100644 index 00000000..e448332f --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala @@ -0,0 +1,29 @@ +package es.weso.rdfshape.server.api.routes.data.logic.aux + +import es.weso.rdf.{InferenceEngine, NONE} +import es.weso.rdfshape.server.api.routes.data.logic.types.Data +import io.circe.{Decoder, Encoder, HCursor, Json} + +/** Implicit encoders and decoders for 
[[es.weso.rdf.InferenceEngine]] instances, + * used when encoding and decoding [[Data]] instances + */ +private[data] object InferenceCodecs { + + /** Auxiliary encoder for data inference. + */ + implicit val encodeInference: Encoder[InferenceEngine] = + (inference: InferenceEngine) => Json.fromString(inference.name) + + /** Auxiliary decoder for data inference + */ + implicit val decodeInference: Decoder[InferenceEngine] = + (cursor: HCursor) => + for { + inferenceName <- cursor.value.as[String] + inference = InferenceEngine + .fromString(inferenceName) + .toOption + .getOrElse(NONE) + } yield inference + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala index 116e6d32..03653fd3 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala @@ -5,12 +5,7 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} import es.weso.rdf.{InferenceEngine, NONE} -import es.weso.rdfshape.server.api.format.dataFormats.{ - DataFormat, - Dot, - Png, - Svg -} +import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, Png, Svg} import es.weso.rdfshape.server.api.routes.data.logic.operations.DataConversion.successMessage import es.weso.rdfshape.server.api.routes.data.logic.types.{Data, DataSingle} import es.weso.utils.IOUtils.either2io @@ -28,9 +23,9 @@ import scala.util.Try /** Data class representing the output of a data-conversion operation * - * @param inputData Data before conversion - * @param targetFormat Target data format - * @param result Data after conversion + * @param inputData Data before conversion + * @param targetFormat 
Target data format + * @param result Data after conversion */ final case class DataConversion private ( override val inputData: Data, @@ -44,7 +39,7 @@ private[api] object DataConversion extends LazyLogging { /** List of graph format names */ - lazy val availableGraphFormatNames: immutable.Seq[String] = + private lazy val availableGraphFormatNames: immutable.Seq[String] = availableGraphFormats.map(_.name) /** List of available RDF format names (uppercase) @@ -85,88 +80,97 @@ private[api] object DataConversion extends LazyLogging { * @param targetFormat Target * @return A new Data instance */ + /* TODO: weird NullPointerException on data merges when using the "rdf" + * resource */ + def dataConvert( inputData: Data, targetFormat: DataFormat ): IO[DataConversion] = { logger.info(s"Conversion target format: $targetFormat") - for { - rdf <- inputData.toRdf() - sgraph <- rdf.use(rdfReasoner => RDF2SGraph.rdf2sgraph(rdfReasoner)) + // Get a handle to the RDF resource + rdf <- inputData.toRdf() + _ <- IO.println("STATS") + _ <- IO.println(inputData.getClass) + _ <- IO.println(inputData.format) + // Compute the inference to be used targetInference = inputData match { case ds: DataSingle => ds.inference case _ => NONE } - convertedData <- targetFormat.name.toUpperCase match { - // JSON: convert to JSON String and return a DataSingle with it - case "JSON" => - IO { - DataSingle( - dataRaw = sgraph.toJson.spaces2, - dataFormat = targetFormat, - inference = targetInference, - activeDataSource = inputData.dataSource - ) - } - - case "DOT" => - IO { - DataSingle( - dataRaw = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs), - dataFormat = targetFormat, - inference = targetInference, - activeDataSource = inputData.dataSource - ) + // Perform the conversion while using the RDF resource + conversionResult <- rdf.use(rdfReasoner => { + for { + sgraph <- RDF2SGraph.rdf2sgraph(rdfReasoner) + convertedData <- targetFormat.name.toUpperCase match { + // JSON: convert to JSON String and 
return a DataSingle with it + case "JSON" => + IO { + DataSingle( + dataRaw = sgraph.toJson.spaces2, + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + } + + case "DOT" => + IO { + DataSingle( + dataRaw = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs), + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + } + case tFormat if rdfDataFormatNames.contains(tFormat) => + for { + data <- rdfReasoner.serialize(tFormat) + } yield DataSingle( + dataRaw = data, + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + case tFormat if availableGraphFormatNames.contains(tFormat) => + for { + eitherFormat <- either2io(getTargetFormat(tFormat)) + dotStr = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs) + data <- eitherFormat.fold( + err => IO.raiseError(new RuntimeException(err)), + _ => IO(dotStr) + ) + } yield DataSingle( + dataRaw = data, + dataFormat = targetFormat, + inference = targetInference, + activeDataSource = inputData.dataSource + ) + case t => + IO.raiseError(new RuntimeException(s"Unsupported format: $t")) } - case tFormat if rdfDataFormatNames.contains(tFormat) => - for { - data <- rdf.use(_.serialize(tFormat)) - } yield DataSingle( - dataRaw = data, - dataFormat = targetFormat, - inference = targetInference, - activeDataSource = inputData.dataSource - ) - case tFormat if availableGraphFormatNames.contains(tFormat) => - for { - eitherFormat <- either2io(getTargetFormat(tFormat)) - dotStr = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs) - inputDataDot = DataSingle( - dataRaw = dotStr, - dataFormat = Dot, - inference = targetInference, - activeDataSource = inputData.dataSource - ) - _ <- eitherFormat.fold( - err => IO.raiseError(new RuntimeException(err)), - format => dotConvert(inputDataDot, format, targetInference) - ) - data <- eitherFormat.fold( - err => IO.raiseError(new RuntimeException(err)), - _ 
=> IO(dotStr) - ) - } yield DataSingle( - dataRaw = data, - dataFormat = targetFormat, - inference = targetInference, - activeDataSource = inputData.dataSource - ) - case t => - IO.raiseError(new RuntimeException(s"Unsupported format: $t")) - } - } yield DataConversion(inputData, targetFormat, convertedData) - + } yield DataConversion(inputData, targetFormat, convertedData) + }) + } yield conversionResult } + private def getTargetFormat(str: String): Either[String, Format] = + str.toUpperCase match { + case "SVG" => Right(Format.SVG) + case "PNG" => Right(Format.PNG) + case "PS" => Right(Format.PS) + case _ => Left(s"Unsupported format $str") + } + /** Perform a conversion from DOT data to another format * * @param inputData Input Data (DOT format) to be converted * @param targetFormat Target format (graphviz) * @return Data after conversion */ - def dotConvert( + private def dotConvert( inputData: Data, targetFormat: Format, inference: InferenceEngine = NONE @@ -230,13 +234,5 @@ private[api] object DataConversion extends LazyLogging { } } - private def getTargetFormat(str: String): Either[String, Format] = - str.toUpperCase match { - case "SVG" => Right(Format.SVG) - case "PNG" => Right(Format.PNG) - case "PS" => Right(Format.PS) - case _ => Left(s"Unsupported format $str") - } - private case class GraphFormat(name: String, mime: String, fmt: Format) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala index 3a4f0b3f..f7fbf2a9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala @@ -54,12 +54,14 @@ object Data extends DataCompanion[Data] { /** Dummy implementation meant to be overridden * If called on a general [[Data]] instance, pattern match among the available data 
types to * use the correct implementation + * + * @note Defaults to [[DataSingle]]'s implementation of decoding data */ implicit val decodeData: Decoder[Data] = (cursor: HCursor) => { this.getClass match { - case ds if ds == classOf[DataSingle] => DataSingle.decodeData(cursor) case de if de == classOf[DataEndpoint] => DataEndpoint.decodeData(cursor) case dc if dc == classOf[DataCompound] => DataCompound.decodeData(cursor) + case _ => DataSingle.decodeData(cursor) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala index ed63534b..56d7edc6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala @@ -6,9 +6,10 @@ import es.weso.rdf.jena._ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, NONE, RDFReasoner} import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, RDFFormat} import es.weso.rdfshape.server.api.routes.data.logic.DataSource import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource +import es.weso.rdfshape.server.api.routes.data.logic.aux.InferenceCodecs._ import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.html2rdf.HTML2RDF @@ -52,6 +53,7 @@ sealed case class DataSingle( for { rdf <- rdfFromString(dataRaw, dataFormat, relativeBase.map(_.str)) result = rdf.evalMap(rdf => rdf.applyInference(inference)) + } yield result } @@ -143,25 +145,6 @@ private[api] object DataSingle */ val emptyDataValue = "" - /** Auxiliar encoder for data inference - */ - private 
implicit val encodeInference: Encoder[InferenceEngine] = - (inference: InferenceEngine) => { - Json.obj(("name", Json.fromString(inference.name))) - } - - /** Auxiliar decoder for data inference - */ - private implicit val decodeInference: Decoder[InferenceEngine] = - (cursor: HCursor) => - for { - inferenceName <- cursor.downField("name").as[String] - inference = InferenceEngine - .fromString(inferenceName) - .toOption - .getOrElse(NONE) - } yield inference - override implicit val encodeData: Encoder[DataSingle] = (data: DataSingle) => Json.obj( @@ -250,8 +233,8 @@ private[api] object DataSingle data <- cursor.downField("data").as[String] dataFormat <- cursor - .downField("format") - .as[DataFormat] + .downField("dataFormat") + .as[RDFFormat] .orElse(Right(ApiDefaults.defaultDataFormat)) dataInference <- diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala index 20130c64..85b11990 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala @@ -41,8 +41,10 @@ case class DataCompound(elements: List[Data]) extends Data with LazyLogging { } override val dataSource: DataSource = DataSource.COMPOUND - override val format: Option[DataFormat] = - None // Each element may have its own format + override val format: Option[DataFormat] = { + if(elements.forall(_.format == elements.head.format)) elements.head.format + else None + } // None if each element has its own format. 
If all elements have the same format, use that format /** @return RDF logical model of the data contained in the compound */ @@ -52,9 +54,10 @@ case class DataCompound(elements: List[Data]) extends Data with LazyLogging { val jenaModels = getJenaModels.sequence // Whole compound value resulting from merging the individual elements - val value = jenaModels.flatMap(lsRs => + val value: IO[Resource[IO, RDFReasoner]] = jenaModels.flatMap(lsRs => IO(lsRs.sequence.evalMap(ls => MergedModels.fromList(ls))) ) + value } @@ -66,6 +69,7 @@ case class DataCompound(elements: List[Data]) extends Data with LazyLogging { * * @return List of RDF Jena models in each of the elements of the compound */ + // TODO: The moment you use one of these resources, things crash private def getJenaModels: List[IO[Resource[IO, RDFAsJenaModel]]] = { elements.flatMap { // Single data: straight extraction @@ -96,7 +100,7 @@ private[api] object DataCompound ) DataCompound .fromJsonString(compoundData.get) - .leftMap(err => s"Could not read compound data: $err") + .leftMap(err => s"Could not read compound data.\n $err") } else Left("No compound data provided") } yield maybeData @@ -116,6 +120,7 @@ private[api] object DataCompound case Some(vs) => val xs: Decoder.Result[List[Data]] = vs.toList.map(_.as[Data]).sequence + xs.map(DataCompound(_)) } } @@ -130,10 +135,10 @@ private[api] object DataCompound json <- parse(jsonStr).leftMap(parseError => s"CompoundData.fromString: error parsing $jsonStr as JSON: $parseError" ) - cd <- json + compoundData <- json .as[DataCompound] .leftMap(decodeError => s"Error decoding json to compoundData: $decodeError\nJSON obtained: \n${json.spaces2}" ) - } yield cd + } yield compoundData } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala index 63411bce..49188ccb 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/MergedModels.scala @@ -111,6 +111,8 @@ case class MergedModels( def triplesWithPredicateObject(p: IRI, o: RDFNode): RDFStream[RDFTriple] = Stream.eval(getModel).flatMap(_.triplesWithPredicateObject(p, o)) + def getModel: IO[RDFAsJenaModel] = mergedModel.get + // TODO: Not optimized...it just appends the inferred model to the end... override def applyInference(inference: InferenceEngine): RDFRead[Rdf] = for { mergedRdf <- getModel @@ -138,8 +140,6 @@ case class MergedModels( override def isIsomorphicWith(other: RDFReader): RDFRead[Boolean] = getModel.flatMap(_.isIsomorphicWith(other)) - def getModel: IO[RDFAsJenaModel] = mergedModel.get - override def asRDFBuilder: RDFRead[RDFBuilder] = getModel.flatMap(_.asRDFBuilder) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index 3327d6a0..beda6cb8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -19,6 +19,7 @@ import es.weso.rdfshape.server.api.routes.data.logic.operations.{ DataInfo } import es.weso.rdfshape.server.api.routes.data.logic.types.Data +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.TargetDataFormatParameter import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import io.circe.Json @@ -98,11 +99,12 @@ class DataService(client: Client[IO]) case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) + for { // Get the data from 
the partsMap eitherData <- Data.mkData(partsMap) response <- eitherData.fold( - // If there was an error, return it + // If there was an error parsing the data, return it err => errorResponseJson(err, InternalServerError), // Else, try and compute the data info data => @@ -139,41 +141,65 @@ class DataService(client: Client[IO]) * - targetDataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) - * Returns a JSON object with the operation results. See [[DataConversion.encodeResult]]: - * - message [String]: Informational message - * - data [String]: RDF data sent back (originally sent by the client) - * - inputDataFormat [String]: Data format of the input data - * - targetDataFormat [String]: Data format of the output data - * - result[Object]: JSON representation of the resulting data. See [[Data.encodeData]] + * Returns a JSON object with the operation results. See [[DataConversion.encodeResult]]. 
*/ - // case req @ POST -> Root / `api` / `verb` / "convert" => - // req.decode[Multipart[IO]] { m => - // val partsMap = PartsMap(m.parts) - // for { - // dataParam <- DataSingle.getData(partsMap, relativeBase) - // (resourceRdf, dp) = dataParam - /* targetFormat = dp.targetDataFormat.getOrElse(defaultDataFormat).name */ - /* dataFormat = dp.optDataFormat.getOrElse(defaultDataFormat) */ - // - // result <- io2f( - // resourceRdf.use(rdf => { - // logger.debug(s"Attempting data conversion") - // DataConversion - // .dataConvert(rdf, dp.data, dataFormat, targetFormat) - // - // }) - // ).attempt - // .map( - // _.fold(exc => Left(exc.getMessage), dc => Right(dc)) - // ) - // - // response <- result match { - /* case Left(err) => errorResponseJson(err, InternalServerError) */ - // case Right(result) => Ok(result.toJson) - // } - // - // } yield response - // } + case req @ POST -> Root / `api` / `verb` / "convert" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + + for { + // Get the data from the partsMap + eitherData <- Data.mkData(partsMap) + // Get the target data format + optTargetFormatStr <- partsMap.optPartValue( + TargetDataFormatParameter.name + ) + optTargetFormat = for { + targetFormatStr <- optTargetFormatStr + targetFormat <- DataFormat.fromString(targetFormatStr).toOption + } yield targetFormat + + // Abort if no valid target format, else continue + response <- optTargetFormat match { + case None => + errorResponseJson( + "Empty or invalid target format for conversion", + BadRequest + ) + case Some(targetFormat) => + eitherData.fold( + // If there was an error parsing the data, return it + err => errorResponseJson(err, InternalServerError), + // Else, try and compute the data conversion + data => + for { + // Check for exceptions when converting the data + maybeResult <- DataConversion + .dataConvert(data, targetFormat) + .attempt + response <- maybeResult.fold( + // Error: return it + err => + /* Legacy code may return 
exceptions with "null" + * messages */ + err.getMessage match { + case errorMessage: String => + errorResponseJson(errorMessage, InternalServerError) + case _ => // null exception message, return a general error message + err.printStackTrace() + errorResponseJson( + DataServiceError.couldNotParseData, + InternalServerError + ) + }, + // Success: build successful response + dataConversion => Ok(dataConversion.asJson) + ) + } yield response + ) + } + } yield response + } /** Perform a SPARQL query on RDF data. * Receives a JSON object with the input RDF and query information: diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala index 2c7fcc83..53618e7d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala @@ -75,7 +75,7 @@ object JsonUtils extends Http4sDsl[IO] { * @return The response object, ready to be dispatched elsewhere */ def errorResponseJson(msg: String, status: Status = Ok): IO[Response[IO]] = { - val responseMessage = mkJson(msg) + val responseMessage = mkJsonError(msg) mapStatusCodes(status) match { case Status.Created => Created(responseMessage) case Status.Accepted => Accepted(responseMessage) @@ -94,7 +94,7 @@ object JsonUtils extends Http4sDsl[IO] { } } - private def mkJson(msg: String): Json = + private def mkJsonError(msg: String): Json = Json.fromFields(List(("error", Json.fromString(msg)))) /** Convert a given prefix map to JSON format for API operations From a958b32616f508958b6a70fdc1ae8e3ed3178980 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Thu, 28 Oct 2021 17:53:22 +0200 Subject: [PATCH 26/32] End refactor of data service. 
--- .../server/api/definitions/ApiDefaults.scala | 6 +- .../api/routes/data/logic/DataSource.scala | 12 +- ...DataConversion.scala => DataConvert.scala} | 121 ++------ .../data/logic/operations/DataExtract.scala | 146 ++++----- .../data/logic/operations/DataInfo.scala | 57 ++-- .../data/logic/operations/DataQuery.scala | 76 +++++ .../api/routes/data/logic/types/Data.scala | 12 +- .../data/logic/types/DataEndpoint.scala | 16 +- .../routes/data/logic/types/DataSingle.scala | 206 ++++++------- .../logic/types/merged/DataCompound.scala | 27 +- .../api/routes/data/service/DataService.scala | 276 +++++++++--------- .../endpoint/logic/query/SparqlQuery.scala | 128 ++++---- .../logic/query/SparqlQuerySource.scala | 8 +- .../endpoint/service/EndpointService.scala | 23 +- .../api/routes/schema/logic/Schema.scala | 2 +- .../schema/logic/SchemaOperations.scala | 2 +- .../routes/schema/logic/SchemaSource.scala | 8 +- .../api/routes/schema/logic/TriggerMode.scala | 14 +- .../api/routes/shapemap/logic/ShapeMap.scala | 182 +++++------- .../shapemap/logic/ShapeMapSource.scala | 8 +- .../shapemap/service/ShapeMapService.scala | 6 +- .../IncomingRequestParameters.scala | 121 +++----- 22 files changed, 691 insertions(+), 766 deletions(-) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/{DataConversion.scala => DataConvert.scala} (55%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataQuery.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index d6de518e..aa036aff 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -39,11 +39,11 @@ case object ApiDefaults { val defaultSchemaEmbedded = false val 
defaultInferenceEngine: InferenceEngine = NONE val defaultInferenceEngineName: String = defaultInferenceEngine.name - val defaultActiveDataSource: DataSource = DataSource.defaultActiveDataSource + val defaultActiveDataSource: DataSource = DataSource.defaultDataSource val defaultActiveSchemaSource: SchemaSource = - SchemaSource.defaultActiveSchemaSource + SchemaSource.defaultSchemaSource val defaultActiveShapeMapSource: ShapeMapSource = - ShapeMapSource.defaultActiveShapeMapSource + ShapeMapSource.defaultShapeMapSource val defaultShapeMapFormat: ShapeMapFormat = ShapeMapFormat.defaultFormat val availableShapeMapFormats: List[String] = ShapeMap.formats val defaultActiveShapeMapTab = "#shapeMapTextArea" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala index 0b56c8b5..2d4fbe4a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala @@ -8,11 +8,11 @@ package es.weso.rdfshape.server.api.routes.data.logic private[api] object DataSource extends Enumeration { type DataSource = String - val TEXT = "#dataTextArea" - val URL = "#dataUrl" - val FILE = "#dataFile" - val COMPOUND = "#compoundData" - val ENDPOINT = "#dataEndpoint" + val TEXT = "byText" + val URL = "byUrl" + val FILE = "byFile" + val COMPOUND = "compoundData" + val ENDPOINT = "dataEndpoint" - val defaultActiveDataSource: DataSource = TEXT + val defaultDataSource: DataSource = TEXT } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala similarity index 55% rename from 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala index 03653fd3..7a487e82 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConversion.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala @@ -2,24 +2,18 @@ package es.weso.rdfshape.server.api.routes.data.logic.operations import cats.effect.IO import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.NONE import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} -import es.weso.rdf.{InferenceEngine, NONE} -import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, Png, Svg} -import es.weso.rdfshape.server.api.routes.data.logic.operations.DataConversion.successMessage +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.routes.data.logic.operations.DataConvert.successMessage import es.weso.rdfshape.server.api.routes.data.logic.types.{Data, DataSingle} import es.weso.utils.IOUtils.either2io -import guru.nidi.graphviz.engine.{Format, Graphviz} -import guru.nidi.graphviz.model.MutableGraph -import guru.nidi.graphviz.parse.Parser +import guru.nidi.graphviz.engine.Format import io.circe.syntax.EncoderOps import io.circe.{Encoder, Json} -import java.io.ByteArrayOutputStream -import java.util.Base64 -import javax.imageio.ImageIO import scala.collection.immutable -import scala.util.Try /** Data class representing the output of a data-conversion operation * @@ -27,7 +21,7 @@ import scala.util.Try * @param targetFormat Target data format * @param result Data after conversion */ -final case class DataConversion private ( +final case class DataConvert private ( override val inputData: Data, targetFormat: DataFormat, result: Data @@ -35,7 
+29,7 @@ final case class DataConversion private ( /** Static utilities for data conversion */ -private[api] object DataConversion extends LazyLogging { +private[api] object DataConvert extends LazyLogging { /** List of graph format names */ @@ -57,12 +51,12 @@ private[api] object DataConversion extends LazyLogging { private val successMessage = "Conversion successful" - /** Convert a conversion result to its JSON representation + /** Convert a [[DataConvert]] to its JSON representation * * @return JSON representation of the conversion result */ - implicit val encodeResult: Encoder[DataConversion] = - (dataConversion: DataConversion) => { + implicit val encodeDataConversionOperation: Encoder[DataConvert] = + (dataConversion: DataConvert) => { Json.fromFields( List( ("message", Json.fromString(dataConversion.successMessage)), @@ -80,20 +74,14 @@ private[api] object DataConversion extends LazyLogging { * @param targetFormat Target * @return A new Data instance */ - /* TODO: weird NullPointerException on data merges when using the "rdf" - * resource */ - def dataConvert( inputData: Data, targetFormat: DataFormat - ): IO[DataConversion] = { + ): IO[DataConvert] = { logger.info(s"Conversion target format: $targetFormat") for { // Get a handle to the RDF resource rdf <- inputData.toRdf() - _ <- IO.println("STATS") - _ <- IO.println(inputData.getClass) - _ <- IO.println(inputData.format) // Compute the inference to be used targetInference = inputData match { case ds: DataSingle => ds.inference @@ -109,30 +97,31 @@ private[api] object DataConversion extends LazyLogging { case "JSON" => IO { DataSingle( - dataRaw = sgraph.toJson.spaces2, + dataPre = Option(sgraph.toJson.spaces2), dataFormat = targetFormat, inference = targetInference, - activeDataSource = inputData.dataSource + dataSource = inputData.dataSource ) } case "DOT" => IO { DataSingle( - dataRaw = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs), + dataPre = + 
Option(sgraph.toDot(RDFDotPreferences.defaultRDFPrefs)), dataFormat = targetFormat, inference = targetInference, - activeDataSource = inputData.dataSource + dataSource = inputData.dataSource ) } case tFormat if rdfDataFormatNames.contains(tFormat) => for { data <- rdfReasoner.serialize(tFormat) } yield DataSingle( - dataRaw = data, + dataPre = Option(data), dataFormat = targetFormat, inference = targetInference, - activeDataSource = inputData.dataSource + dataSource = inputData.dataSource ) case tFormat if availableGraphFormatNames.contains(tFormat) => for { @@ -143,15 +132,15 @@ private[api] object DataConversion extends LazyLogging { _ => IO(dotStr) ) } yield DataSingle( - dataRaw = data, + dataPre = Option(data), dataFormat = targetFormat, inference = targetInference, - activeDataSource = inputData.dataSource + dataSource = inputData.dataSource ) case t => IO.raiseError(new RuntimeException(s"Unsupported format: $t")) } - } yield DataConversion(inputData, targetFormat, convertedData) + } yield DataConvert(inputData, targetFormat, convertedData) }) } yield conversionResult } @@ -164,75 +153,5 @@ private[api] object DataConversion extends LazyLogging { case _ => Left(s"Unsupported format $str") } - /** Perform a conversion from DOT data to another format - * - * @param inputData Input Data (DOT format) to be converted - * @param targetFormat Target format (graphviz) - * @return Data after conversion - */ - private def dotConvert( - inputData: Data, - targetFormat: Format, - inference: InferenceEngine = NONE - ): IO[Data] = { - logger.debug(s"dotConverter to $targetFormat. 
dot\n$inputData") - if(inputData.format.isEmpty) - IO.raiseError(new RuntimeException("Unspecified input data format")) - else if(inputData.rawData.isEmpty) - IO.raiseError( - new RuntimeException("Empty or malformed input data contents") - ) - else if( - inputData.format.get != es.weso.rdfshape.server.api.format.dataFormats.Dot - ) IO.raiseError(new RuntimeException("Input format is not DOT")) - else { - Try { - val g: MutableGraph = new Parser().read(inputData.rawData.get) - targetFormat match { - case Format.SVG => - val renderer = Graphviz - .fromGraph(g) //.width(200) - .render(targetFormat) - logger.debug(s"SVG converted: ${renderer.toString}") - IO { - DataSingle( - dataRaw = renderer.toString, - dataFormat = Svg, - inference = inference, - activeDataSource = inputData.dataSource - ) - } - case Format.PNG => - val renderer = Graphviz.fromGraph(g).render(Format.PNG) - val image = renderer.toImage - val baos = new ByteArrayOutputStream() - ImageIO.write(image, "png", baos) - val data = Base64.getEncoder.encodeToString(baos.toByteArray) - val imageString = "data:image/png;base64," + data - - IO { - DataSingle( - dataRaw = - "", - dataFormat = Png, - inference = inference, - activeDataSource = inputData.dataSource - ) - } - - case _ => - IO.raiseError( - new RuntimeException( - s"Error converting from DOT to $targetFormat" - ) - ) - } - }.fold( - err => IO.raiseError(err), - identity - ) - } - } - private case class GraphFormat(name: String, mime: String, fmt: Format) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala index 78bf3e9a..6632d751 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala @@ -4,11 +4,14 @@ 
import cats.data.EitherT import cats.effect.IO import cats.effect.unsafe.implicits.global import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.InferenceEngine import es.weso.rdf.nodes.{IRI, Lang} +import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.ApiDefaults._ import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat -import es.weso.rdfshape.server.api.routes.data.logic.operations.DataExtract.successMessage +import es.weso.rdfshape.server.api.routes.data.logic.operations.DataExtract.{ + DataExtractResult, + successMessage +} import es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.schema.Schema import es.weso.schemaInfer.{InferOptions, PossiblePrefixes, SchemaInfer} @@ -17,20 +20,18 @@ import es.weso.utils.IOUtils.{either2es, io2es} import io.circe.syntax.EncoderOps import io.circe.{Encoder, Json} -/** Data class representing the output of a data-extraction operation (input RDF data => output schema) +/** Data class representing the output of a schema-extraction operation (input RDF data => output schema) * * @param inputData RDF input data from which ShEx may be extracted - * @param targetSchemaFormat Target schema format - * @param targetSchemaEngine Target schema engine - * @param schema Resulting schema - * @param shapeMap Resulting shapemap + * @param schemaFormat Target schema format + * @param schemaEngine Target schema engine + * @param result Object of type [[DataExtractResult]] containing the results of the data extraction */ final case class DataExtract private ( override val inputData: Data, - targetSchemaFormat: SchemaFormat, - targetSchemaEngine: Schema = defaultSchemaEngine, - schema: Schema, - shapeMap: ResultShapeMap + schemaFormat: SchemaFormat = ApiDefaults.defaultSchemaFormat, + schemaEngine: Schema = ApiDefaults.defaultSchemaEngine, + result: DataExtractResult ) extends DataOperation(successMessage, inputData) /** Static utilities to extract 
schemas from RDF data @@ -51,64 +52,30 @@ private[api] object DataExtract extends LazyLogging { sortFunction = InferOptions.orderByIRI ) - /** Convert an extraction result to its JSON representation - * - * @return JSON representation of the extraction result - */ - - implicit val encodeResult: Encoder[DataExtract] = - (dataExtract: DataExtract) => { - - val resultJson: Json = Json.fromFields( - List( - ( - "schema", - Json.fromString( - dataExtract.schema - .serialize(dataExtract.targetSchemaFormat.name) - .unsafeRunSync() - ) - ), - ("shapeMap", Json.fromString(dataExtract.shapeMap.toString)) - ) - ) - - Json.fromFields( - List( - ("message", Json.fromString(dataExtract.successMessage)), - ("data", dataExtract.inputData.asJson), - ("result", resultJson), - ("targetSchemaFormat", dataExtract.targetSchemaFormat.asJson), - ( - "targetSchemaEngine", - Json.fromString(dataExtract.targetSchemaEngine.name) - ) - ) - ) - } - /** Extract Shex from a given RDF input * * @param inputData Input data for the extraction * @param nodeSelector Node selector for the schema extraction - * @param inferenceEngine Inference engine - * @param targetSchemaEngine Target conversion engine - * @param targetSchemaFormat Target schema format - * @param optLabelName Label name (optional), will default to [[defaultShapeLabel]] + * @param optTargetSchemaEngine Optionally, the target conversion engine. Defaults to [[ApiDefaults.defaultSchemaEngine]]. + * @param optTargetSchemaFormat Optionally, the target schema format. Defaults to [[ApiDefaults.defaultSchemaFormat]]. + * @param optLabel Label IRI (optional). 
Defaults to [[ApiDefaults.defaultShapeLabel]] * @param relativeBase Relative base * @return */ def dataExtract( inputData: Data, nodeSelector: String, - inferenceEngine: InferenceEngine, - targetSchemaEngine: Schema, - targetSchemaFormat: SchemaFormat, - optLabelName: Option[String], + optTargetSchemaEngine: Option[Schema], + optTargetSchemaFormat: Option[SchemaFormat], + optLabel: Option[IRI], relativeBase: Option[IRI] ): IO[DataExtract] = { val base = relativeBase.map(_.str) + val targetSchemaEngine = + optTargetSchemaEngine.getOrElse(ApiDefaults.defaultSchemaEngine) + val targetSchemaFormat = + optTargetSchemaFormat.getOrElse(ApiDefaults.defaultSchemaFormat) for { rdf <- inputData.toRdf() // Get rdf resource @@ -125,7 +92,7 @@ private[api] object DataExtract extends LazyLogging { rdfReader, ns, targetSchemaEngine.name, - optLabelName.map(IRI(_)).getOrElse(defaultShapeLabel), + optLabel.getOrElse(defaultShapeLabel), inferOptions ) ) @@ -136,18 +103,6 @@ private[api] object DataExtract extends LazyLogging { results.value }) -// finalResult = eitherResult match { -// case Left(err) => IO.raiseError(new RuntimeException(err)) -// case Right((resultSchema, resultShapemap)) => -// DataExtract( -// inputData = inputData, -// targetSchemaFormat = targetSchemaFormat, -// targetSchemaEngine = targetSchemaEngine, -// schema = resultSchema, -// shapeMap = resultShapemap -// ) -// } - finalResult <- eitherResult.fold( err => IO.raiseError(new RuntimeException(err)), pair => { @@ -155,14 +110,61 @@ private[api] object DataExtract extends LazyLogging { IO { DataExtract( inputData = inputData, - targetSchemaFormat = targetSchemaFormat, - targetSchemaEngine = targetSchemaEngine, - schema = resultSchema, - shapeMap = resultShapemap + schemaFormat = targetSchemaFormat, + schemaEngine = targetSchemaEngine, + result = DataExtractResult( + targetSchemaFormat = targetSchemaFormat, + schema = resultSchema, + shapeMap = resultShapemap + ) ) } } ) } yield finalResult } + + /** Encoder 
for [[DataExtractResult]] + */ + private implicit val encodeDataExtractResult: Encoder[DataExtractResult] = + (dataExtract: DataExtractResult) => + Json.fromFields( + List( + ( + "schema", + Json.fromString( + dataExtract.schema + .serialize(dataExtract.targetSchemaFormat.name) + .unsafeRunSync() + ) + ), + ("shapeMap", Json.fromString(dataExtract.shapeMap.toString)) + ) + ) + + /** Convert a [[DataExtract]] to its JSON representation + * + * @return JSON representation of the extraction result + */ + + implicit val encodeDataExtractOperation: Encoder[DataExtract] = + (dataExtract: DataExtract) => { + Json.fromFields( + List( + ("message", Json.fromString(dataExtract.successMessage)), + ("data", dataExtract.inputData.asJson), + ("schemaFormat", dataExtract.schemaFormat.asJson), + ("schemaEngine", Json.fromString(dataExtract.schemaEngine.name)), + ("result", dataExtract.result.asJson) + ) + ) + } + + /** Case class representing the results to be returned when performing a data-info operation + */ + final case class DataExtractResult private ( + targetSchemaFormat: SchemaFormat, + schema: Schema, + shapeMap: ResultShapeMap + ) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala index 04b8fdd8..9e694a97 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala @@ -3,7 +3,10 @@ package es.weso.rdfshape.server.api.routes.data.logic.operations import cats.effect.IO import es.weso.rdf.PrefixMap import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.routes.data.logic.operations.DataInfo.successMessage +import es.weso.rdfshape.server.api.routes.data.logic.operations.DataInfo.{ + DataInfoResult, + successMessage +} import 
es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.rdfshape.server.utils.json.JsonUtils._ import io.circe.syntax.EncoderOps @@ -11,29 +14,24 @@ import io.circe.{Encoder, Json} /** Data class representing the output of a data-information operation * - * @param inputData RDF input data (contains content and format information) - * @param predicates List of predicates in the RDF input - * @param numberOfStatements Number of statements in the RDF input - * @param prefixMap Prefix map in the RDF input + * @param inputData RDF input data (contains content and format information) + * @param result Object of type [[DataInfoResult]] containing the properties extracted from the data */ final case class DataInfo private ( override val inputData: Data, - numberOfStatements: Int, - prefixMap: PrefixMap, - predicates: Set[IRI] + result: DataInfoResult ) extends DataOperation(successMessage, inputData) {} /** Static utilities to obtain information about RDF data */ private[api] object DataInfo { - private val successMessage = "Well formed RDF" /** Given an input data, get information about it * * @param data Input Data object of any type (Simple, Compound...) 
- * @return Either a DataInfo object about the input data or an error message + * @return A [[DataInfo]] object with the information of the input data */ def dataInfo(data: Data): IO[DataInfo] = for { @@ -50,18 +48,28 @@ private[api] object DataInfo { } yield DataInfo( inputData = data, - numberOfStatements = nStatements, - predicates = predicates.toSet, - prefixMap = prefixMap + result = DataInfoResult( + numberOfStatements = nStatements, + prefixMap = prefixMap, + predicates = predicates.toSet + ) ) - implicit val encodeResult: Encoder[DataInfo] = - (dataInfo: DataInfo) => { + /** Case class representing the results to be returned when performing a data-info operation + */ + final case class DataInfoResult private ( + numberOfStatements: Int, + prefixMap: PrefixMap, + predicates: Set[IRI] + ) - val resultJson: Json = Json.fromFields( + /** Encoder for [[DataInfoResult]] + */ + private implicit val encodeDataInfoResult: Encoder[DataInfoResult] = + (dataInfo: DataInfoResult) => + Json.fromFields( List( ("numberOfStatements", dataInfo.numberOfStatements.asJson), - ("format", dataInfo.inputData.format.asJson), ("prefixMap", prefixMap2Json(dataInfo.prefixMap)), ( "predicates", @@ -72,12 +80,23 @@ private[api] object DataInfo { ) ) + /** Encoder for [[DataInfo]] + */ + implicit val encodeDataInfoOperation: Encoder[DataInfo] = + (dataInfo: DataInfo) => Json.fromFields( List( ("message", Json.fromString(dataInfo.successMessage)), ("data", dataInfo.inputData.asJson), - ("result", resultJson) + ( + "result", + dataInfo.result.asJson.deepMerge( + Json.fromFields( + List(("format", dataInfo.inputData.format.asJson)) + ) + ) + ) ) ) - } + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataQuery.scala new file mode 100644 index 00000000..8437503e --- /dev/null +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataQuery.scala @@ -0,0 +1,76 @@ +package es.weso.rdfshape.server.api.routes.data.logic.operations + +import cats.effect.IO +import es.weso.rdfshape.server.api.routes.data.logic.operations.DataQuery.{ + DataQueryResult, + successMessage +} +import es.weso.rdfshape.server.api.routes.data.logic.types.Data +import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} + +/** Data class representing the output of a data-query operation + * + * @param inputData RDF input data (contains content and format information) + * @param inputQuery Sparql query input + * @param result Object of type [[DataQueryResult]] containing the properties extracted from the data + */ + +final case class DataQuery private ( + override val inputData: Data, + inputQuery: SparqlQuery, + result: DataQueryResult +) extends DataOperation(successMessage, inputData) {} + +/** Static utilities to perform SPARQL queries on RDF data + */ +private[api] object DataQuery { + private val successMessage = "Query executed successfully" + + /** Given an input data and query, perform the query on the data + * + * @param data Input data to be queried + * @param query Input SPARQL query + * @return A [[DataQuery]] object with the query results (see also [[DataQueryResult]]) + */ + + def dataQuery(data: Data, query: SparqlQuery): IO[DataQuery] = + query.rawQuery match { + case Left(err) => IO.raiseError(new RuntimeException(err)) + case Right(raw) => + for { + rdf <- data.toRdf() // Get the RDF reader + resultJson <- rdf.use( + _.queryAsJson(raw) + ) // Perform query + } yield DataQuery( // Form the results object + inputData = data, + inputQuery = query, + result = DataQueryResult( + json = resultJson + ) + ) + } + + /** Case class representing the results to be returned when performing a data-query operation + * @note Currently limited to 
JSON formatted results for convenience + */ + final case class DataQueryResult( + json: Json + ) + + /** Encoder for [[DataQuery]] + */ + implicit val encodeDataQueryOperation: Encoder[DataQuery] = + (dataQuery: DataQuery) => + Json.fromFields( + List( + ("message", Json.fromString(dataQuery.successMessage)), + ("data", dataQuery.inputData.asJson), + ("query", dataQuery.inputQuery.asJson), + ("result", dataQuery.result.json) + ) + ) + +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala index f7fbf2a9..e8c97c4d 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala @@ -18,15 +18,15 @@ import io.circe.{Decoder, Encoder, HCursor} */ trait Data { - /** Source where the data comes from + /** Raw RDF content represented as a String (Right) + * An error occurred when trying to parse the data (Left) */ - val dataSource: DataSource - - val format: Option[DataFormat] + lazy val rawData: Either[String, String] = Left("") - /** Raw RDF content represented as a String + /** Source where the data comes from */ - val rawData: Option[String] + val dataSource: DataSource + val format: Option[DataFormat] = None /** Given an RDF source of data, try to parse it and get the RDF model representation * diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala index 82cb4f10..31508022 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala @@ -30,9 +30,9 @@ case class 
DataEndpoint( ) extends Data with LazyLogging { - override lazy val rawData: Option[String] = getUrlContents( + override lazy val rawData: Either[String, String] = getUrlContents( endpoint.uri.toString - ).toOption + ) override val dataSource: DataSource = DataSource.ENDPOINT override val format: Option[DataFormat] = Some(dataFormat) @@ -77,7 +77,7 @@ private[api] object DataEndpoint extends DataCompanion[DataEndpoint] { format = paramFormat.getOrElse(ApiDefaults.defaultDataFormat) // Try to create data - maybeData: Either[String, DataEndpoint] = // 2. Endpoint data + maybeData: Either[String, DataEndpoint] = if(endpoint.isDefined) { logger.debug(s"RDF Data received - Endpoint Data: ${endpoint.get}") IRI @@ -88,7 +88,15 @@ private[api] object DataEndpoint extends DataCompanion[DataEndpoint] { ) } else Left("No endpoint provided") - } yield maybeData + + } yield maybeData.flatMap(dataEndpoint => + /* Check if the created data is empty, then an error occurred when + * fetching the endpoint on creation */ + dataEndpoint.rawData.fold( + err => Left(err), + _ => Right(dataEndpoint) + ) + ) override implicit val decodeData: Decoder[DataEndpoint] = (cursor: HCursor) => { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala index 56d7edc6..c7e2ad1e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala @@ -4,7 +4,7 @@ import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena._ import es.weso.rdf.nodes.IRI -import es.weso.rdf.{InferenceEngine, NONE, RDFReasoner} +import es.weso.rdf.{InferenceEngine, NONE} import es.weso.rdfshape.server.api.definitions.ApiDefaults import 
es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, RDFFormat} import es.weso.rdfshape.server.api.routes.data.logic.DataSource @@ -14,29 +14,51 @@ import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.html2rdf.HTML2RDF import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents -import es.weso.utils.IOUtils.err import io.circe._ import io.circe.syntax.EncoderOps /** Data class representing a single RDF data instance with its current format and source * * @note Invalid initial data is accepted, but may cause errors when operating with it. - * @param dataRaw RDF data raw text - * @param dataFormat Data format - * @param inference Data inference - * @param activeDataSource Active source, used to know which source the data comes from + * @param dataPre RDF data, as it is received before being processed depending on the [[dataSource]] + * @param dataFormat Data format + * @param inference Data inference + * @param dataSource Active source, used to know how to process the raw data */ sealed case class DataSingle( - dataRaw: String, + private val dataPre: Option[String], dataFormat: DataFormat, inference: InferenceEngine, - activeDataSource: DataSource + override val dataSource: DataSource ) extends Data with LazyLogging { - override lazy val rawData: Option[String] = Some(dataRaw) - override val dataSource: DataSource = activeDataSource - override val format: Option[DataFormat] = Some(dataFormat) + /** Given the (user input) for the data and its source, fetch the Data contents using the input in the way the source needs it + * (e.g.: for URLs, fetch the input with a web request; for files, decode the input; for raw data, do nothing) + * + * @return Either an error creating the raw data or a String containing the final text + */ + override lazy val rawData: Either[String, String] = + dataPre match { + case None => Left("Could not 
build the RDF from empty data") + case Some(userData) => + dataSource match { + case DataSource.TEXT | // Raw text input by user + DataSource.FILE | // File input already decoded to string + DataSource.COMPOUND => // Compound data already processed by server + Right(userData) + + case DataSource.URL => + getUrlContents(userData) + + case other => + val msg = s"Unknown data source: $other" + logger.warn(msg) + Left(msg) + } + } + + override val format: Option[DataFormat] = Some(dataFormat) /** Given an RDF source of data, try to get the RDF model representation * @@ -47,64 +69,36 @@ sealed case class DataSingle( relativeBase: Option[IRI] = None ): IO[Resource[IO, RDFAsJenaModel]] = { - if(dataRaw.isBlank) - RDFAsJenaModel.empty.flatMap(e => IO(e)) - else - for { - rdf <- rdfFromString(dataRaw, dataFormat, relativeBase.map(_.str)) - result = rdf.evalMap(rdf => rdf.applyInference(inference)) + rawData match { + case Right(data) => + for { + rdf <- rdfFromString(data, dataFormat, relativeBase.map(_.str)) + result = rdf.evalMap(rdf => rdf.applyInference(inference)) - } yield result - } + } yield result + case Left(_) => RDFAsJenaModel.empty.flatMap(e => IO(e)) + } - /** Get RDF data from data parameters - * - * @return The resource capable of reading the RDF data - */ - def getRdfResource( - relativeBase: Option[IRI] - ): IO[Resource[IO, RDFReasoner]] = { - val base = relativeBase.map(_.str) - - val x: IO[Resource[IO, RDFReasoner]] = - activeDataSource match { - - case DataSource.TEXT | DataSource.URL | DataSource.FILE => - logger.debug(s"Input - $activeDataSource: $dataRaw") - if(dataRaw.isBlank) - RDFAsJenaModel.empty.flatMap(e => IO(e)) - else - for { - rdf <- rdfFromString(dataRaw, dataFormat, base) - result = rdf.evalMap(rdf => rdf.applyInference(inference)) - } yield result - - case other => - val msg = s"Unknown value for data source: $other" - logger.error(msg) - err(msg) - } - x } - /** @param data RDF data as a raw string - * @param format RDF data format - * 
@param base Base + /** @param dataStr RDF data as a raw string + * @param format RDF data format + * @param base Base * @return An RDF model extracted from the input data */ private def rdfFromString( - data: String, + dataStr: String, format: DataFormat, base: Option[String] ): IO[Resource[IO, RDFAsJenaModel]] = { logger.debug(s"RDF from string with format: $format") val formatName = format.name if(HTML2RDF.availableExtractorNames contains formatName) - IO(HTML2RDF.extractFromString(data, formatName)) + IO(HTML2RDF.extractFromString(dataStr, formatName)) else for { baseIri <- mkBase(base) - res <- RDFAsJenaModel.fromChars(data, format.name, baseIri) + res <- RDFAsJenaModel.fromChars(dataStr, format.name, baseIri) } yield res } @@ -124,7 +118,7 @@ sealed case class DataSingle( ) } - override def toString: String = dataRaw + override def toString: String = rawData.toString } private[api] object DataSingle @@ -135,44 +129,43 @@ private[api] object DataSingle */ override lazy val emptyData: DataSingle = DataSingle( - dataRaw = emptyDataValue, + dataPre = emptyDataValue, dataFormat = DataFormat.defaultFormat, inference = NONE, - activeDataSource = DataSource.defaultActiveDataSource + dataSource = DataSource.defaultDataSource ) /** Placeholder value used for the raw data whenever an empty data is issued/needed. 
*/ - val emptyDataValue = "" + val emptyDataValue: Option[String] = None override implicit val encodeData: Encoder[DataSingle] = (data: DataSingle) => Json.obj( - ("data", Json.fromString(data.dataRaw)), - ("source", Json.fromString(data.activeDataSource)), + ("data", data.rawData.toOption.asJson), + ("source", data.dataSource.asJson), ("format", data.dataFormat.asJson), ("inference", data.inference.asJson) ) override def mkData(partsMap: PartsMap): IO[Either[String, DataSingle]] = for { - dataStr <- partsMap.optPartValue(DataParameter.name) - dataUrl <- partsMap.optPartValue(DataUrlParameter.name) - dataFile <- partsMap.optPartValue(DataFileParameter.name) + // Data param as sent by client + paramData <- partsMap.optPartValue(DataParameter.name) paramFormat <- DataFormat.fromRequestParams( DataFormatParameter.name, partsMap ) paramInference <- partsMap.optPartValue(InferenceParameter.name) - paramDataSource <- partsMap.optPartValue(ActiveDataSourceParameter.name) + paramDataSource <- partsMap.optPartValue(DataSourceParameter.name) // Confirm final format and inference inference = getInference(paramInference).getOrElse(NONE) format = paramFormat.getOrElse(ApiDefaults.defaultDataFormat) // Check the client's selected source - dataSource = paramDataSource.getOrElse(DataSource.defaultActiveDataSource) + dataSource = paramDataSource.getOrElse(DataSource.defaultDataSource) _ = logger.debug(s"RDF Data received - Source: $dataSource") // Base for the result @@ -181,61 +174,35 @@ private[api] object DataSingle inference = inference ) - // Create the data - maybeData: Either[String, DataSingle] = dataSource match { - case DataSource.TEXT => - dataStr match { - case None => Left("No value for the data string") - case Some(dataRaw) => - Right( - base.copy( - dataRaw = dataRaw.trim, - activeDataSource = DataSource.TEXT - ) - ) - } - case DataSource.URL => - dataUrl match { - case None => Left("No value for the data url") - case Some(url) => - getUrlContents(url) match { - case 
Right(dataRaw) => - Right( - base.copy( - dataRaw = dataRaw.trim, - activeDataSource = DataSource.URL - ) - ) - case Left(err) => Left(s"Could not read data: $err") - } - } - case DataSource.FILE => - dataFile match { - case None => Left("No value for the data file") - case Some(dataRaw) => - Right( - base.copy( - dataRaw = dataRaw.trim, - activeDataSource = DataSource.FILE - ) - ) - } - case other => - val msg = s"Unknown data source: $other" - logger.warn(msg) - Left(msg) - } - } yield maybeData + // Create the data instance + data = base.copy( + dataPre = paramData, + dataSource = dataSource + ) + + } yield data.rawData.fold( + err => Left(err), + _ => Right(data) + ) + + /** @param inferenceStr String representing the inference value + * @return Optionally, the inference contained in a given data string + */ + private def getInference( + inferenceStr: Option[String] + ): Option[InferenceEngine] = { + inferenceStr.flatMap(InferenceEngine.fromString(_).toOption) + } override implicit val decodeData: Decoder[DataSingle] = (cursor: HCursor) => { for { - data <- cursor.downField("data").as[String] + data <- cursor.downField("data").as[Option[String]] dataFormat <- cursor .downField("dataFormat") .as[RDFFormat] - .orElse(Right(ApiDefaults.defaultDataFormat)) + .orElse(Right(ApiDefaults.defaultRdfFormat)) dataInference <- cursor @@ -243,26 +210,17 @@ private[api] object DataSingle .as[Option[InferenceEngine]] dataSource <- cursor - .downField("source") + .downField("dataSource") .as[DataSource] - .orElse(Right(DataSource.defaultActiveDataSource)) + .orElse(Right(DataSource.defaultDataSource)) base = DataSingle.emptyData.copy( - dataRaw = data, + dataPre = data, dataFormat = dataFormat, - activeDataSource = dataSource, + dataSource = dataSource, inference = dataInference.getOrElse(NONE) ) } yield base } - - /** @param inferenceStr String representing the inference value - * @return Optionally, the inference contained in a given data string - */ - private def 
getInference( - inferenceStr: Option[String] - ): Option[InferenceEngine] = { - inferenceStr.flatMap(InferenceEngine.fromString(_).toOption) - } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala index 85b11990..a56d1603 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala @@ -31,15 +31,15 @@ case class DataCompound(elements: List[Data]) extends Data with LazyLogging { * * @note If one element's data cannot be computed, returns none. */ - override lazy val rawData: Option[String] = { - val definedElements = elements.map(_.rawData).filter(_.isDefined).map(_.get) - - // All elements' raw data was computed + override lazy val rawData: Either[String, String] = { + val definedElements = + elements.map(_.rawData).filter(_.isRight).map(_.toOption.get) + // If all elements' raw data was computed... 
if(elements.length == definedElements.length) - Some(definedElements.mkString("\n")) - else None - + Right(definedElements.mkString("\n")) + else Left("Could not parse compound data") } + override val dataSource: DataSource = DataSource.COMPOUND override val format: Option[DataFormat] = { if(elements.forall(_.format == elements.head.format)) elements.head.format @@ -87,11 +87,10 @@ private[api] object DataCompound override lazy val emptyData: DataCompound = DataCompound(List()) - override def mkData(partsMap: PartsMap): IO[Either[String, DataCompound]] = + override def mkData(partsMap: PartsMap): IO[Either[String, DataCompound]] = { for { // Parse params compoundData <- partsMap.optPartValue(CompoundDataParameter.name) - // Try to create data maybeData: Either[String, DataCompound] = if(compoundData.isDefined) { @@ -102,7 +101,15 @@ private[api] object DataCompound .fromJsonString(compoundData.get) .leftMap(err => s"Could not read compound data.\n $err") } else Left("No compound data provided") - } yield maybeData + } yield maybeData.flatMap(dataCompound => + /* Check if the created data is empty, then an error occurred when merging + * the elements */ + dataCompound.rawData.fold( + err => Left(err), + _ => Right(dataCompound) + ) + ) + } /** Encoder used to transform CompoundData instances to JSON values */ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index beda6cb8..f41eb3d1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -2,26 +2,27 @@ package es.weso.rdfshape.server.api.routes.data.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.definitions.ApiDefaults +import 
es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ availableInferenceEngines, defaultInferenceEngineName } import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.format.dataFormats.{ - DataFormat, - GraphicFormat, - RDFFormat -} +import es.weso.rdfshape.server.api.format.dataFormats._ import es.weso.rdfshape.server.api.routes.ApiService +import es.weso.rdfshape.server.api.routes.data.logic.DataSource import es.weso.rdfshape.server.api.routes.data.logic.operations.{ - DataConversion, - DataInfo + DataConvert, + DataExtract, + DataInfo, + DataQuery } import es.weso.rdfshape.server.api.routes.data.logic.types.Data -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.TargetDataFormatParameter +import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson +import es.weso.schema.ShExSchema import io.circe.Json import io.circe.syntax.EncoderOps import org.http4s._ @@ -86,15 +87,23 @@ class DataService(client: Client[IO]) val defaultInferenceEngine = defaultInferenceEngineName Ok(Json.fromString(defaultInferenceEngine)) + /** Returns a JSON array with the valid data sources that the server will accept when sent via [[DataSourceParameter]] + */ + case GET -> Root / `api` / `verb` / "sources" => + val json = Json.arr( + Json.fromString(DataSource.TEXT), + Json.fromString(DataSource.URL), + Json.fromString(DataSource.FILE) + ) + Ok(json) + /** Obtain information about an RDF source. 
* Receives a JSON object with the input RDF information: - * - data [String]: RDF data - * - dataUrl [String]: Url containing the RDF data - * - dataFile [File Object]: File containing RDF data + * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it * - dataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied - * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) - * Returns a JSON object with the operation results. See [[DataInfo.encodeResult]] + * Returns a JSON object with the operation results. See [[DataInfo.encodeDataInfoOperation]] */ case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => @@ -134,14 +143,11 @@ class DataService(client: Client[IO]) /** Convert an RDF source into another format/syntax. * Receives a JSON object with the input RDF information: - * - data [String]: RDF data - * - dataUrl [String]: Url containing the RDF data - * - dataFile [File Object]: File containing RDF data - * - dataFormat [String]: Format of the RDF data + * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it * - targetDataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied - * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) - * Returns a JSON object with the operation results. See [[DataConversion.encodeResult]]. + * Returns a JSON object with the operation results. See [[DataConvert.encodeDataConversionOperation]]. 
*/ case req @ POST -> Root / `api` / `verb` / "convert" => req.decode[Multipart[IO]] { m => @@ -174,7 +180,7 @@ class DataService(client: Client[IO]) data => for { // Check for exceptions when converting the data - maybeResult <- DataConversion + maybeResult <- DataConvert .dataConvert(data, targetFormat) .attempt response <- maybeResult.fold( @@ -203,127 +209,129 @@ class DataService(client: Client[IO]) /** Perform a SPARQL query on RDF data. * Receives a JSON object with the input RDF and query information: - * - data [String]: Raw RDF data - * - dataUrl [String]: Url containing the RDF data - * - dataFile [File Object]: File containing RDF data + * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it * - dataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied - * - query [String]: Raw SPARQL query - * - queryUrl [String]: Url containing the SPARQL query - * - queryFile [String]: File containing the SPARQL query - * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) - * - activeQueryTab [String]: Identifies the source of the query (raw, URL, file...) 
- * Returns a JSON object with the RDF data information: - * - message [String]: Informational message - * - data [String]: RDF data sent back (originally sent by the client) - * - result [String]: RDF resulting from the conversion - * - dataFormat [String]: Data format of the input data - * - targetDataFormat [String]: Data format of the output data - * - numberOfStatements [String]: Data format of the data - * - prefixMap [Object]: Dictionary with the prefix map of the data - * - predicates [Array]: Array of the predicates present in the data + * + * - query [String]: SPARQL query data (raw, URL containing the data or File with the query) + * - querySource [String]: Identifies the source of the query (raw, URL, file...) so that the server knows how to handle it + * + * Returns a JSON object with the query inputs and results (see [[DataQuery.encodeDataQueryOperation]]). */ - // case req @ POST -> Root / `api` / `verb` / "query" => - // req.decode[Multipart[IO]] { m => - // val partsMap = PartsMap(m.parts) - // logger.debug(s"Data query params map: $partsMap") - // for { - /* /* TODO: an error is thrown on bad query URLs (IO.raise...), but it is */ - // * not controlled */ - // dataParam <- DataSingle.getData(partsMap, relativeBase) - // - // (resourceRdf, dp) = dataParam - // maybeQuery <- SparqlQuery.getSparqlQuery(partsMap) - // resp <- maybeQuery match { - // case Left(err) => - // // Query could not be even parsed from user data - /* errorResponseJson(s"Error obtaining query data: $err", BadRequest) */ - // case Right(query) => - // // Query was parsed, but may be invalid still - // val optQueryStr = query.queryRaw - // logger.debug(s"Data query with querystring: $optQueryStr") - // for { - // result <- io2f( - // resourceRdf.use(rdf => rdf.queryAsJson(optQueryStr)) - // ).attempt - /* .map(_.fold(exc => Left(exc.getMessage), dc => Right(dc))) */ - // response <- result match { - /* case Left(err) => errorResponseJson(err, InternalServerError) */ - // case 
Right(json) => Ok(json) - // } - // } yield response - // } - // } yield resp - // } + case req @ POST -> Root / `api` / `verb` / "query" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + for { + // Get the data from the partsMap + eitherData <- Data.mkData(partsMap) + // Get the query from the partsMap + eitherQuery <- SparqlQuery.mkSparqlQuery(partsMap) + + /* Accumulate either: + * - the errors occurred parsing the data/query + * - the results of parsing the data/query in a single value */ + eitherInputs: Either[String, (Data, SparqlQuery)] = for { + data <- eitherData + query <- eitherQuery + } yield (data, query) + + // Make response + response <- eitherInputs.fold( + // If there was an error parsing the data/query, return it + err => errorResponseJson(err, InternalServerError), + // Else, try and compute the query, first destructuring the tuple + { + // Destructure tuple + case (data, query) => + for { + maybeDataQuery <- DataQuery + .dataQuery(data, query) + .attempt + response <- maybeDataQuery.fold( + err => + errorResponseJson(err.getMessage, InternalServerError), + dataQuery => Ok(dataQuery.asJson) + ) + } yield response + + // Generic error. Code should not reach here. + case _ => + errorResponseJson( + DataServiceError.couldNotParseData, + InternalServerError + ) + } + ) + } yield response + } /** Attempt to extract a schema from an RDF source. * Receives a JSON object with the input RDF information: - * - data [String]: Raw RDF data - * - dataUrl [String]: Url containing the RDF data - * - dataFile [File Object]: File containing RDF data + * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it * - dataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied - * - activeDataSource [String]: Identifies the source of the data (raw, URL, file...) 
- * Returns a JSON object with the extraction information: - * - message [String]: Informational message - * - data [String]: Input RDF data - * - dataFormat [String]: Format of the input RDF data - * - inferredShape [String]: Raw extracted shape - * - schemaFormat [String]: Format of the extracted schema - * - schemaEngine [String]: Engine of the extracted schema - * - resultShapeMap [String]: Shapemap of the extracted schema + * - nodeSelector [String]: Node selector to use + * Returns a JSON object with the extraction information (see [[DataExtract.encodeDataExtractOperation]] */ - // case req @ POST -> Root / `api` / `verb` / "extract" => - // req.decode[Multipart[IO]] { m => - // val partsMap = PartsMap(m.parts) - // for { - /* maybeData <- DataSingle.getData(partsMap, relativeBase).attempt */ - // schemaEngine <- partsMap.optPartValue("schemaEngine") - // optSchemaFormatStr <- partsMap.optPartValue("schemaFormat") - // inference <- partsMap.optPartValue("inference") - // label <- partsMap.optPartValue("labelName") - // optBaseStr <- partsMap.optPartValue("base") - // nodeSelector <- partsMap.optPartValue("nodeSelector") - // schemaFormat <- optEither2f( - // optSchemaFormatStr, - // SchemaFormat.fromString - // ) - // response <- maybeData match { - // // No data received - // case Left(err) => - // errorResponseJson(err.getMessage, BadRequest) - // // Data received, try to extract - // case Right((resourceRdf, dp)) => - // for { - // result <- io2f( - // // Raise IO error if no node selector, - // resourceRdf.use(rdf => - // dataExtract( - // rdf, - // dp.data, - // dp.optDataFormat, - // nodeSelector, - // inference, - // schemaEngine, - // schemaFormat, - // label, - // None - // ) - // ) - // ).attempt - /* .map(_.fold(exc => Left(exc.getMessage), res => Right(res))) */ - // response <- result match { - /* case Left(err) => errorResponseJson(err, InternalServerError) */ - // case Right(result) => Ok(result.toJson) - // } - // - // } yield response - // } 
- // } yield response - // } + case req @ POST -> Root / `api` / `verb` / "extract" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + for { + // Get the data from the partsMap + eitherData <- Data.mkData(partsMap) + // Schema format and engine will be ShEx, force later. + // Try to map label to IRI or node selector + optLabel <- partsMap + .optPartValue(LabelParameter.name) + .map(_.map(IRI(_))) + // Try to get node selector + optNodeSelectorStr <- partsMap.optPartValue( + NodeSelectorParameter.name + ) + + response <- eitherData.fold( + // If there was an error parsing the data, return it + err => errorResponseJson(err, InternalServerError), + // Else, try and compute the shex extraction + data => + // Return error if no node selector + optNodeSelectorStr match { + case None => + errorResponseJson(DataServiceError.noNodeSelector, BadRequest) + case Some(nodeSelector) if nodeSelector.isBlank => + errorResponseJson( + DataServiceError.emptyNodeSelector, + BadRequest + ) + case Some(nodeSelector) => + for { + maybeResult <- DataExtract + .dataExtract( + data, + nodeSelector, + Option(ShExSchema.empty), + Option(ShExC), + optLabel, + relativeBase = None + ) + .attempt // Check for exceptions when extracting + response <- maybeResult.fold( + // Error in extraction: return the error + err => + errorResponseJson(err.getMessage, InternalServerError), + // Success: build successful response + dataExtraction => Ok(dataExtraction.asJson) + ) + } yield response + } + ) + } yield response + + } } - private val relativeBase = ApiDefaults.relativeBase } @@ -342,4 +350,10 @@ private object DataServiceError extends Enumeration { type DataServiceError = String val couldNotParseData: DataServiceError = "Unknown error parsing the data provided. Check the input and the selected format." + + val noNodeSelector: DataServiceError = + "No node selector provided for extraction." 
+ + val emptyNodeSelector: DataServiceError = + "Empty node selector provided for extraction." } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala index b4c6df91..9ad0b838 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala @@ -4,55 +4,85 @@ import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuerySource.{ SparqlQuerySource, - defaultActiveQuerySource + defaultQuerySource } import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ - ActiveQuerySourceParameter, - QueryFileParameter, QueryParameter, - QueryUrlParameter + QuerySourceParameter } import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} /** Data class representing a SPARQL query and its current source * - * @param queryRaw Query raw text - * @param activeQuerySource Active source, used to know which source the query comes from + * @param queryPre Query contents, as received before being processed depending on the [[querySource]] + * @param querySource Active source, used to know which source the query comes from */ sealed case class SparqlQuery private ( - queryRaw: String, - activeQuerySource: SparqlQuerySource -) + private val queryPre: Option[String], + querySource: SparqlQuerySource +) extends LazyLogging { + + /** Given the (user input) for the query and its source, fetch the Query contents using the input in the way the source needs it + * (e.g.: for URLs, fetch the input with a web request; for files, 
decode the input; for raw data, do nothing) + * + * @return Either an error building the query text or a String containing the final text of the SPARQL query + */ + lazy val rawQuery: Either[String, String] = + queryPre match { + case None => Left("Could not build the query from empty data") + + case Some(userQuery) => + querySource match { + case SparqlQuerySource.TEXT | SparqlQuerySource.FILE => + Right(userQuery) + case SparqlQuerySource.URL => + getUrlContents(userQuery) + + case other => + val msg = s"Unknown query source: $other" + logger.warn(msg) + Left(msg) + } + } +} private[api] object SparqlQuery extends LazyLogging { + implicit val encodeSparqlQuery: Encoder[SparqlQuery] = + (query: SparqlQuery) => + Json.obj( + ("query", query.rawQuery.toOption.asJson), + ("source", query.querySource.asJson) + ) + /** Placeholder value used for the sparql query whenever an empty query is issued/needed. */ - private val emptyQueryValue = "" + private val emptyQuery = SparqlQuery( + queryPre = None, + querySource = defaultQuerySource + ) /** Given a request's parameters, try to extract a SPARQL query from them * * @param partsMap Request's parameters * @return Either the SPARQL query or an error message */ - def getSparqlQuery( + def mkSparqlQuery( partsMap: PartsMap ): IO[Either[String, SparqlQuery]] = for { - queryStr <- partsMap.optPartValue(QueryParameter.name) - queryUrl <- partsMap.optPartValue(QueryUrlParameter.name) - queryFile <- partsMap.optPartValue(QueryFileParameter.name) - activeQueryTab <- partsMap.optPartValue(ActiveQuerySourceParameter.name) + paramQuery <- partsMap.optPartValue(QueryParameter.name) + activeQueryTab <- partsMap.optPartValue(QuerySourceParameter.name) _ = logger.debug( s"Getting SPARQL from params. 
Query tab: $activeQueryTab" ) - maybeQuery: Either[String, SparqlQuery] = mkSparqlQuery( - queryStr, - queryUrl, - queryFile, + maybeQuery <- mkSparqlQuery( + paramQuery, activeQueryTab ) @@ -60,55 +90,25 @@ private[api] object SparqlQuery extends LazyLogging { /** Create a SparqlQuery instance, given its source and data * - * @param queryStr Optionally, the raw contents of the query - * @param queryUrl Optionally, the URL with the contents of the query - * @param queryFile Optionally, the file with the contents of the query + * @param queryStr Optionally, the contents of the query not processed by their source * @param activeQuerySource Optionally, the indicator of the query source (raw, url or file) * @return */ - def mkSparqlQuery( + private def mkSparqlQuery( queryStr: Option[String], - queryUrl: Option[String], - queryFile: Option[String], activeQuerySource: Option[SparqlQuerySource] - ): Either[String, SparqlQuery] = { - - // Create the query depending on the client's selected method - val maybeQuery: Either[String, SparqlQuery] = activeQuerySource.getOrElse( - defaultActiveQuerySource - ) match { - case SparqlQuerySource.TEXT => - queryStr match { - case None => Left("No value for the query string") - case Some(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQuerySource.TEXT)) - } - case SparqlQuerySource.URL => - queryUrl match { - case None => Left(s"No value for the query URL") - case Some(queryUrl) => - getUrlContents(queryUrl) match { - case Right(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQuerySource.URL)) - case Left(err) => Left(err) - } - - } - case SparqlQuerySource.FILE => - queryFile match { - case None => Left(s"No value for the query file") - case Some(queryRaw) => - Right(SparqlQuery(queryRaw, SparqlQuerySource.FILE)) - } - - case other => - val msg = s"Unknown value for activeQueryTab: $other" - logger.warn(msg) - Left(msg) - - } - - maybeQuery - } + ): IO[Either[String, SparqlQuery]] = + for { + query <- IO { + emptyQuery.copy( + 
queryPre = queryStr, + querySource = activeQuerySource.getOrElse(defaultQuerySource) + ) + } + } yield query.rawQuery.fold( + // If the query text built is blank, an error occurred + err => Left(err), + _ => Right(query) + ) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala index ada122c7..85abd97c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuerySource.scala @@ -8,9 +8,9 @@ package es.weso.rdfshape.server.api.routes.endpoint.logic.query private[logic] object SparqlQuerySource extends Enumeration { type SparqlQuerySource = String - val TEXT = "#queryTextArea" - val URL = "#queryUrl" - val FILE = "#queryFile" + val TEXT = "byText" + val URL = "byUrl" + val FILE = "byFile" - val defaultActiveQuerySource: SparqlQuerySource = TEXT + val defaultQuerySource: SparqlQuerySource = TEXT } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index d97bfa05..628754a0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -13,7 +13,7 @@ import es.weso.rdfshape.server.api.routes.endpoint.logic.Endpoint.{ import es.weso.rdfshape.server.api.routes.endpoint.logic.EndpointStatus._ import es.weso.rdfshape.server.api.routes.endpoint.logic.Outgoing.getOutgoing import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery -import 
es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery.getSparqlQuery +import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery.mkSparqlQuery import es.weso.rdfshape.server.api.routes.endpoint.logic.{Endpoint, Outgoing} import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ EndpointParameter, @@ -47,9 +47,8 @@ class EndpointService(client: Client[IO]) /** Perform a SPARQL query targeted to a specific endpoint. * Receives a JSON object with the input endpoint query: - * - query [String]: Input query - * - endpoint [String]: Target endpoint - * - activeQueryTab [String]: Identifies the source of the query (raw, URL, file...) + * - query [String]: User input for the query + * - querySource [String]: Identifies the source of the query (raw, URL, file...) * Returns a JSON object with the query results: * - head [Object]: Query metadata * - vars: [Array]: Query variables @@ -68,15 +67,19 @@ class EndpointService(client: Client[IO]) String, SparqlQuery ]]( - getSparqlQuery(partsMap) + mkSparqlQuery(partsMap) ) - query <- EitherT.fromEither[IO](either) - queryString = query.queryRaw + eitherQuery <- EitherT.fromEither[IO](either) + json <- { - logger.debug( - s"Query to endpoint $endpoint: $queryString" + + eitherQuery.rawQuery.fold( + err => EitherT.left(IO.pure(err)), + raw => { + logger.debug(s"Query to endpoint $endpoint: $raw") + io2es(endpoint.queryAsJson(raw)) + } ) - io2es(endpoint.queryAsJson(queryString)) } } yield json diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala index 3d2e184e..9584ce09 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala @@ -224,7 +224,7 @@ object Schema extends LazyLogging { 
TargetSchemaFormatParameter.name ) activeSchemaSource <- partsMap.optPartValue( - ActiveSchemaSourceParameter.name + SchemaSourceParameter.name ) } yield { Schema( diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala index ff7ecf16..37feb280 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala @@ -197,7 +197,7 @@ private[api] object SchemaOperations extends LazyLogging { val triggerModeStr = triggerMode.triggerModeStr for { prefixMap <- rdf.getPrefixMap - shapeMapRaw = triggerMode.shapeMap.shapeMapRaw + shapeMapRaw = triggerMode.shapeMap.rawShapeMap.getOrElse("") pair <- ValidationTrigger.findTrigger( triggerModeStr, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala index db30874a..1cca2995 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaSource.scala @@ -8,9 +8,9 @@ package es.weso.rdfshape.server.api.routes.schema.logic private[api] object SchemaSource extends Enumeration { type SchemaSource = String - val TEXT = "#schemaTextArea" - val URL = "#schemaUrl" - val FILE = "#schemaFile" + val TEXT = "byText" + val URL = "byUrl" + val FILE = "byFile" - val defaultActiveSchemaSource: SchemaSource = TEXT + val defaultSchemaSource: SchemaSource = TEXT } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala index 2d7b7a4f..dedf1cb3 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala @@ -38,24 +38,20 @@ private[api] object TriggerMode extends LazyLogging { ): IO[Either[String, TriggerMode]] = { for { // Get data sent in que query - triggerMode <- partsMap.optPartValue(TriggerModeParameter.name) - shapeMapStr <- partsMap.optPartValue(ShapeMapTextParameter.name) - shapeMapUrl <- partsMap.optPartValue(ShapeMapUrlParameter.name) - shapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) + triggerMode <- partsMap.optPartValue(TriggerModeParameter.name) + paramShapemap <- partsMap.optPartValue(ShapeMapParameter.name) shapeMapFormat <- ShapeMapFormat.fromRequestParams( ShapeMapFormatParameter.name, partsMap ) activeShapeMapTab <- partsMap.optPartValue( - ActiveShapeSourceTabParameter.name + ShapemapSourceParameter.name ) // Get companion shapemap - maybeShapeMap = ShapeMap.mkShapeMap( - shapeMapStr, - shapeMapUrl, - shapeMapFile, + maybeShapeMap <- ShapeMap.mkShapeMap( + paramShapemap, shapeMapFormat, None, activeShapeMapTab diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index 703eeda7..4a21801e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -2,8 +2,12 @@ package es.weso.rdfshape.server.api.routes.shapemap.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.format.dataFormats.{Compact, ShapeMapFormat} -import 
es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.ShapeMapSource +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.{ + ShapeMapSource, + defaultShapeMapSource +} import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.error.exceptions.JsonConversionException @@ -15,27 +19,47 @@ import io.circe.Json /** Data class representing a ShapeMap and its current source. * * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). - * @param shapeMapRaw Shapemap raw text + * @param shapeMapPre Shapemap contents, as received before being processed depending on the [[shapeMapSource]] * @param shapeMapFormat Shapemap format * @param targetShapeMapFormat Optionally, the shapemap target format (only for conversion operations) - * @param activeShapeMapSource Active source, used to know which source the shapemap comes from + * @param shapeMapSource Active source, used to know which source the shapemap comes from */ sealed case class ShapeMap private ( - shapeMapRaw: String, + private val shapeMapPre: Option[String], shapeMapFormat: ShapeMapFormat, targetShapeMapFormat: Option[ShapeMapFormat], - activeShapeMapSource: String -) { + shapeMapSource: String +) extends LazyLogging { + + /** Given the (user input) for the shapeMap and its source, fetch the shapeMap contents using the input in the way the source needs it + * (e.g.: for URLs, fetch the input with a web request; for files, decode the input; for raw data, do nothing) + * + * @return Optionally, a String containing the final text of the shapeMap query + */ + lazy val rawShapeMap: Option[String] = shapeMapSource match { + case ShapeMapSource.TEXT | ShapeMapSource.FILE => + shapeMapPre + 
case ShapeMapSource.URL => + shapeMapPre.flatMap(getUrlContents(_).toOption) + + case other => + logger.warn(s"Unknown value for activeQueryTab: $other") + None + } /** Inner shapemap structure of the data in this instance * * @return A ShapeMap instance used by WESO libraries in validation */ lazy val innerShapeMap: Either[String, ShapeMapW] = { - ShapeMapW - .fromString(shapeMapRaw, shapeMapFormat.name) match { - case Left(errorList) => Left(errorList.toList.mkString("\n")) - case Right(shapeMap) => Right(shapeMap) + rawShapeMap match { + case Some(shapeMapStr) => + ShapeMapW + .fromString(shapeMapStr, shapeMapFormat.name) match { + case Left(errorList) => Left(errorList.toList.mkString("\n")) + case Right(shapeMap) => Right(shapeMap) + } + case None => Left("Cannot extract the ShapeMap from an empty instance") } } @@ -49,7 +73,7 @@ sealed case class ShapeMap private ( case Left(err) => throw JsonConversionException(err) case Right(dataShapeMap) => Json.fromFields( - maybeField("shapeMap", Some(shapeMapRaw), Json.fromString) ++ + maybeField("shapeMap", rawShapeMap, Json.fromString) ++ maybeField( "shapeMapFormat", Some(shapeMapFormat), @@ -70,25 +94,25 @@ private[api] object ShapeMap extends LazyLogging { /** Placeholder value used for the shapemap whenever an empty shapemap is issued/needed. 
*/ - val emptyShapeMapValue = "" - - /** Default shapemap format used when no alternatives are present - */ - private val defaultShapeMapFormat: ShapeMapFormat = Compact + private val emptyShapeMap = + ShapeMap( + shapeMapPre = None, + shapeMapFormat = ApiDefaults.defaultShapeMapFormat, + targetShapeMapFormat = None, + shapeMapSource = ShapeMapSource.defaultShapeMapSource + ) /** Given a request's parameters, try to extract a shapemap from them * * @param partsMap Request's parameters * @return Either the shapemap or an error message */ - def getShapeMap( + def mkShapeMap( partsMap: PartsMap ): IO[Either[String, ShapeMap]] = { for { // Get data sent in que query - shapeMapStr <- partsMap.optPartValue(ShapeMapTextParameter.name) - shapeMapUrl <- partsMap.optPartValue(ShapeMapUrlParameter.name) - shapeMapFile <- partsMap.optPartValue(ShapeMapFileParameter.name) + paramShapemap <- partsMap.optPartValue(ShapeMapParameter.name) shapeMapFormat <- ShapeMapFormat.fromRequestParams( ShapeMapFormatParameter.name, partsMap @@ -98,7 +122,7 @@ private[api] object ShapeMap extends LazyLogging { partsMap ) activeShapeMapSource <- partsMap.optPartValue( - ActiveShapeSourceTabParameter.name + ShapemapSourceParameter.name ) _ = logger.debug( @@ -106,10 +130,8 @@ private[api] object ShapeMap extends LazyLogging { ) // Create the shapemap depending on the client's selected method - maybeShapeMap: Either[String, ShapeMap] = mkShapeMap( - shapeMapStr, - shapeMapUrl, - shapeMapFile, + maybeShapeMap <- mkShapeMap( + paramShapemap, shapeMapFormat, targetShapeMapFormat, activeShapeMapSource @@ -120,92 +142,32 @@ private[api] object ShapeMap extends LazyLogging { /** Create a ShapeMap instance, given its source and format * - * @param shapeMapStr Optionally, the raw contents of the shapemap - * @param shapeMapUrl Optionally, the URL with the contents of the shapemap - * @param shapeMapFile Optionally, the file with the contents of the shapemap - * @param shapeMapFormat Optionally, the format 
of the shapemap - * @param targetShapeMapFormat Optionally, the target format of the shapemap (for conversions) - * @param activeShapeMapSource Optionally, the indicator of the shapemap source (raw, url or file) + * @param optShapeMapData Optionally, the contents of the shapemap + * @param optShapeMapFormat Optionally, the format of the shapemap + * @param optTargetShapeMapFormat Optionally, the target format of the shapemap (for conversions) + * @param optShapeMapSource Optionally, the indicator of the shapemap source (raw, url or file) * @return A new ShapeMap based on the given parameters */ - def mkShapeMap( - shapeMapStr: Option[String], - shapeMapUrl: Option[String], - shapeMapFile: Option[String], - shapeMapFormat: Option[ShapeMapFormat], - targetShapeMapFormat: Option[ShapeMapFormat], - activeShapeMapSource: Option[ShapeMapSource] - ): Either[String, ShapeMap] = { - // Confirm chosen formats - val format = - shapeMapFormat.getOrElse(ShapeMapFormat.defaultFormat) - - // Create the shapemap depending on the client's selected source - val maybeShapeMap: Either[String, ShapeMap] = - activeShapeMapSource.getOrElse( - ShapeMapSource.defaultActiveShapeMapSource - ) match { - case ShapeMapSource.TEXT => - shapeMapStr match { - case None => Left("No value for the ShapeMap string") - case Some(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw = shapeMapRaw, - shapeMapFormat = format, - targetShapeMapFormat = targetShapeMapFormat, - activeShapeMapSource = ShapeMapSource.TEXT - ) - ) - } - - case ShapeMapSource.URL => - shapeMapUrl match { - case None => Left(s"No value for the shapemap URL") - case Some(url) => - getUrlContents(url) match { - case Right(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw = shapeMapRaw, - shapeMapFormat = format, - targetShapeMapFormat = targetShapeMapFormat, - activeShapeMapSource = ShapeMapSource.URL - ) - ) - case Left(err) => Left(err) - } - } - case ShapeMapSource.FILE => - shapeMapFile match { - case None => Left(s"No value for 
the shapemap file") - case Some(shapeMapRaw) => - Right( - ShapeMap( - shapeMapRaw = shapeMapRaw, - shapeMapFormat = format, - targetShapeMapFormat = targetShapeMapFormat, - activeShapeMapSource = ShapeMapSource.FILE - ) - ) - } - case other => - val msg = s"Unknown shapemap source: $other" - logger.warn(msg) - Left(msg) + private[api] def mkShapeMap( + optShapeMapData: Option[String], + optShapeMapFormat: Option[ShapeMapFormat], + optTargetShapeMapFormat: Option[ShapeMapFormat], + optShapeMapSource: Option[ShapeMapSource] + ): IO[Either[String, ShapeMap]] = + for { + shapeMap <- IO { + ShapeMap( + shapeMapPre = optShapeMapData, + shapeMapFormat = + optShapeMapFormat.getOrElse(ApiDefaults.defaultShapeMapFormat), + targetShapeMapFormat = optTargetShapeMapFormat, + shapeMapSource = optShapeMapSource.getOrElse(defaultShapeMapSource) + ) } - maybeShapeMap - } - - /** @return Empty shapemap representation, with no inner data and all defaults - */ - private def empty: ShapeMap = - ShapeMap( - shapeMapRaw = emptyShapeMapValue, - shapeMapFormat = defaultShapeMapFormat, - targetShapeMapFormat = None, - activeShapeMapSource = ShapeMapSource.defaultActiveShapeMapSource - ) - + result = shapeMap.rawShapeMap match { + case Some(_) => Right(shapeMap) + case None => Left("Could not build the shapeMap") + } + } yield result } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala index 00c83130..789559a1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMapSource.scala @@ -8,9 +8,9 @@ package es.weso.rdfshape.server.api.routes.shapemap.logic private[api] object ShapeMapSource extends Enumeration { type ShapeMapSource = String - val TEXT = "#shapeMapTextArea" - val URL = 
"#shapeMapUrl" - val FILE = "#shapeMapFile" + val TEXT = "byText" + val URL = "byUrl" + val FILE = "byFile" - val defaultActiveShapeMapSource: ShapeMapSource = TEXT + val defaultShapeMapSource: ShapeMapSource = TEXT } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index 507d9d6a..d61e4646 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -6,7 +6,7 @@ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap -import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.getShapeMap +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.mkShapeMap import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import io.circe._ @@ -27,7 +27,7 @@ class ShapeMapService(client: Client[IO]) with ApiService with LazyLogging { - override val verb: String = "shapeMap" + override val verb: String = "shapemap" /** Describe the API routes handled by this service and the actions performed on each of them */ @@ -58,7 +58,7 @@ class ShapeMapService(client: Client[IO]) req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) - val maybeShapeMap: IO[Either[String, ShapeMap]] = getShapeMap(partsMap) + val maybeShapeMap: IO[Either[String, ShapeMap]] = mkShapeMap(partsMap) maybeShapeMap.attempt.flatMap( _.fold( // General exception diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala index a2ef823b..aa48f8dd 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -12,8 +12,6 @@ object IncomingRequestParameters { // String constants representing each parameter name expected by the server lazy val data = "data" lazy val compoundData = "compoundData" - lazy val dataUrl = "dataUrl" - lazy val dataFile = "dataFile" lazy val dataFormat = "dataFormat" lazy val targetDataFormat = "targetDataFormat" @@ -32,23 +30,19 @@ object IncomingRequestParameters { lazy val node = "node" lazy val nodeSelector = "nodeSelector" - lazy val shapeMap = "shapeMap" + lazy val shapemap = "shapemap" lazy val shape_map = "shape-map" - lazy val shapeMapUrl = "shapeMapUrl" - lazy val shapeMapFile = "shapeMapFile" - lazy val shapeMapFormat = "shapeMapFormat" - lazy val targetShapeMapFormat = "targetShapeMapFormat" + lazy val shapemapFormat = "shapemapFormat" + lazy val targetShapemapFormat = "targetShapemapFormat" - lazy val query = "query" - lazy val queryUrl = "queryUrl" - lazy val queryFile = "queryFile" + lazy val query = "query" lazy val endpoint = "endpoint" - lazy val activeDataSource = "activeDataSource" - lazy val activeSchemaSource = "activeSchemaSource" - lazy val activeShapeMapSource = "activeShapeMapSource" - lazy val activeQuerySource = "activeQuerySource" + lazy val dataSource = "dataSource" + lazy val schemaSource = "schemaSource" + lazy val shapemapSource = "shapemapSource" + lazy val querySource = "querySource" lazy val wdEntity = "wdEntity" lazy val wdSchema = "wdSchema" @@ -65,33 +59,22 @@ object IncomingRequestParameters { lazy val continue = "continue" lazy val withDot = "withDot" - /** Parameter expected to contain raw RDF data (URL encoded) + /** Parameter expected to 
contain RDF data contents (URL encoded) + * + * @note These contents may be raw data, a URL with the data or a File with the data. + * The source of the data is therefore specified by [[DataSourceParameter]] */ object DataParameter extends OptionalQueryParamDecoderMatcher[String](data) { val name: String = data } - /** Parameter expected to contain a compound of raw RDF data (URL encoded), formed by 2 or more RDF sources + /** Parameter expected to contain a compound of RDF data (URL encoded), formed by 2 or more RDF sources */ object CompoundDataParameter extends OptionalQueryParamDecoderMatcher[String](compoundData) { val name: String = compoundData } - /** Parameter expected to contain a URL where RDF data is located - */ - object DataUrlParameter - extends OptionalQueryParamDecoderMatcher[String](dataUrl) { - val name: String = dataUrl - } - - /** Parameter expected to contain a file where RDF data is located - */ - object DataFileParameter - extends OptionalQueryParamDecoderMatcher[String](dataFile) { - val name: String = dataFile - } - /** Parameter expected to contain an RDF format name, referencing the user's data format */ object DataFormatParameter @@ -196,69 +179,47 @@ object IncomingRequestParameters { val name: String = nodeSelector } - /** Parameter expected to contain raw shapemap data (URL encoded) + /** Parameter expected to contain Shapemap contents (URL encoded) + * + * @note These contents may be raw data, a URL with the data or a File with the data. 
+ * The source of the data is therefore specified by [[ShapemapSourceParameter]] */ - object ShapeMapTextParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMap) { - val name: String = shapeMap + object ShapeMapParameter + extends OptionalQueryParamDecoderMatcher[String](shapemap) { + val name: String = shapemap } - /** Parameter expected to contain raw shapemap data (URL encoded) + /** Alternative parameter with the same uses as [[ShapeMapParameter]] */ object ShapeMapParameterAlt extends OptionalQueryParamDecoderMatcher[String](shape_map) { val name: String = shape_map } - /** Parameter expected to contain a URL where a shapemap is located - */ - object ShapeMapUrlParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMapUrl) { - val name: String = shapeMapUrl - } - - /** Parameter expected to contain a file where a shapemap is located - */ - object ShapeMapFileParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMapFile) { - val name: String = shapeMapFile - } - /** Parameter expected to contain a shapemap format name, referencing the user's shapemap format */ object ShapeMapFormatParameter - extends OptionalQueryParamDecoderMatcher[String](shapeMapFormat) { - val name: String = shapeMapFormat + extends OptionalQueryParamDecoderMatcher[String](shapemapFormat) { + val name: String = shapemapFormat } /** Parameter expected to contain a shapemap format name, referencing the target format of a conversion */ object TargetShapeMapFormatParameter - extends OptionalQueryParamDecoderMatcher[String](targetShapeMapFormat) { - val name: String = targetShapeMapFormat + extends OptionalQueryParamDecoderMatcher[String](targetShapemapFormat) { + val name: String = targetShapemapFormat } - /** Parameter expected to contain a raw SPARQL query (URL encoded) + /** Parameter expected to contain SPARQL query data contents (URL encoded) + * + * @note These contents may be raw data, a URL with the query or a File with the query. 
+ * The source of the query is therefore specified by [[QuerySourceParameter]] */ object QueryParameter extends OptionalQueryParamDecoderMatcher[String](query) { val name: String = query } - /** Parameter expected to contain a URL where a SPARQL query is located - */ - object QueryUrlParameter - extends OptionalQueryParamDecoderMatcher[String](queryUrl) { - val name: String = queryUrl - } - - /** Parameter expected to contain a file where a SPARQL query is located - */ - object QueryFileParameter - extends OptionalQueryParamDecoderMatcher[String](queryFile) { - val name: String = queryFile - } - /** Parameter expected to contain a raw endpoint location */ object EndpointParameter @@ -269,33 +230,33 @@ object IncomingRequestParameters { /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in data-related operations */ - object ActiveDataSourceParameter - extends OptionalQueryParamDecoderMatcher[String](activeDataSource) { - val name: String = activeDataSource + object DataSourceParameter + extends OptionalQueryParamDecoderMatcher[String](dataSource) { + val name: String = dataSource } /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in schema-related operations */ - object ActiveSchemaSourceParameter - extends OptionalQueryParamDecoderMatcher[String](activeSchemaSource) { - val name: String = activeSchemaSource + object SchemaSourceParameter + extends OptionalQueryParamDecoderMatcher[String](schemaSource) { + val name: String = schemaSource } /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in shapemap-related operations */ - object ActiveShapeSourceTabParameter - extends OptionalQueryParamDecoderMatcher[String](activeShapeMapSource) { - val name: String = activeShapeMapSource + object 
ShapemapSourceParameter + extends OptionalQueryParamDecoderMatcher[String](shapemapSource) { + val name: String = shapemapSource } /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) * in query-related operations */ - object ActiveQuerySourceParameter - extends OptionalQueryParamDecoderMatcher[String](activeQuerySource) { - val name: String = activeQuerySource + object QuerySourceParameter + extends OptionalQueryParamDecoderMatcher[String](querySource) { + val name: String = querySource } /** Parameter expected to contain a valid identifier/name/label of a wikidata entity From c683fcd9cfb190afb4922722c25de8d648b5f8fd Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Tue, 16 Nov 2021 20:03:00 +0100 Subject: [PATCH 27/32] Refactored formats and began schema service --- .../server/api/definitions/ApiDefaults.scala | 13 +- .../api/format/dataFormats/DataFormat.scala | 24 +- .../api/format/dataFormats/RdfFormat.scala | 9 + .../{ => schemaFormats}/SchemaFormat.scala | 25 +- .../schemaFormats/ShExFormat.scala | 43 + .../schemaFormats/ShaclFormat.scala | 32 + .../api/routes/data/logic/DataSource.scala | 2 +- .../data/logic/aux/InferenceCodecs.scala | 5 +- .../data/logic/operations/DataConvert.scala | 7 +- .../data/logic/operations/DataExtract.scala | 2 +- .../data/logic/operations/DataInfo.scala | 33 +- .../data/logic/operations/DataOperation.scala | 7 +- .../api/routes/data/logic/types/Data.scala | 32 +- .../data/logic/types/DataEndpoint.scala | 9 +- .../routes/data/logic/types/DataSingle.scala | 56 +- .../logic/types/merged/DataCompound.scala | 7 +- .../api/routes/data/service/DataService.scala | 125 ++- .../api/routes/schema/logic/Schema.scala | 254 ------ .../schema/logic/SchemaConversionResult.scala | 82 -- .../api/routes/schema/logic/SchemaInfo.scala | 66 -- .../schema/logic/SchemaInfoResult.scala | 11 - .../schema/logic/SchemaOperations.scala | 360 -------- 
.../api/routes/schema/logic/TriggerMode.scala | 85 -- .../schema/logic/aux/SchemaAdapter.scala | 41 + .../schema/logic/aux/SchemaOperations.scala | 102 +++ .../logic/operations/SchemaConvert.scala | 115 +++ .../schema/logic/operations/SchemaInfo.scala | 93 ++ .../logic/operations/SchemaOperation.scala | 20 + .../logic/operations/SchemaValidate.scala | 126 +++ .../schema/logic/trigger/TriggerMode.scala | 102 +++ .../logic/trigger/TriggerModeType.scala | 13 + .../logic/trigger/TriggerShapeMap.scala | 72 ++ .../trigger/TriggerTargetDeclarations.scala | 47 ++ .../routes/schema/logic/types/Schema.scala | 111 +++ .../schema/logic/types/SchemaSimple.scala | 177 ++++ .../routes/schema/service/SchemaService.scala | 489 ++++++----- .../api/routes/shapemap/logic/ShapeMap.scala | 149 ++-- .../shapemap/service/ShapeMapService.scala | 3 +- .../wikibase/logic/WikibaseSchema.scala | 134 +++ .../wikibase/logic/WikibaseSchemaParam.scala | 118 --- .../wikibase/service/WikibaseService.scala | 797 +++++++++--------- .../service/WikibaseServiceUtils.scala | 1 + .../IncomingRequestParameters.scala | 35 +- .../server/utils/json/JsonUtils.scala | 38 +- 44 files changed, 2109 insertions(+), 1963 deletions(-) rename modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/{ => schemaFormats}/SchemaFormat.scala (53%) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShExFormat.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShaclFormat.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala delete mode 100644 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfoResult.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaAdapter.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala delete mode 100644 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index aa036aff..73c6838c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -2,10 +2,13 @@ package es.weso.rdfshape.server.api.definitions import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, NONE} +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.{ + SchemaFormat, + ShaclFormat +} import es.weso.rdfshape.server.api.format.dataFormats.{ DataFormat, RDFFormat, - SchemaFormat, ShapeMapFormat } import es.weso.rdfshape.server.api.routes.data.logic.DataSource @@ -24,7 +27,7 @@ case object ApiDefaults { val defaultDataFormat: DataFormat = DataFormat.defaultFormat val defaultRdfFormat: RDFFormat = RDFFormat.defaultFormat val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats - val defaultSchemaFormat: SchemaFormat = SchemaFormat.defaultFormat + val defaultSchemaFormat: SchemaFormat = ShaclFormat.defaultFormat val defaultSchemaFormatName: String = defaultSchemaFormat.name val availableSchemaEngines: List[String] = Schemas.availableSchemaNames val defaultSchemaEngine: Schema = Schemas.defaultSchema @@ -39,10 +42,10 @@ case object ApiDefaults { val defaultSchemaEmbedded = false val defaultInferenceEngine: InferenceEngine = NONE val defaultInferenceEngineName: String = defaultInferenceEngine.name - val defaultActiveDataSource: DataSource = DataSource.defaultDataSource - val defaultActiveSchemaSource: SchemaSource = + val defaultDataSource: DataSource = DataSource.defaultDataSource + val defaultSchemaSource: SchemaSource = SchemaSource.defaultSchemaSource - val 
defaultActiveShapeMapSource: ShapeMapSource = + val defaultShapeMapSource: ShapeMapSource = ShapeMapSource.defaultShapeMapSource val defaultShapeMapFormat: ShapeMapFormat = ShapeMapFormat.defaultFormat val availableShapeMapFormats: List[String] = ShapeMap.formats diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala index 90fa464c..ca4baff5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala @@ -1,6 +1,7 @@ package es.weso.rdfshape.server.api.format.dataFormats import es.weso.rdfshape.server.api.format._ +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat import org.http4s.MediaType /** Extension of the Format interface to represent RDF data formats @@ -14,22 +15,13 @@ class DataFormat(formatName: String, formatMimeType: MediaType) extends Format { */ object DataFormat extends FormatCompanion[DataFormat] { - override lazy val availableFormats: List[DataFormat] = List( - Json, - Dot, - Svg, - Png, - Turtle, - NTriples, - Trig, - JsonLd, - RdfXml, - RdfJson, - HtmlMicrodata, - HtmlRdfa11, - ShExC, - Compact - ) + override lazy val availableFormats: List[DataFormat] = + (RDFFormat.availableFormats ++ + SchemaFormat.availableFormats ++ + HtmlFormat.availableFormats ++ + GraphicFormat.availableFormats ++ + ShapeMapFormat.availableFormats ++ + List(Json, Dot)).distinct override val defaultFormat: DataFormat = Json } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala index d981c95f..6677a840 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala 
+++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala @@ -18,6 +18,7 @@ object RDFFormat extends FormatCompanion[RDFFormat] { List( Turtle, NTriples, + NQuads, Trig, JsonLd, RdfXml, @@ -42,6 +43,14 @@ case object NTriples formatMimeType = new MediaType("application", "n-triples") ) +/** Represents the mime-type "application/n-quads" + */ +case object NQuads + extends RDFFormat( + formatName = "N-Quads", + formatMimeType = new MediaType("application", "n-quads") + ) + /** Represents the mime-type "application/trig" */ case object Trig diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/SchemaFormat.scala similarity index 53% rename from modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/SchemaFormat.scala index c32829b8..b9bce5e8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/SchemaFormat.scala @@ -1,5 +1,6 @@ -package es.weso.rdfshape.server.api.format.dataFormats +package es.weso.rdfshape.server.api.format.dataFormats.schemaFormats +import es.weso.rdfshape.server.api.format.dataFormats._ import es.weso.rdfshape.server.api.format.{Format, FormatCompanion} import org.http4s.MediaType @@ -17,23 +18,9 @@ class SchemaFormat(formatName: String, formatMimeType: MediaType) */ object SchemaFormat extends FormatCompanion[SchemaFormat] { - override lazy val availableFormats: List[SchemaFormat] = - List( - new SchemaFormat(Turtle), - new SchemaFormat(JsonLd), - new SchemaFormat(NTriples), - new SchemaFormat(RdfXml), - new SchemaFormat(RdfJson), - new SchemaFormat(Trig), - ShExC - ) + 
override lazy val availableFormats: List[SchemaFormat] = { + (ShExFormat.availableFormats ++ + ShaclFormat.availableFormats).distinct + } override val defaultFormat: SchemaFormat = ShExC } - -/** Represents the mime-type "text/shex" - */ -case object ShExC - extends SchemaFormat( - formatName = "ShExC", - formatMimeType = new MediaType("text", "shex") - ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShExFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShExFormat.scala new file mode 100644 index 00000000..17a8a01f --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShExFormat.scala @@ -0,0 +1,43 @@ +package es.weso.rdfshape.server.api.format.dataFormats.schemaFormats + +import es.weso.rdfshape.server.api.format.dataFormats._ +import es.weso.rdfshape.server.api.format.{Format, FormatCompanion} +import org.http4s.MediaType + +/** Dummy class to differentiate shapemap formats from the more generic DataFormat + * @see {@link DataFormat} + */ +class ShExFormat(formatName: String, formatMimeType: MediaType) + extends SchemaFormat(formatName, formatMimeType) { + def this(format: Format) = { + this(format.name, format.mimeType) + } +} + +/** Companion object with all SchemaFormat static utilities + */ +object ShExFormat extends FormatCompanion[ShExFormat] { + + override lazy val availableFormats: List[ShExFormat] = + List( + ShExC, + ShExJ + ) + override val defaultFormat: ShExFormat = ShExC +} + +/** Represents the mime-type "text/shexc" + */ +case object ShExC + extends ShExFormat( + formatName = "ShExC", + formatMimeType = new MediaType("text", "shexc") + ) + +/** Represents the mime-type "text/shexj" + */ +case object ShExJ + extends ShExFormat( + formatName = "ShExJ", + formatMimeType = new MediaType("text", "shexj") + ) diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShaclFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShaclFormat.scala new file mode 100644 index 00000000..c3637040 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/ShaclFormat.scala @@ -0,0 +1,32 @@ +package es.weso.rdfshape.server.api.format.dataFormats.schemaFormats + +import es.weso.rdfshape.server.api.format.dataFormats._ +import es.weso.rdfshape.server.api.format.{Format, FormatCompanion} +import org.http4s.MediaType + +/** Dummy class to differentiate shapemap formats from the more generic DataFormat + * + * @see {@link DataFormat} + */ +class ShaclFormat(formatName: String, formatMimeType: MediaType) + extends SchemaFormat(formatName, formatMimeType) { + def this(format: Format) = { + this(format.name, format.mimeType) + } +} + +/** Companion object with all SchemaFormat static utilities + */ +object ShaclFormat extends FormatCompanion[ShaclFormat] { + + override lazy val availableFormats: List[ShaclFormat] = + List( + new ShaclFormat(Turtle), + new ShaclFormat(NTriples), + new ShaclFormat(NQuads), + new ShaclFormat(Trig), + new ShaclFormat(JsonLd), + new ShaclFormat(RdfXml) + ) + override val defaultFormat: ShaclFormat = new ShaclFormat(Turtle) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala index 2d4fbe4a..03317d02 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/DataSource.scala @@ -2,7 +2,7 @@ package es.weso.rdfshape.server.api.routes.data.logic /** Enumeration of the different possible Data sources sent by the client. 
* The source sent indicates the API if the schema was sent in raw text, as a URL - * to be fetched or as a text file containing the schema. + * to be fetched or as a text file containing the data. * In case the client submits the data in several formats, the selected source will indicate the preferred one. */ private[api] object DataSource extends Enumeration { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala index e448332f..eadeac37 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/aux/InferenceCodecs.scala @@ -1,6 +1,7 @@ package es.weso.rdfshape.server.api.routes.data.logic.aux -import es.weso.rdf.{InferenceEngine, NONE} +import es.weso.rdf.InferenceEngine +import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.routes.data.logic.types.Data import io.circe.{Decoder, Encoder, HCursor, Json} @@ -23,7 +24,7 @@ private[data] object InferenceCodecs { inference = InferenceEngine .fromString(inferenceName) .toOption - .getOrElse(NONE) + .getOrElse(ApiDefaults.defaultInferenceEngine) } yield inference } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala index 7a487e82..2b8c8807 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala @@ -62,7 +62,6 @@ private[api] object DataConvert extends LazyLogging { ("message", Json.fromString(dataConversion.successMessage)), ("data", 
dataConversion.inputData.asJson), ("result", dataConversion.result.asJson), - ("inputDataFormat", dataConversion.inputData.format.asJson), ("targetDataFormat", dataConversion.result.format.asJson) ) ) @@ -71,14 +70,14 @@ private[api] object DataConvert extends LazyLogging { /** Perform the actual conversion operation between RDF text formats * * @param inputData Input conversion data - * @param targetFormat Target - * @return A new Data instance + * @param targetFormat Target format + * @return A new [[Data]] instance */ def dataConvert( inputData: Data, targetFormat: DataFormat ): IO[DataConvert] = { - logger.info(s"Conversion target format: $targetFormat") + logger.info(s"Data conversion target format: ${targetFormat.name}") for { // Get a handle to the RDF resource rdf <- inputData.toRdf() diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala index 6632d751..f8b7d42b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataExtract.scala @@ -7,7 +7,7 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.nodes.{IRI, Lang} import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.definitions.ApiDefaults._ -import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.data.logic.operations.DataExtract.{ DataExtractResult, successMessage diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala index 
9e694a97..45b84ed6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataInfo.scala @@ -21,7 +21,7 @@ import io.circe.{Encoder, Json} final case class DataInfo private ( override val inputData: Data, result: DataInfoResult -) extends DataOperation(successMessage, inputData) {} +) extends DataOperation(successMessage, inputData) /** Static utilities to obtain information about RDF data */ @@ -30,8 +30,8 @@ private[api] object DataInfo { /** Given an input data, get information about it * - * @param data Input Data object of any type (Simple, Compound...) - * @return A [[DataInfo]] object with the information of the input data + * @param data Input Data instance of any type (Simple, Compound...) + * @return A [[DataInfo]] instance with the information of the input data */ def dataInfo(data: Data): IO[DataInfo] = for { @@ -49,6 +49,7 @@ private[api] object DataInfo { } yield DataInfo( inputData = data, result = DataInfoResult( + data = data, numberOfStatements = nStatements, prefixMap = prefixMap, predicates = predicates.toSet @@ -56,8 +57,14 @@ private[api] object DataInfo { ) /** Case class representing the results to be returned when performing a data-info operation + * + * @param data Data operated on + * @param numberOfStatements Number of statements in the data + * @param prefixMap Prefix map in the data + * @param predicates Set of predicates in the data */ final case class DataInfoResult private ( + data: Data, numberOfStatements: Int, prefixMap: PrefixMap, predicates: Set[IRI] @@ -66,15 +73,18 @@ private[api] object DataInfo { /** Encoder for [[DataInfoResult]] */ private implicit val encodeDataInfoResult: Encoder[DataInfoResult] = - (dataInfo: DataInfoResult) => + (dataInfoResult: DataInfoResult) => Json.fromFields( List( - ("numberOfStatements", dataInfo.numberOfStatements.asJson), - ("prefixMap", 
prefixMap2Json(dataInfo.prefixMap)), + ("numberOfStatements", dataInfoResult.numberOfStatements.asJson), + ("format", dataInfoResult.data.format.asJson), + ("prefixMap", prefixMap2JsonArray(dataInfoResult.prefixMap)), ( "predicates", Json.fromValues( - dataInfo.predicates.map(iri2Json(_, Some(dataInfo.prefixMap))) + dataInfoResult.predicates.map( + iri2Json(_, Some(dataInfoResult.prefixMap)) + ) ) ) ) @@ -88,14 +98,7 @@ private[api] object DataInfo { List( ("message", Json.fromString(dataInfo.successMessage)), ("data", dataInfo.inputData.asJson), - ( - "result", - dataInfo.result.asJson.deepMerge( - Json.fromFields( - List(("format", dataInfo.inputData.format.asJson)) - ) - ) - ) + ("result", dataInfo.result.asJson) ) ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala index a5cff4bf..cac92bfd 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala @@ -1,6 +1,5 @@ package es.weso.rdfshape.server.api.routes.data.logic.operations -import es.weso.rdfshape.server.api.routes.data.logic.operations.DataOperation.successMessage import es.weso.rdfshape.server.api.routes.data.logic.types.Data /** General definition of operations that operate on Data @@ -8,12 +7,12 @@ import es.weso.rdfshape.server.api.routes.data.logic.types.Data * @param successMessage Message attached to the result of the operation * @param inputData Data operated on */ -abstract class DataOperation( - val successMessage: String = successMessage, +private[operations] abstract class DataOperation( + val successMessage: String = DataOperation.successMessage, val inputData: Data ) -object DataOperation { +private[operations] object DataOperation { /** Dummy 
success message */ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala index e8c97c4d..1e1ade25 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala @@ -18,14 +18,17 @@ import io.circe.{Decoder, Encoder, HCursor} */ trait Data { - /** Raw RDF content represented as a String (Right) - * An error occurred when trying to parse the data (Left) + /** Either the raw RDF content represented as a String, + * or the error occurred when trying to parse the data */ lazy val rawData: Either[String, String] = Left("") /** Source where the data comes from */ val dataSource: DataSource + + /** Format of the data + */ val format: Option[DataFormat] = None /** Given an RDF source of data, try to parse it and get the RDF model representation @@ -38,6 +41,8 @@ trait Data { object Data extends DataCompanion[Data] { /** Dummy implementation meant to be overridden + * + * @note Resort by default to [[DataSingle]]'s empty representation */ override val emptyData: Data = DataSingle.emptyData @@ -54,25 +59,24 @@ object Data extends DataCompanion[Data] { /** Dummy implementation meant to be overridden * If called on a general [[Data]] instance, pattern match among the available data types to * use the correct implementation - * - * @note Defaults to [[DataSingle]]'s implementation of decoding data */ implicit val decodeData: Decoder[Data] = (cursor: HCursor) => { this.getClass match { + case ds if ds == classOf[DataSingle] => DataSingle.decodeData(cursor) case de if de == classOf[DataEndpoint] => DataEndpoint.decodeData(cursor) case dc if dc == classOf[DataCompound] => DataCompound.decodeData(cursor) - case _ => DataSingle.decodeData(cursor) } } /** General implementation delegating on 
subclasses */ override def mkData(partsMap: PartsMap): IO[Either[String, Data]] = for { + // 1. Make some checks on the parameters to distinguish between Data types compoundData <- partsMap.optPartValue(CompoundDataParameter.name) paramEndpoint <- partsMap.optPartValue(EndpointParameter.name) + // 2. Delegate on the correct sub-class for creating the Data maybeData <- { - // Create one of: Simple Data, Compound Data or Endpoint Data // 1. Compound data if(compoundData.isDefined) DataCompound.mkData(partsMap) // 2. Endpoint data @@ -84,28 +88,28 @@ object Data extends DataCompanion[Data] { } yield maybeData } -/** Static utilities to be used with Data representations +/** Static utilities to be used with [[Data]] representations * - * @tparam D Specific data representation to be handled + * @tparam D Specific [[Data]] representation to be handled */ -trait DataCompanion[D <: Data] extends LazyLogging { +private[data] trait DataCompanion[D <: Data] extends LazyLogging { - /** Empty instance of the data representation in use + /** Empty instance of the [[Data]] representation in use */ val emptyData: D - /** Encoder used to transform Data instances to JSON values + /** Encoder used to transform [[Data]] instances to JSON values */ implicit val encodeData: Encoder[D] - /** Decoder used to extract Data instances from JSON values + /** Decoder used to extract [[Data]] instances from JSON values */ implicit val decodeData: Decoder[D] - /** Given a request's parameters, try to extract an instance of Data (type D) from them + /** Given a request's parameters, try to extract an instance of [[Data]] (type [[D]]) from them * * @param partsMap Request's parameters - * @return Either the Data instance or an error message + * @return Either the [[Data]] instance or an error message */ def mkData(partsMap: PartsMap): IO[Either[String, D]] } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala index 31508022..7519a04b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala @@ -89,14 +89,7 @@ private[api] object DataEndpoint extends DataCompanion[DataEndpoint] { } else Left("No endpoint provided") - } yield maybeData.flatMap(dataEndpoint => - /* Check if the created data is empty, then an error occurred when - * fetching the endpoint on creation */ - dataEndpoint.rawData.fold( - err => Left(err), - _ => Right(dataEndpoint) - ) - ) + } yield maybeData.flatMap(_.rawData.flatMap(_ => maybeData)) override implicit val decodeData: Decoder[DataEndpoint] = (cursor: HCursor) => { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala index c7e2ad1e..5a831b47 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala @@ -17,23 +17,23 @@ import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import io.circe._ import io.circe.syntax.EncoderOps -/** Data class representing a single RDF data instance with its current format and source +/** Data class representing a single RDF data instance with its inner content, format and source * * @note Invalid initial data is accepted, but may cause errors when operating with it. 
* @param dataPre RDF data, as it is received before being processed depending on the [[dataSource]] * @param dataFormat Data format * @param inference Data inference - * @param dataSource Active source, used to know how to process the raw data + * @param dataSource Origin source, used to know how to process the raw data */ sealed case class DataSingle( private val dataPre: Option[String], - dataFormat: DataFormat, + private val dataFormat: DataFormat, inference: InferenceEngine, override val dataSource: DataSource ) extends Data with LazyLogging { - /** Given the (user input) for the data and its source, fetch the Data contents using the input in the way the source needs it + /** Given the user input [[dataPre]] for the data and its source, fetch the Data contents using the input in the way the source needs it * (e.g.: for URLs, fetch the input with a web request; for files, decode the input; for raw data, do nothing) * * @return Either an error creating the raw data or a String containing the final text @@ -43,7 +43,7 @@ sealed case class DataSingle( case None => Left("Could not build the RDF from empty data") case Some(userData) => dataSource match { - case DataSource.TEXT | // Raw test input by user + case DataSource.TEXT | // Raw text input by user DataSource.FILE | // File input already decoded to string DataSource.COMPOUND => // Compound data already processed by server Right(userData) @@ -58,7 +58,8 @@ sealed case class DataSingle( } } - override val format: Option[DataFormat] = Some(dataFormat) + // Override and make publicly available the trait properties + override val format: Option[DataFormat] = Option(dataFormat) /** Given an RDF source of data, try to get the RDF model representation * @@ -125,27 +126,23 @@ private[api] object DataSingle extends DataCompanion[DataSingle] with LazyLogging { - /** Empty data representation, with no inner data and all defaults to none + /** Empty data representation, with no inner data and all defaults or None */ 
override lazy val emptyData: DataSingle = DataSingle( - dataPre = emptyDataValue, - dataFormat = DataFormat.defaultFormat, + dataPre = None, + dataFormat = ApiDefaults.defaultDataFormat, inference = NONE, - dataSource = DataSource.defaultDataSource + dataSource = ApiDefaults.defaultDataSource ) - /** Placeholder value used for the raw data whenever an empty data is issued/needed. - */ - val emptyDataValue: Option[String] = None - override implicit val encodeData: Encoder[DataSingle] = (data: DataSingle) => Json.obj( ("data", data.rawData.toOption.asJson), - ("source", data.dataSource.asJson), ("format", data.dataFormat.asJson), - ("inference", data.inference.asJson) + ("inference", data.inference.asJson), + ("source", data.dataSource.asJson) ) override def mkData(partsMap: PartsMap): IO[Either[String, DataSingle]] = @@ -168,22 +165,15 @@ private[api] object DataSingle dataSource = paramDataSource.getOrElse(DataSource.defaultDataSource) _ = logger.debug(s"RDF Data received - Source: $dataSource") - // Base for the result - base = DataSingle.emptyData.copy( - dataFormat = format, - inference = inference - ) - // Create the data instance - data = base.copy( + data = DataSingle( dataPre = paramData, + dataFormat = format, + inference = inference, dataSource = dataSource ) - } yield data.rawData.fold( - err => Left(err), - _ => Right(data) - ) + } yield data.rawData.map(_ => data) /** @param inferenceStr String representing the inference value * @return Optionally, the inference contained in a given data string @@ -195,32 +185,30 @@ private[api] object DataSingle } override implicit val decodeData: Decoder[DataSingle] = - (cursor: HCursor) => { + (cursor: HCursor) => for { data <- cursor.downField("data").as[Option[String]] dataFormat <- cursor .downField("dataFormat") .as[RDFFormat] - .orElse(Right(ApiDefaults.defaultRdfFormat)) dataInference <- cursor .downField("inference") - .as[Option[InferenceEngine]] + .as[InferenceEngine] dataSource <- cursor 
.downField("dataSource") .as[DataSource] .orElse(Right(DataSource.defaultDataSource)) - base = DataSingle.emptyData.copy( + decoded = DataSingle.emptyData.copy( dataPre = data, dataFormat = dataFormat, dataSource = dataSource, - inference = dataInference.getOrElse(NONE) + inference = dataInference ) - } yield base - } + } yield decoded } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala index a56d1603..f4fe4641 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/merged/DataCompound.scala @@ -101,14 +101,9 @@ private[api] object DataCompound .fromJsonString(compoundData.get) .leftMap(err => s"Could not read compound data.\n $err") } else Left("No compound data provided") - } yield maybeData.flatMap(dataCompound => /* Check if the created data is empty, then an error occurred when merging * the elements */ - dataCompound.rawData.fold( - err => Left(err), - _ => Right(dataCompound) - ) - ) + } yield maybeData.flatMap(_.rawData.flatMap(_ => maybeData)) } /** Encoder used to transform CompoundData instances to JSON values diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index f41eb3d1..a3cfef20 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -9,6 +9,7 @@ import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ } import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import 
es.weso.rdfshape.server.api.format.dataFormats._ +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.ShExC import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.data.logic.DataSource import es.weso.rdfshape.server.api.routes.data.logic.operations.{ @@ -50,7 +51,8 @@ class DataService(client: Client[IO]) /** Returns a JSON array with the accepted input or output RDF data formats */ case GET -> Root / `api` / `verb` / "formats" / "input" => - val formatNames = RDFFormat.availableFormats.map(_.name) + val formats = RDFFormat.availableFormats ++ HtmlFormat.availableFormats + val formatNames = formats.map(_.name) val json = Json.fromValues(formatNames.map(Json.fromString)) Ok(json) @@ -117,26 +119,21 @@ class DataService(client: Client[IO]) err => errorResponseJson(err, InternalServerError), // Else, try and compute the data info data => - for { - // Check for exceptions when getting the data info - maybeResult <- DataInfo.dataInfo(data).attempt - response <- maybeResult.fold( - // Error: return it - err => - // Legacy code may return exceptions with "null" messages - err.getMessage match { - case errorMessage: String => - errorResponseJson(errorMessage, InternalServerError) - case _ => // null exception message, return a general error message - errorResponseJson( - DataServiceError.couldNotParseData, - InternalServerError - ) - }, - // Success: build successful response - dataInfo => Ok(dataInfo.asJson) + DataInfo + .dataInfo(data) + .flatMap(info => Ok(info.asJson)) + .handleErrorWith(err => + // Legacy code may return exceptions with "null" messages + err.getMessage match { + case errorMessage: String => + errorResponseJson(errorMessage, InternalServerError) + case _ => // null exception message, return a general error message + errorResponseJson( + DataServiceError.couldNotParseData, + InternalServerError + ) + } ) - } yield response ) } yield response } @@ -160,9 +157,13 @@ class DataService(client: 
Client[IO]) optTargetFormatStr <- partsMap.optPartValue( TargetDataFormatParameter.name ) + optTargetFormat = for { targetFormatStr <- optTargetFormatStr - targetFormat <- DataFormat.fromString(targetFormatStr).toOption + // Standard data format or graphical format + targetFormat <- DataFormat + .fromString(targetFormatStr) + .toOption } yield targetFormat // Abort if no valid target format, else continue @@ -178,30 +179,22 @@ class DataService(client: Client[IO]) err => errorResponseJson(err, InternalServerError), // Else, try and compute the data conversion data => - for { - // Check for exceptions when converting the data - maybeResult <- DataConvert - .dataConvert(data, targetFormat) - .attempt - response <- maybeResult.fold( - // Error: return it - err => - /* Legacy code may return exceptions with "null" - * messages */ - err.getMessage match { - case errorMessage: String => - errorResponseJson(errorMessage, InternalServerError) - case _ => // null exception message, return a general error message - err.printStackTrace() - errorResponseJson( - DataServiceError.couldNotParseData, - InternalServerError - ) - }, - // Success: build successful response - dataConversion => Ok(dataConversion.asJson) + // Check for exceptions when converting the data + DataConvert + .dataConvert(data, targetFormat) + .flatMap(conversion => Ok(conversion.asJson)) + .handleErrorWith(err => + err.getMessage match { + case errorMessage: String => + errorResponseJson(errorMessage, InternalServerError) + case _ => // null exception message, return a general error message + err.printStackTrace() + errorResponseJson( + DataServiceError.couldNotParseData, + InternalServerError + ) + } ) - } yield response ) } } yield response @@ -244,16 +237,12 @@ class DataService(client: Client[IO]) { // Destructure tuple case (data, query) => - for { - maybeDataQuery <- DataQuery - .dataQuery(data, query) - .attempt - response <- maybeDataQuery.fold( - err => - errorResponseJson(err.getMessage, 
InternalServerError), - dataQuery => Ok(dataQuery.asJson) + DataQuery + .dataQuery(data, query) + .flatMap(result => Ok(result.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) ) - } yield response // Generic error. Code should not reach here. case _ => @@ -306,25 +295,19 @@ class DataService(client: Client[IO]) BadRequest ) case Some(nodeSelector) => - for { - maybeResult <- DataExtract - .dataExtract( - data, - nodeSelector, - Option(ShExSchema.empty), - Option(ShExC), - optLabel, - relativeBase = None - ) - .attempt // Check for exceptions when extracting - response <- maybeResult.fold( - // Error in extraction: return the error - err => - errorResponseJson(err.getMessage, InternalServerError), - // Success: build successful response - dataExtraction => Ok(dataExtraction.asJson) + DataExtract + .dataExtract( + data, + nodeSelector, + Option(ShExSchema.empty), + Option(ShExC), + optLabel, + relativeBase = None + ) + .flatMap(result => Ok(result.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) ) - } yield response } ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala deleted file mode 100644 index 9584ce09..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/Schema.scala +++ /dev/null @@ -1,254 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.logic - -import cats.effect._ -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngineName -import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.getBase -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ 
-import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.schema.{Schemas, Schema => SchemaW} - -import scala.io.Source -import scala.util.Try - -sealed case class Schema( - schema: Option[String], - schemaUrl: Option[String], - schemaFile: Option[String], - schemaFormat: SchemaFormat, - schemaEngine: Option[String], - targetSchemaEngine: Option[String], - targetSchemaFormat: Option[String], - activeSchemaSource: Option[String] -) extends LazyLogging { - - def getSchema( - data: Option[RDFReasoner] - ): IO[(Option[String], Either[String, SchemaW])] = { - - logger.debug(s"activeSchemaTab: $activeSchemaSource") - logger.debug(s"schemaEngine: $schemaEngine") - val inputType = activeSchemaSource match { - case Some(a) => parseSchemaTab(a) - case None if schema.isDefined => Right(SchemaTextAreaType) - case None if schemaUrl.isDefined => Right(SchemaUrlType) - case None if schemaFile.isDefined => Right(SchemaFileType) - case None => Right(SchemaTextAreaType) - } - logger.debug(s"inputType: $inputType") - val maybeSchema: IO[(Option[String], Either[String, SchemaW])] = - inputType match { - case Right(`SchemaUrlType`) => - logger.debug("Schema input type - SchemaUrlType") - schemaUrl match { - case None => IO((None, Left(s"Non value for schemaUrl"))) - case Some(schemaUrl) => - val e: IO[(String, SchemaW)] = for { - str <- IO.fromEither( - Try(Source.fromURL(schemaUrl).mkString).toEither - ) - schema <- Schemas.fromString( - str, - schemaFormat.name, - schemaEngine.getOrElse(defaultSchemaEngineName), - getBase - ) - _ <- IO { - logger.debug("Schema parsed") - } - } yield (str, schema) - e.attempt.map( - _.fold( - s => (none[String], s.getMessage.asLeft[SchemaW]), - pair => { - val (str, schema) = pair - (Some(str), Right(schema)) - } - ) - ) - } - case Right(`SchemaFileType`) => - logger.debug("Schema input type - SchemaFileType") - schemaFile match { - case None => IO((None, Left(s"No value for schemaFile"))) - case Some(schemaStr) => - val 
schemaFormatStr = - schemaFormat.name - val schemaEngineStr = - schemaEngine.getOrElse(defaultSchemaEngineName) - Schemas - .fromString( - schemaStr, - schemaFormatStr, - schemaEngineStr, - getBase - ) - .attempt - .map( - _.fold( - s => (Some(schemaStr), Left(s"Error parsing file: $s")), - schema => (Some(schemaStr), Right(schema)) - ) - ) - } - case Right(`SchemaTextAreaType`) => - logger.debug("Schema input type - SchemaTextAreaType") - val schemaStr = schema.getOrElse("") - for { - pair <- Schemas - .fromString( - schemaStr, - schemaFormat.name, - schemaEngine.getOrElse(defaultSchemaEngineName), - getBase - ) - .attempt - .map( - _.fold( - err => { - /* TODO: some specific malformed schemas produce a - * NullPointerException with no further message */ - val msg = - if(err.getMessage == null) "Unknown error." - else err.getMessage - (Some(schemaStr), Left(msg)) - }, - schema => (Some(schemaStr), Right(schema)) - ) - ) - (str, eitherSchema) = pair - nameSchema = eitherSchema.map(_.name).getOrElse(s"No schema") - _ <- IO { - logger.debug(s"nameSchema: $nameSchema") - } - foundSchema <- Schemas.lookupSchema( - schemaEngine.getOrElse(defaultSchemaEngineName) - ) - _ <- IO { - logger.debug(s"foundSchema: ${foundSchema.name}") - } - } yield pair - case Right(other) => - logger.warn(s"Unknown value for activeSchemaTab: $other") - IO((None, Left(s"Unknown value for activeSchemaTab: $other"))) - case Left(msg) => - logger.warn(msg) - IO((None, Left(msg))) - } - - maybeSchema - } - - def parseSchemaTab(tab: String): Either[String, SchemaInputType] = { - val inputTypes = List(SchemaUrlType, SchemaFileType, SchemaTextAreaType) - inputTypes.find(_.id == tab) match { - case Some(x) => Right(x) - case None => - Left( - s"Wrong value of tab: $tab, must be one of [${inputTypes.map(_.id).mkString(",")}]" - ) - } - } - - sealed abstract class SchemaInputType { - val id: String - } - - case object SchemaUrlType extends SchemaInputType { - override val id = "#schemaUrl" - } - - case 
object SchemaFileType extends SchemaInputType { - override val id = "#schemaFile" - } - - case object SchemaTextAreaType extends SchemaInputType { - override val id = "#schemaTextArea" - } - -} - -object Schema extends LazyLogging { - - private[api] def mkSchema( - partsMap: PartsMap, - data: Option[RDFReasoner] - ): IO[(SchemaW, Schema)] = { - val result: IO[Either[String, (SchemaW, Schema)]] = for { - sp <- { - mkSchema(partsMap) - } - eitherPair <- sp.getSchema(data).attempt - resp <- eitherPair.fold( - err => IO.pure(Left(err.getMessage)), - pair => { - val (maybeStr, maybeSchema) = pair - maybeSchema match { - // TODO: HERE "SRT" ERROR IS NULL - case Left(str) => IO.pure(Left(str)) - case Right(schema) => - IO.pure(Right((schema, sp.copy(schema = maybeStr)))) - } - } - ) - } yield resp - result.flatMap( - _.fold( - errMsg => { - logger.error(errMsg) - IO.raiseError( - new RuntimeException(s"Could not obtain schema. $errMsg") - ) - }, - IO.pure - ) - ) - } - - private[api] def mkSchema(partsMap: PartsMap): IO[Schema] = for { - schema <- partsMap.optPartValue(SchemaParameter.name) - schemaUrl <- partsMap.optPartValue(SchemaUrlParameter.name) - schemaFile <- partsMap.optPartValue(SchemaFileParameter.name) - optSchemaFormat <- SchemaFormat.fromRequestParams( - SchemaFormatParameter.name, - partsMap - ) - schemaEngine <- partsMap.optPartValue(SchemaEngineParameter.name) - targetSchemaEngine <- partsMap.optPartValue( - TargetSchemaEngineParameter.name - ) - targetSchemaFormat <- partsMap.optPartValue( - TargetSchemaFormatParameter.name - ) - activeSchemaSource <- partsMap.optPartValue( - SchemaSourceParameter.name - ) - } yield { - Schema( - schema = schema, - schemaUrl = schemaUrl, - schemaFile = schemaFile, - schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), - schemaEngine = schemaEngine, - targetSchemaEngine = targetSchemaEngine, - targetSchemaFormat = targetSchemaFormat, - activeSchemaSource = activeSchemaSource - ) - } - - private[api] def 
empty: Schema = - Schema( - schema = None, - schemaUrl = None, - schemaFile = None, - schemaFormat = SchemaFormat.defaultFormat, - schemaEngine = None, - targetSchemaEngine = None, - targetSchemaFormat = None, - activeSchemaSource = None - ) - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala deleted file mode 100644 index 5cb525fc..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaConversionResult.scala +++ /dev/null @@ -1,82 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.logic - -import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField -import es.weso.shapemaps.ShapeMap -import io.circe.Json -import io.circe.syntax._ - -/** Data class representing the output of an conversion operation (input schema -> output schema) - * - * @param msg Output informational message after processing. Used in case of error. 
- * @param schema Input schema - * @param schemaFormat Input schema format - * @param schemaEngine Input schema engine - * @param targetSchemaFormat Target schema format - * @param targetSchemaEngine Target schema engine - * @param result Output schema - * @param resultShapeMap Output shapemap - */ -case class SchemaConversionResult( - msg: String, - schema: Option[String], - schemaFormat: Option[String], - schemaEngine: Option[String], - targetSchemaFormat: Option[String], - targetSchemaEngine: Option[String], - result: Option[String], - resultShapeMap: Option[ShapeMap] -) { - - /** Convert a conversion result to its JSON representation - * - * @return JSON information of the conversion result - */ - def toJson: Json = Json.fromFields( - List( - ("message", Json.fromString(msg)) - ) ++ - maybeField("schema", schema, Json.fromString) ++ - maybeField("schemaFormat", schemaFormat, Json.fromString) ++ - maybeField("schemaEngine", schemaEngine, Json.fromString) ++ - maybeField("targetSchemaFormat", targetSchemaFormat, Json.fromString) ++ - maybeField("targetSchemaEngine", targetSchemaEngine, Json.fromString) ++ - maybeField("result", result, Json.fromString) ++ - maybeField( - "shapeMap", - resultShapeMap, - (sm: ShapeMap) => sm.toString.asJson - ) - ) -} - -object SchemaConversionResult { - - /** @param msg Error message contained in the result - * @return A SchemaConversionResult consisting of a single error message and no data - */ - def fromMsg(msg: String): SchemaConversionResult = - SchemaConversionResult(msg, None, None, None, None, None, None, None) - - /** @return A SchemaConversionResult, given all the parameters needed to build it (schemas, formats, results, etc.) 
- */ - def fromConversion( - source: String, - schemaFormat: String, - schemaEngine: String, - optTargetSchemaFormat: Option[String], - optTargetSchemaEngine: Option[String], - result: String, - resultShapeMap: ShapeMap - ): SchemaConversionResult = - SchemaConversionResult( - s"Conversion ($schemaFormat/$schemaEngine) => (${optTargetSchemaFormat - .getOrElse("")}/${optTargetSchemaEngine.getOrElse("")}) successful", - Some(source), - Some(schemaFormat), - Some(schemaEngine), - optTargetSchemaFormat, - optTargetSchemaEngine, - Some(result), - Some(resultShapeMap) - ) -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala deleted file mode 100644 index 59d9fcf8..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfo.scala +++ /dev/null @@ -1,66 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.logic - -import com.typesafe.scalalogging.LazyLogging -import io.circe.Json - -/** Data class representing the data contained in a schema - * - * @param schemaType Name of the schema - * @param schemaEngine Engine of the schema - * @param wellFormed Is the schema well formed - * @param shapes List of shapes in the schema - * @param shapesPrefixMap Prefix map of the shapes in the schema - * @param errors Errors in the schema - */ -private[schema] case class SchemaInfo( - schemaType: Option[String], - schemaEngine: Option[String], - wellFormed: Boolean, - shapes: List[String], - shapesPrefixMap: List[(String, String)], - errors: List[String] -) { - - /** Transform a Schema Info result to a JSON representation - * - * @return JSON representation of the schema information - */ - def toJson: Json = Json.fromFields( - List( - ("schemaType", schemaType.fold(Json.Null)(Json.fromString)), - ("schemaEngine", schemaEngine.fold(Json.Null)(Json.fromString)), - ("wellFormed", 
Json.fromBoolean(wellFormed)), - ("shapes", Json.fromValues(shapes.map(Json.fromString))), - ( - "shapesPrefixMap", - Json.fromValues( - shapesPrefixMap.map(pair => - Json.fromFields( - List( - ("prefix", Json.fromString(pair._1)), - ("uri", Json.fromString(pair._2)) - ) - ) - ) - ) - ), - ("error", Json.fromValues(errors.map(Json.fromString))) - ) - ) -} - -/** Static utilities of the SchemaInfoReply class - */ -object SchemaInfo extends LazyLogging { - - /** Create an empty SchemaInfoReply with an error message. - * Used when errors occur extracting the schema information - * - * @param msg Message attached to the failing schema - * @return Empty SchemaInfoReply object with no data except for an error message - */ - def fromError(msg: String): SchemaInfo = { - logger.debug(s"SchemaInfoReply from $msg") - SchemaInfo(None, None, wellFormed = false, List(), List(), List(msg)) - } -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfoResult.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfoResult.scala deleted file mode 100644 index 41daf028..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaInfoResult.scala +++ /dev/null @@ -1,11 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.logic - -import io.circe.Json - -case class SchemaInfoResult( - schema: String, - schemaFormat: String, - schemaEngine: String, - shapes: Json, - prefixMap: Json -) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala deleted file mode 100644 index 37feb280..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/SchemaOperations.scala +++ /dev/null @@ -1,360 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.logic - -import 
cats.effect.IO -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.nodes.IRI -import es.weso.rdf.{InferenceEngine, RDFBuilder, RDFReasoner} -import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions -import es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat -import es.weso.schema.{Result, Schema, ShaclexSchema, ValidationTrigger} -import es.weso.shacl.converter.Shacl2ShEx -import es.weso.shapemaps.ShapeMap -import es.weso.uml.Schema2UML -import io.circe.Json - -/** Static utilities used by the {@link es.weso.rdfshape.server.api.routes.schema.service.SchemaService} - * to operate on schemas - */ -private[api] object SchemaOperations extends LazyLogging { - - /** Long value used as a "no time" value for errored validations - */ - private val NoTime = 0L - - /** Obtain the information from an schema - * - * @param schema Input schema - * @return Schema information as a data instance of {@link SchemaInfo}. 
- */ - def schemaInfo(schema: Schema): SchemaInfo = { - val info = schema.info - SchemaInfo( - Some(info.schemaName), - Some(info.schemaEngine), - info.isWellFormed, - schema.shapes, - schema.pm.pm.toList.map { case (prefix, iri) => (prefix.str, iri.str) }, - info.errors - ) - } - - /** @param schema Input schema - * @return JSON representation of the schema as a Cytoscape graph to be drawn on clients (or an error message) - */ - // TODO: return another status code on failure, so that clients can handle it - def schemaCytoscape(schema: Schema): Json = { - val eitherJson = for { - pair <- Schema2UML.schema2UML(schema) - } yield { - val (uml, _) = pair - uml.toJson - } - eitherJson.fold( - e => - Json.fromFields( - List( - ("error", Json.fromString(s"Error converting to schema 2 JSON: $e")) - ) - ), - identity - ) - } - - /** @param schema Input schema - * @return JSON representation of the schema as a Graphviz graph to be drawn on clients (or an error message) - */ - // TODO: return another status code on failure, so that clients can handle it - def schemaVisualize(schema: Schema): IO[Json] = for { - pair <- schema2SVG(schema) - } yield { - val (svg, plantuml) = pair - val info = schema.info - val fields: List[(String, Json)] = - List( - ("schemaType", Json.fromString(info.schemaName)), - ("schemaEngine", Json.fromString(info.schemaEngine)), - ("svg", Json.fromString(svg)), - ("plantUml", Json.fromString(plantuml)) - ) - Json.fromFields(fields) - } - - def schema2SVG(schema: Schema): IO[(String, String)] = { - val eitherUML = Schema2UML.schema2UML(schema) - eitherUML.fold( - e => { - val errMsg = s"Error in SVG conversion: $e" - logger.error(errMsg) - IO.raiseError(new RuntimeException(errMsg)) - // IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")) - }, - pair => { - val (uml, _) = pair - logger.debug(s"UML converted: $uml") - (for { - str <- uml.toSVG(umlOptions) - } yield { - (str, uml.toPlantUML(umlOptions)) - }).handleErrorWith(e => - IO.raiseError( 
- new RuntimeException(s"SVG conversion error: ${e.getMessage}") - ) - ) - } - ) - } - - /** @param result Schema validation result - * @return JSON representation of the schema validation result - */ - def schemaResult2json(result: Result): IO[Json] = for { - emptyRes <- RDFAsJenaModel.empty - json <- emptyRes.use(emptyBuilder => result.toJson(emptyBuilder)) - } yield json - - /** Get base URI - * - * @return default URI obtained from current folder - */ - def getBase: Option[String] = ApiDefaults.relativeBase.map(_.str) - - /** For a given data (raw text) and schema, attempt to validate it with WESO libraries - * - * @param data Input RDF data - * @param optDataFormat RDF data format (optional) - * @param optSchema Input validation schema (optional) - * @param optSchemaFormat Validation schema format (optional) - * @param optSchemaEngine Validation schema engine (optional) - * @param tp Trigger mode - * @param optInference Validation inference (optional) - * @param relativeBase Relative base (optional) - * @param builder RDF builder - * @return - */ - // TODO: redo -// private[api] def schemaValidateStr( -// data: String, -// optDataFormat: Option[DataFormat], -// optSchema: Option[String], -// optSchemaFormat: Option[SchemaFormat], -// optSchemaEngine: Option[String], -// tp: TriggerMode, -// optInference: Option[String], -// relativeBase: Option[IRI], -// builder: RDFBuilder -// ): IO[(Result, Option[ValidationTrigger], Long)] = { -// val dp = DataSingle.empty.copy( -// data = Some(data), -// optDataFormat = optDataFormat, -// inference = optInference -// ) -// val sp = Schema.empty.copy( -// schema = optSchema, -// schemaFormat = optSchemaFormat.getOrElse(SchemaFormat.defaultFormat), -// schemaEngine = optSchemaEngine -// ) -// -// val result: IO[(Result, Option[ValidationTrigger], Long)] = for { -// pair <- dp.getData(relativeBase) -// (_, resourceRdf) = pair -// result <- resourceRdf.use(rdf => -// for { -// pairSchema <- sp.getSchema(Some(rdf)) -// (_, 
eitherSchema) = pairSchema -// schema <- IO.fromEither( -// eitherSchema.leftMap(s => -// new RuntimeException(s"Error obtaining schema: $s") -// ) -// ) -// res <- schemaValidate(rdf, schema, tp, relativeBase, builder) -// } yield res -// ) -// } yield result -// -// result.attempt.flatMap(_.fold(e => schemaErr(e.getMessage), IO.pure)) -// } - - /** For a given data and schema, attempt to validate it with WESO libraries - * - * @param rdf Input RDF data - * @param schema Input schema - * @param triggerMode Trigger mode - * @param relativeBase Relative base (optional) - * @param builder RDF builder - * @return - */ - def schemaValidate( - rdf: RDFReasoner, - schema: Schema, - triggerMode: TriggerMode, - relativeBase: Option[IRI], - builder: RDFBuilder - ): IO[(Result, Option[ValidationTrigger], Long)] = { - - val base = relativeBase.map(_.str) // Some(FileUtils.currentFolderURL) - val triggerModeStr = triggerMode.triggerModeStr - for { - prefixMap <- rdf.getPrefixMap - shapeMapRaw = triggerMode.shapeMap.rawShapeMap.getOrElse("") - pair <- - ValidationTrigger.findTrigger( - triggerModeStr, - shapeMapRaw, - base, - None, - None, - prefixMap, - schema.pm - ) match { - case Left(msg) => - schemaErr( - s"Cannot obtain trigger: $triggerModeStr\nshapeMap: $shapeMapRaw\nmsg: $msg" - ) - case Right(trigger) => - val run = for { - startTime <- IO { - System.nanoTime() - } - result <- schema.validate(rdf, trigger, builder) - endTime <- IO { - System.nanoTime() - } - time: Long = endTime - startTime - } yield (result, Some(trigger), time) - run.handleErrorWith(e => { - val msg = s"Error validating: ${e.getMessage}" - logger.error(msg) - schemaErr(s"Error validating: ${e.getMessage}") - }) - } - } yield pair - } - - /** Given an error message, return an empty schema validation result containing it - * - * @param msg error message - * @return Empty schema validation result containing the error message - */ - private def schemaErr(msg: String) = - IO((Result.errStr(s"Error: 
$msg"), None, NoTime)) - - /** Given an input schema, convert it to another output schema with the parameters specified. - * - * @param schema Input schema - * @param schemaStr Input schema contents - * @param schemaFormat Input schema format - * @param schemaEngine Input schema engine - * @param optTargetSchemaFormat Output schema desired format - * @param optTargetSchemaEngine Output schema desired engine - * @return Optionally, the raw output schema contents - */ - private[schema] def convertSchema( - schema: Schema, - schemaStr: Option[String], - schemaFormat: SchemaFormat, - schemaEngine: String, - optTargetSchemaFormat: Option[SchemaFormat], - optTargetSchemaEngine: Option[String] - ): IO[SchemaConversionResult] = { - val result: IO[SchemaConversionResult] = for { - pair <- doSchemaConversion( - schema, - optTargetSchemaFormat.map(_.name), - optTargetSchemaEngine - ) - sourceStr <- schemaStr match { - case None => schema.serialize(schemaFormat.name) - case Some(source) => IO(source) - } - (resultStr, resultShapeMap) = pair - } yield SchemaConversionResult.fromConversion( - sourceStr, - schemaFormat.name, - schemaEngine, - optTargetSchemaFormat.map(_.name), - optTargetSchemaEngine, - resultStr, - resultShapeMap - ) - - for { - either <- result.attempt - } yield either.fold( - err => SchemaConversionResult.fromMsg(s"Error converting schema: $err"), - identity - ) - } - - private def doSchemaConversion( - schema: Schema, - targetSchemaFormat: Option[String], - optTargetSchemaEngine: Option[String] - ): IO[(String, ShapeMap)] = { - logger.debug( - s"Schema conversion, name: ${schema.name}, targetSchema: $targetSchemaFormat" - ) - val default = for { - str <- schema.convert(targetSchemaFormat, optTargetSchemaEngine, None) - } yield (str, ShapeMap.empty) - schema match { - case shacl: ShaclexSchema => - optTargetSchemaEngine.map(_.toUpperCase()) match { - case Some("SHEX") => - logger.debug("Schema conversion: SHACLEX -> SHEX") - Shacl2ShEx - 
.shacl2ShEx(shacl.schema) - .fold( - e => - IO.raiseError( - new RuntimeException( - s"Error converting SHACL -> ShEx: $e" - ) - ), - pair => { - val (schema, shapeMap) = pair - logger.debug(s"shapeMap: $shapeMap") - for { - emptyBuilder <- RDFAsJenaModel.empty - str <- emptyBuilder.use(builder => - es.weso.shex.Schema.serialize( - schema, - targetSchemaFormat.getOrElse("SHEXC"), - None, - builder - ) - ) - } yield (str, shapeMap) - } - ) - case _ => default - } - case _ => default - } - } - - /** Apply inference - * - * @param rdf Data over which the inference should be applied - * @param inferenceName Name of the inference to be applied - * @return The RDF data after applying the inference - */ - private[schema] def applyInference( - rdf: RDFReasoner, - inferenceName: Option[String] - ): IO[RDFReasoner] = inferenceName match { - case None => IO.pure(rdf) - case Some(name) => - InferenceEngine.fromString(name) match { - case Left(str) => - IO.raiseError( - new RuntimeException( - s"Error parsing inference engine: $name: $str" - ) - ) - case Right(engine) => rdf.applyInference(engine) - } - } - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala deleted file mode 100644 index dedf1cb3..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/TriggerMode.scala +++ /dev/null @@ -1,85 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.logic - -import cats.effect.IO -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat -import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import 
es.weso.shapemaps.{ShapeMap => ShapeMapW} - -/** Data class representing a TriggerMode and its current source. - * - * @param triggerModeStr Trigger mode name - * @param shapeMap Inner shapemap associated to the TriggerMode - */ -sealed case class TriggerMode private ( - triggerModeStr: String, - shapeMap: ShapeMap -) extends LazyLogging { - - /** Inner shapemap structure of the shapemap contained in this instance - * - * @return A ShapeMap instance used by WESO libraries in validation - */ - lazy val innerShapeMap: Either[String, ShapeMapW] = shapeMap.innerShapeMap -} - -private[api] object TriggerMode extends LazyLogging { - - /** Given a request's parameters, try to extract a TriggerMode instance from them - * - * @param partsMap Request's parameters - * @return Either the trigger mode or an error message - */ - def mkTriggerMode( - partsMap: PartsMap - ): IO[Either[String, TriggerMode]] = { - for { - // Get data sent in que query - triggerMode <- partsMap.optPartValue(TriggerModeParameter.name) - paramShapemap <- partsMap.optPartValue(ShapeMapParameter.name) - - shapeMapFormat <- ShapeMapFormat.fromRequestParams( - ShapeMapFormatParameter.name, - partsMap - ) - activeShapeMapTab <- partsMap.optPartValue( - ShapemapSourceParameter.name - ) - - // Get companion shapemap - maybeShapeMap <- ShapeMap.mkShapeMap( - paramShapemap, - shapeMapFormat, - None, - activeShapeMapTab - ) - - } yield { - maybeShapeMap.flatMap(sm => mkTriggerMode(triggerMode, sm)) - } - - } - - /** Create a TriggerMode instance, given its mode and shapemap - * - * @param triggerMode Optionally, the trigger mode name - * @param shapeMap Optionally, the inner shapemap associated to the TriggerMode - * @return A new TriggerMode based on the given parameters - */ - private def mkTriggerMode( - triggerMode: Option[String], - shapeMap: ShapeMap - ): Either[String, TriggerMode] = { - Right( - TriggerMode( - triggerModeStr = triggerMode.getOrElse(ApiDefaults.defaultTriggerMode), - shapeMap = shapeMap - 
) - ) - - } - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaAdapter.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaAdapter.scala new file mode 100644 index 00000000..28d389d1 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaAdapter.scala @@ -0,0 +1,41 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.aux + +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema +import es.weso.schema.{Schemas, Schema => SchemaW} +import io.circe.{Decoder, Encoder, HCursor, Json} + +/** Adapter, codecs and utils between the server's Schema class ([[Schema]]) + * and shaclex Schema engine types ([[SchemaW]]) + */ +private[schema] object SchemaAdapter { + + /** For a given schema engine name, try to map it to the schema it represents + * + * @param engineName Name (String) of the given schema engine + * @return The schema engine corresponding to the given name, if available + */ + def schemaEngineFromString(engineName: String): Option[SchemaW] = { + Schemas.availableSchemas + .find(schema => schema.name.toLowerCase == engineName.toLowerCase()) + } + + /** Simple encoder for [[SchemaW]] instances, simplifying them to their name + */ + implicit val encodeEngine: Encoder[SchemaW] = (schemaEngine: SchemaW) => + Json.fromString(schemaEngine.name) + + /** Auxiliary decoder for data inference + */ + implicit val decodeEngine: Decoder[SchemaW] = + (cursor: HCursor) => + for { + engineName <- cursor.value.as[String] + + engine = Schemas.availableSchemas + .find( + _.name.toLowerCase == engineName.toLowerCase + ) + .getOrElse(ApiDefaults.defaultSchemaEngine) + } yield engine +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala new file mode 100644 index 00000000..6a2eae66 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala @@ -0,0 +1,102 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.aux + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.{InferenceEngine, RDFReasoner} +import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions +import es.weso.rdfshape.server.api.routes.schema.service.SchemaService +import es.weso.schema.Schema +import es.weso.uml.Schema2UML +import io.circe.Json + +/** Static utilities used by [[SchemaService]] + * to operate on schemas + */ +private[api] object SchemaOperations extends LazyLogging { + + /** Long value used as a "no time" value for errored validations + */ + private val NoTime = 0L + + /** @param schema Input schema + * @return JSON representation of the schema as a Cytoscape graph to be drawn on clients (or an error message) + */ + // TODO: return another status code on failure, so that clients can handle it + def schemaCytoscape(schema: Schema): Json = { + val eitherJson = for { + pair <- Schema2UML.schema2UML(schema) + } yield { + val (uml, _) = pair + uml.toJson + } + eitherJson.fold( + e => + Json.fromFields( + List( + ("error", Json.fromString(s"Error converting to schema 2 JSON: $e")) + ) + ), + identity + ) + } + + /** @param schema Input schema + * @return JSON representation of the schema as a Graphviz graph to be drawn on clients (or an error message) + */ + // TODO: return another status code on failure, so that clients can handle it + def schemaVisualize(schema: Schema): IO[Json] = for { + pair <- schema2SVG(schema) + } yield { + val (svg, plantuml) = pair + val info = schema.info + val fields: List[(String, Json)] = + List( + ("schemaType", Json.fromString(info.schemaName)), + ("schemaEngine", 
Json.fromString(info.schemaEngine)), + ("svg", Json.fromString(svg)), + ("plantUml", Json.fromString(plantuml)) + ) + Json.fromFields(fields) + } + + def schema2SVG(schema: Schema): IO[(String, String)] = { + val eitherUML = Schema2UML.schema2UML(schema) + eitherUML.fold( + err => { + val errMsg = s"Error in SVG conversion: $err" + logger.error(errMsg) + IO.raiseError(new RuntimeException(errMsg)) + // IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")) + }, + pair => { + val (uml, _) = pair + logger.debug(s"UML converted: $uml") + (for { + str <- uml.toSVG(umlOptions) + } yield { + (str, uml.toPlantUML(umlOptions)) + }).handleErrorWith(e => + IO.raiseError( + new RuntimeException(s"SVG conversion error: ${e.getMessage}") + ) + ) + } + ) + } + + /** Apply inference + * + * @param rdf Data over which the inference should be applied + * @param inferenceEngine Inference engine to be applied + * @return The RDF data after applying the inference + * (or the intact data if no inference was provided) + */ + private[schema] def applyInference( + rdf: RDFReasoner, + inferenceEngine: Option[InferenceEngine] + ): IO[RDFReasoner] = inferenceEngine match { + case None => IO.pure(rdf) + case Some(engine) => + rdf.applyInference(engine) + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala new file mode 100644 index 00000000..3fc3b9f5 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala @@ -0,0 +1,115 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.operations + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat +import 
es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource +import es.weso.rdfshape.server.api.routes.schema.logic.types.{ + Schema, + SchemaSimple +} +import es.weso.schema.{ShExSchema, ShaclexSchema, Schema => SchemaW} +import es.weso.shacl.converter.Shacl2ShEx +import io.circe.syntax._ +import io.circe.{Encoder, Json} + +/** Data class representing the output of a schema-conversion operation + * + * @param inputSchema Schema used as input of the operation + * @param result [[Schema]] resulting from the conversion + */ +final case class SchemaConvert private ( + override val inputSchema: Schema, + targetFormat: Option[String], + targetEngine: Option[String], + result: Schema +) extends SchemaOperation(SchemaConvert.successMessage, inputSchema) + +private[api] object SchemaConvert extends LazyLogging { + + private val successMessage = "Conversion successful" + + /** JSON encoder for [[SchemaConvert]] + */ + implicit val encodeSchemaConvertOperation: Encoder[SchemaConvert] = + (schemaConvert: SchemaConvert) => + Json.fromFields( + List( + ("message", Json.fromString(schemaConvert.successMessage)), + ("schema", schemaConvert.inputSchema.asJson), + ("result", schemaConvert.result.asJson) + ) + ) + + /** Perform the actual conversion operation between Schema formats + * + * @param schema Input conversion schema + * @param targetFormat Target format + * @param targetEngine Target engine + * @return A new [[Schema]] instance + */ + def schemaConvert( + schema: Schema, + targetFormat: SchemaFormat, + targetEngine: Option[SchemaW] + ): IO[Schema] = { + logger.info( + s"Schema conversion target format/engine: ${targetFormat.name}/${targetEngine + .map(_.name)}" + ) + + // Check the schema engine + schema.engine match { + case Some(engine) => + engine match { + // Test that we are using shaclex schemas, specifically ShEx, + // which implements conversion + case shaclex: ShaclexSchema + if shaclex.getClass == classOf[ShExSchema] => + logger.debug("Schema conversion: 
SHACLEX -> SHEX") + Shacl2ShEx.shacl2ShEx( + schema = shaclex.schema, + nodesPrefixMap = Option(shaclex.pm) + ) match { + case Left(err) => + val msg = s"Error converting schema: $err" + logger.error(msg) + IO.raiseError(new RuntimeException(msg)) + case Right(newSchema) => + // ShapeMap generated here as well, but unneeded + val (schema, _) = newSchema + for { + emptySchemaBuilder <- RDFAsJenaModel.empty + rawString <- emptySchemaBuilder.use(builder => + es.weso.shex.Schema.serialize( + schema, + targetFormat.name, + None, + builder + ) + ) + } yield SchemaSimple( + schemaPre = Option(rawString), + schemaFormat = targetFormat, + schemaEngine = engine, + schemaSource = SchemaSource.TEXT + ) + } + + case _ => + IO.raiseError( + new RuntimeException( + "Could not perform conversion, unknown input schema engine" + ) + ) + } + case None => + IO.raiseError( + new RuntimeException( + "Could not perform conversion, unspecified input schema engine" + ) + ) + } + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala new file mode 100644 index 00000000..d06d419a --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala @@ -0,0 +1,93 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.operations + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.PrefixMap +import es.weso.rdfshape.server.api.routes.schema.logic.aux.SchemaAdapter._ +import es.weso.rdfshape.server.api.routes.schema.logic.operations.SchemaInfo.SchemaInfoResult +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema +import es.weso.rdfshape.server.utils.json.JsonUtils.prefixMap2JsonArray +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} + +/** Data class representing the output of a 
schema-information operation + * + * @param inputSchema Schema used as input of the operation + * @param result [[SchemaInfoResult]] containing the resulting schema information + */ +final case class SchemaInfo private ( + override val inputSchema: Schema, + result: SchemaInfoResult +) extends SchemaOperation(SchemaInfo.successMessage, inputSchema) + +private[api] object SchemaInfo extends LazyLogging { + + private val successMessage = "Well formed Schema" + + /** Given an input data, get information about it + * + * @param schema Input schema instance of any type + * @return A [[SchemaInfo]] instance with the information of the input schema + */ + + def schemaInfo(schema: Schema): IO[SchemaInfo] = for { + model <- schema.getSchema + modelInfo = model.map(m => (m.shapes, m.pm)) + + results <- modelInfo match { + case Right((shapes, prefixMap)) => + IO { + SchemaInfo( + inputSchema = schema, + result = SchemaInfoResult( + schema = schema, + shapes = shapes, + prefixMap = prefixMap + ) + ) + } + case Left(err) => + IO.raiseError(new RuntimeException(err)) + } + } yield results + + /** Case class representing the results to be returned when performing a schema-info operation + * + * @param schema Schema operated on + * @param shapes Shapes in the schema + * @param prefixMap Prefix map in the schema + */ + final case class SchemaInfoResult private ( + schema: Schema, + shapes: List[String], + prefixMap: PrefixMap + ) + + /** JSON encoder for [[SchemaInfoResult]] + */ + private implicit val encodeSchemaInfoResult: Encoder[SchemaInfoResult] = + (schemaInfoResult: SchemaInfoResult) => + Json.fromFields( + List( + ("format", schemaInfoResult.schema.format.asJson), + ("engine", schemaInfoResult.schema.engine.asJson), + ( + "shapes", + Json.fromValues(schemaInfoResult.shapes.map(Json.fromString)) + ), + ("prefixMap", prefixMap2JsonArray(schemaInfoResult.prefixMap)) + ) + ) + + /** JSON encoder for [[SchemaInfo]] + */ + implicit val encodeSchemaInfoOperation: 
Encoder[SchemaInfo] = + (schemaInfo: SchemaInfo) => + Json.fromFields( + List( + ("message", Json.fromString(schemaInfo.successMessage)), + ("schema", schemaInfo.inputSchema.asJson), + ("result", schemaInfo.result.asJson) + ) + ) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala new file mode 100644 index 00000000..781114b7 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala @@ -0,0 +1,20 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.operations + +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema + +/** General definition of operations that operate on [[Schema]]s + * + * @param successMessage Message attached to the result of the operation + * @param inputSchema Schema operated on + */ +private[operations] abstract class SchemaOperation( + val successMessage: String = SchemaOperation.successMessage, + val inputSchema: Schema +) + +private[operations] object SchemaOperation { + + /** Dummy success message + */ + private val successMessage = "Operation completed successfully" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala new file mode 100644 index 00000000..f22fb60b --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala @@ -0,0 +1,126 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.operations + +import cats.effect.IO +import cats.effect.unsafe.implicits.global +import cats.implicits.catsSyntaxTuple2Semigroupal +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.jena.RDFAsJenaModel 
+import es.weso.rdfshape.server.api.routes.data.logic.types.Data +import es.weso.rdfshape.server.api.routes.schema.logic.operations.SchemaInfo.SchemaInfoResult +import es.weso.rdfshape.server.api.routes.schema.logic.trigger.{ + TriggerMode, + TriggerModeType, + TriggerShapeMap, + TriggerTargetDeclarations +} +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema +import es.weso.schema.{Result => ValidationResult} +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} + +/** Data class representing the output of a schema-data-validation operation + * + * @param inputSchema Schema used as input of the operation + * @param validationTrigger [[TriggerShapeMap]] triggering the operation + * @param result [[SchemaInfoResult]] containing the resulting schema information + */ +final case class SchemaValidate private ( + inputData: Data, + override val inputSchema: Schema, + validationTrigger: TriggerMode, + result: ValidationResult +) extends SchemaOperation(SchemaValidate.successMessage, inputSchema) + +private[api] object SchemaValidate extends LazyLogging { + private val successMessage = "Validation successful" + + /** For a given RDF [[Data]] plus a given [[Schema]], attempt to validate + * the data according to the schema using WESO libraries. 
+ * + * @param data [[Data]] to be validated + * @param schema [[Schema]] used for validation + * @param trigger [[TriggerMode]] indicating the origin and the type of + * validation being performed (see [[TriggerModeType]] + * @return A [[SchemaValidate]] instance holding the validation inputs and results + */ + def schemaValidate( + data: Data, + schema: Schema, + trigger: TriggerMode + ): IO[SchemaValidate] = for { + // Get a builder and the data reasoner + builderResource <- RDFAsJenaModel.empty + rdfResource <- data.toRdf() + result <- (builderResource, rdfResource).tupled.use { case (builder, rdf) => + trigger match { + // ShEx validation with Shapemap | + // SHACL validation with target declarations + case TriggerShapeMap(_) | TriggerTargetDeclarations() => + for { + innerSchema <- schema.getSchema + result = innerSchema.flatMap(s => { + trigger.getValidationTrigger.map(vt => { + s.validate(rdf, vt, builder) + }) + }) + validation <- result match { + // Check for Lefts, errors while getting the schema, triggers... 
+ case Left(err) => IO.raiseError(new RuntimeException(err)) + /* If no errors are found here, we can return the IO performing + * the validation */ + case Right(validationOp) => validationOp + } + } yield validation + + // Invalid trigger type, exit + case other => + IO.raiseError( + new RuntimeException( + s"Unexpected validation trigger (${other.triggerModeType})" + ) + ) + } + } + } yield SchemaValidate( + inputData = data, + inputSchema = schema, + validationTrigger = trigger, + result = result + ) + + /** Convert a [[ValidationResult]] to its JSON representation + * @note Exceptionally uses unsafeRun + */ + implicit val encodeValidationResult: Encoder[ValidationResult] = + (validationResult: ValidationResult) => + RDFAsJenaModel.empty + .flatMap(_.use(validationResult.toJson(_))) + .unsafeRunSync() + + /** Convert a [[SchemaValidate]] to its JSON representation + * + * @return JSON representation of the validation operation, + * used for API responses + * @note + */ + implicit val encodeSchemaValidation: Encoder[SchemaValidate] = + (schemaValidate: SchemaValidate) => { + + // Convert ValidationResult to JSON + val validationResultJson: IO[Json] = for { + emptyResource <- RDFAsJenaModel.empty + json <- emptyResource.use(schemaValidate.result.toJson(_)) + } yield json + + Json.fromFields( + List( + ("message", Json.fromString(schemaValidate.successMessage)), + ("data", schemaValidate.inputData.asJson), + ("schema", schemaValidate.inputSchema.asJson), + ("trigger", schemaValidate.validationTrigger.asJson), + // UnsafeRun exceptionally. 
+ ("result", validationResultJson.unsafeRunSync()) + ) + ) + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala new file mode 100644 index 00000000..119ef73d --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala @@ -0,0 +1,102 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.trigger + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType._ +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.TriggerModeParameter +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.schema.ValidationTrigger +import io.circe.{Decoder, Encoder, HCursor} + +/** Common trait to all schemas, whichever its nature + */ +trait TriggerMode { + + /** Corresponding type of this adapter inside [[ValidationTrigger]] + */ + val triggerModeType: TriggerModeType + + /** Get the inner [[ValidationTrigger]], which is used internally for schema validations + * + * @return Either the inner [[ValidationTrigger]] logical model as used by WESO libraries, + * or an error extracting the model + */ + def getValidationTrigger: Either[String, ValidationTrigger] +} + +object TriggerMode extends TriggerModeCompanion[TriggerMode] { + + /** Dummy implementation meant to be overridden. 
+ * If called on a general [[TriggerMode]] instance, pattern match among the available types to + * use the correct implementation + */ + override implicit val encodeTriggerMode: Encoder[TriggerMode] = { + case tsm: TriggerShapeMap => TriggerShapeMap.encodeTriggerMode(tsm) + case ttd: TriggerTargetDeclarations => + TriggerTargetDeclarations.encodeTriggerMode(ttd) + } + + /** Dummy implementation meant to be overridden + * If called on a general [[TriggerMode]] instance, pattern match among the available types to + * use the correct implementation + */ + override implicit val decodeTriggerMode: Decoder[TriggerMode] = + (cursor: HCursor) => { + this.getClass match { + case tsm if tsm == classOf[TriggerShapeMap] => + TriggerShapeMap.decodeTriggerMode(cursor) + case ttd if ttd == classOf[TriggerTargetDeclarations] => + TriggerTargetDeclarations.decodeTriggerMode(cursor) + } + } + + /** General implementation delegating on subclasses + */ + override def mkTriggerMode( + partsMap: PartsMap + ): IO[Either[String, TriggerMode]] = + for { + /* 1. Make some checks on the parameters to distinguish between + * TriggerMode types */ + triggerModeType <- partsMap.optPartValue(TriggerModeParameter.name) + // 2. 
Delegate on the correct sub-class for creating the Schema + maybeTriggerMode <- triggerModeType match { + // A triggerMode was sent, pattern match to all possibilities + case Some(triggerModeStr) => + triggerModeStr match { + // ShapeMap: ShEx validation + case SHAPEMAP => TriggerShapeMap.mkTriggerMode(partsMap) + // TargetDecls: SHACL validation + case TARGET_DECLARATIONS => + TriggerTargetDeclarations.mkTriggerMode(partsMap) + // Invalid value received for trigger mode + case _ => IO.pure(Left("Invalid value received for trigger mode")) + } + // No trigger mode was sent, error + case None => IO.pure(Left("Could not find a trigger mode")) + } + } yield maybeTriggerMode +} + +/** Static utilities to be used with [[TriggerMode]] representations + * + * @tparam T Specific [[TriggerMode]] representation to be handled + */ +private[schema] trait TriggerModeCompanion[T <: TriggerMode] + extends LazyLogging { + + /** Encoder used to transform [[TriggerMode]] instances to JSON values + */ + implicit val encodeTriggerMode: Encoder[T] + + /** Decoder used to extract [[TriggerMode]] instances from JSON values + */ + implicit val decodeTriggerMode: Decoder[T] + + /** Given a request's parameters, try to extract an instance of [[TriggerMode]] (type [[T]]) from them + * + * @param partsMap Request's parameters + * @return Either the [[TriggerMode]] instance or an error message + */ + def mkTriggerMode(partsMap: PartsMap): IO[Either[String, T]] +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala new file mode 100644 index 00000000..5265a637 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala @@ -0,0 +1,13 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.trigger + +/** Enumeration of the different possible 
Validation Triggers sent by the client. + * The trigger sent indicates the API how to proceed with validations + */ +private[schema] object TriggerModeType extends Enumeration { + type TriggerModeType = String + + val SHAPEMAP = "shapeMap" + val TARGET_DECLARATIONS = "targetDecls" + + val defaultSchemaSource: TriggerModeType = SHAPEMAP +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala new file mode 100644 index 00000000..e3abfdc2 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala @@ -0,0 +1,72 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.trigger + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType.TriggerModeType +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.schema.{ShapeMapTrigger, ValidationTrigger} +import es.weso.shapemaps.{ShapeMap => ShapeMapW} +import io.circe.syntax.EncoderOps +import io.circe.{Decoder, Encoder, HCursor, Json} + +/** Data class representing a validation trigger enabled by a shapemap, + * for ShEx validations. 
+ * + * @param shapeMap Inner shapemap associated to the [[TriggerShapeMap()]] + */ +sealed case class TriggerShapeMap private ( + shapeMap: ShapeMap +) extends TriggerMode + with LazyLogging { + + /** Inner shapemap structure of the shapemap contained in this instance + * + * @return A ShapeMap instance used by WESO libraries in validation + */ + lazy val innerShapeMap: Either[String, ShapeMapW] = shapeMap.innerShapeMap + + override val triggerModeType: TriggerModeType = TriggerModeType.SHAPEMAP + + override def getValidationTrigger: Either[String, ValidationTrigger] = + innerShapeMap.map(ShapeMapTrigger(_)) +} + +private[api] object TriggerShapeMap + extends TriggerModeCompanion[TriggerShapeMap] + with LazyLogging { + + /** Given a request's parameters, try to extract a TriggerMode instance from them + * + * @param partsMap Request's parameters + * @return Either the trigger mode or an error message + */ + def mkTriggerMode( + partsMap: PartsMap + ): IO[Either[String, TriggerShapeMap]] = { + for { + // Get companion shapemap from params + maybeShapeMap <- ShapeMap.mkShapeMap(partsMap) + + // Create TriggerMode instance + maybeTriggerMode = maybeShapeMap.map(shapeMap => + TriggerShapeMap(shapeMap) + ) + + } yield maybeTriggerMode + } + + override implicit val encodeTriggerMode: Encoder[TriggerShapeMap] = + (tsm: TriggerShapeMap) => + Json.obj( + ("type", tsm.triggerModeType.asJson), + ("shapeMap", tsm.shapeMap.asJson) + ) + + override implicit val decodeTriggerMode: Decoder[TriggerShapeMap] = + (cursor: HCursor) => + for { + shapeMap <- cursor.downField("shapeMap").as[ShapeMap] + decoded = TriggerShapeMap(shapeMap) + } yield decoded +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala new file mode 100644 index 00000000..6953ae60 --- /dev/null +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala @@ -0,0 +1,47 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.trigger + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType.TriggerModeType +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.schema.{TargetDeclarations, ValidationTrigger} +import io.circe.syntax.EncoderOps +import io.circe.{Decoder, Encoder, HCursor, Json} + +/** Data class representing a validation trigger enabled by target declarations, + * for SHACL validations. + */ +sealed case class TriggerTargetDeclarations private () + extends TriggerMode + with LazyLogging { + + override val triggerModeType: TriggerModeType = + TriggerModeType.TARGET_DECLARATIONS + + override def getValidationTrigger: Either[String, ValidationTrigger] = + Right(TargetDeclarations) +} + +private[api] object TriggerTargetDeclarations + extends TriggerModeCompanion[TriggerTargetDeclarations] + with LazyLogging { + + /** Given a request's parameters, try to extract a TriggerMode instance from them + * + * @param partsMap Request's parameters + * @return Either the trigger mode or an error message + */ + def mkTriggerMode( + partsMap: PartsMap + ): IO[Either[String, TriggerTargetDeclarations]] = + IO.pure(Right(TriggerTargetDeclarations())) + + override implicit val encodeTriggerMode: Encoder[TriggerTargetDeclarations] = + (tsm: TriggerTargetDeclarations) => + Json.obj( + ("type", tsm.triggerModeType.asJson) + ) + + override implicit val decodeTriggerMode: Decoder[TriggerTargetDeclarations] = + (_: HCursor) => Decoder.resultInstance.pure(TriggerTargetDeclarations()) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala new 
file mode 100644 index 00000000..bfedd20a --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala @@ -0,0 +1,111 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.types + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource.SchemaSource +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.schema.{Schema => SchemaW} +import io.circe.{Decoder, Encoder, HCursor} + +/** Common trait to all schemas, whichever its nature + */ +trait Schema { + + /** Either the raw schema contents represented as a String, + * or the error occurred when trying to parse the schema + */ + lazy val rawSchema: Either[String, String] = Left("") + + /** Default URI obtained from current folder + */ + lazy val base: Option[IRI] = ApiDefaults.relativeBase + + /** Source where the schema comes from + */ + val schemaSource: SchemaSource + + /** Format of the schema + */ + val format: Option[SchemaFormat] = None + + /** Engine used for operating the schema + */ + val engine: Option[SchemaW] = None + + /** Get the inner schema entity of type [[SchemaW]], which is used internally for schema operations + * + * @return Either the inner Schema logical model as used by WESO libraries, + * or an error extracting the model + */ + def getSchema: IO[Either[String, SchemaW]] +} + +object Schema extends SchemaCompanion[Schema] { + + /** Dummy implementation meant to be overridden + * + * @note Resort by default to [[SchemaSimple]]'s empty representation + */ + override val emptySchema: Schema = SchemaSimple.emptySchema + + /** Dummy implementation meant to be overridden. 
+ * If called on a general [[Schema]] instance, pattern match among the available data types to + * use the correct implementation + */ + override implicit val encodeSchema: Encoder[Schema] = { + case ss: SchemaSimple => SchemaSimple.encodeSchema(ss) + } + + /** Dummy implementation meant to be overridden + * If called on a general [[Schema]] instance, pattern match among the available data types to + * use the correct implementation + * + * @note Defaults to [[SchemaSimple]]'s implementation of decoding data + */ + override implicit val decodeSchema: Decoder[Schema] = (cursor: HCursor) => { + this.getClass match { + case ss if ss == classOf[SchemaSimple] => + SchemaSimple.decodeSchema(cursor) + } + } + + /** Build a [[Schema]] from request parameters + * + * @param partsMap Request parameters + * @note General implementation delegating on subclasses + */ + override def mkSchema(partsMap: PartsMap): IO[Either[String, Schema]] = for { + // 1. Make some checks on the parameters to distinguish between Schema types + // 2. 
Delegate on the correct sub-class for creating the Schema + maybeSchema <- SchemaSimple.mkSchema(partsMap) + } yield maybeSchema +} + +/** Static utilities to be used with [[Schema]] representations + * + * @tparam S Specific [[Schema]] representation to be handled + */ +private[schema] trait SchemaCompanion[S <: Schema] extends LazyLogging { + + /** Empty instance of the [[Schema]] representation in use + */ + val emptySchema: S + + /** Encoder used to transform [[Schema]] instances to JSON values + */ + implicit val encodeSchema: Encoder[S] + + /** Decoder used to extract [[Schema]] instances from JSON values + */ + implicit val decodeSchema: Decoder[S] + + /** Given a request's parameters, try to extract an instance of [[Schema]] (type [[S]]) from them + * + * @param partsMap Request's parameters + * @return Either the [[Schema]] instance or an error message + */ + def mkSchema(partsMap: PartsMap): IO[Either[String, S]] +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala new file mode 100644 index 00000000..e64ddf14 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala @@ -0,0 +1,177 @@ +package es.weso.rdfshape.server.api.routes.schema.logic.types + +import cats.effect._ +import cats.implicits.toBifunctorOps +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.PrefixMap +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.{ + SchemaFormat, + ShaclFormat +} +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource +import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource.SchemaSource +import es.weso.rdfshape.server.api.routes.schema.logic.aux.SchemaAdapter +import 
es.weso.rdfshape.server.api.routes.schema.logic.aux.SchemaAdapter._ +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents +import es.weso.schema.{Schemas, Schema => SchemaW} +import io.circe.syntax.EncoderOps +import io.circe.{Decoder, Encoder, HCursor, Json} + +/** Case class representing a single Schema instance with its inner content, format, engine and source + * + * @param schemaPre Schema data, as it is received before being processed depending on the [[schemaSource]] + * @param schemaFormat Schema format + * @param schemaEngine Schema engine of type [[SchemaW]], used to know how to internally process schema operations + * @param schemaSource Origin source, used to know how to process the raw data + */ +sealed case class SchemaSimple( + private val schemaPre: Option[String], + private val schemaFormat: SchemaFormat, + private val schemaEngine: SchemaW, + override val schemaSource: SchemaSource +) extends Schema + with LazyLogging { + + /** Given the user input ([[schemaPre]]) for the schema and its source, fetch the Schema contents using the input in the way the source needs it + * (e.g.: for URLs, fetch the input with a web request; for files, decode the input; for raw data, do nothing) + * + * @return Either an error creating the raw data or a String containing the final schema text + */ + override lazy val rawSchema: Either[String, String] = schemaPre match { + case None => Left("Could not build the Schema from empty data") + case Some(userSchema) => + schemaSource match { + case SchemaSource.TEXT | SchemaSource.FILE => Right(userSchema) + case SchemaSource.URL => + getUrlContents(userSchema) + + case other => + val msg = s"Unknown schema source: $other" + logger.warn(msg) + Left(msg) + + } + } + // Override and make publicly available the trait properties + override val format: 
Option[SchemaFormat] = Option(schemaFormat) + override val engine: Option[SchemaW] = Option(schemaEngine) + + def getPrefixMap: IO[Option[PrefixMap]] = for { + model <- getSchema + } yield model.map(_.pm).toOption + + override def getSchema: IO[Either[String, SchemaW]] = { + rawSchema match { + case Right(schemaStr) => + for { + schemaW <- Schemas + .fromString( + str = schemaStr, + format = schemaFormat.name, + schemaEngine.name, + base.map(_.str) + ) + .attempt // Catch unexpected exceptions early + + } yield schemaW.leftMap(err => + Option(err.getMessage).getOrElse( + "Unknown error processing the schema" + ) + ) + // No schema data, propagate the error + case Left(err) => IO.pure(Left(err)) + } + } + +} + +private[api] object SchemaSimple + extends SchemaCompanion[SchemaSimple] + with LazyLogging { + + /** Empty schema representation, with no inner data and all defaults or None + */ + override lazy val emptySchema: SchemaSimple = + SchemaSimple( + schemaPre = None, + schemaFormat = ApiDefaults.defaultSchemaFormat, + schemaEngine = ApiDefaults.defaultSchemaEngine, + schemaSource = ApiDefaults.defaultSchemaSource + ) + + override def mkSchema(partsMap: PartsMap): IO[Either[String, SchemaSimple]] = + for { + // Schema param as sent by client + paramSchema <- partsMap.optPartValue(SchemaParameter.name) + paramFormat <- SchemaFormat.fromRequestParams( + SchemaFormatParameter.name, + partsMap + ) + paramEngine <- partsMap.optPartValue(SchemaEngineParameter.name) + paramSource <- partsMap.optPartValue(SchemaSourceParameter.name) + _ = Schemas.availableSchemaNames + // Confirm format and engine or resort to defaults + schemaFormat = paramFormat.getOrElse(ApiDefaults.defaultSchemaFormat) + schemaEngine = paramEngine + .flatMap(SchemaAdapter.schemaEngineFromString) + .getOrElse(ApiDefaults.defaultSchemaEngine) + + // Check the client's selected source + schemaSource = paramSource.getOrElse(SchemaSource.defaultSchemaSource) + _ = logger.debug( + s"Schema received 
($schemaFormat) - Source: $schemaSource" + ) + + // Base for the result + schema = SchemaSimple( + schemaPre = paramSchema, + schemaFormat = schemaFormat, + schemaEngine = schemaEngine, + schemaSource = schemaSource + ) + + } yield schema.rawSchema.fold( + err => Left(err), + _ => Right(schema) + ) + + override implicit val encodeSchema: Encoder[SchemaSimple] = + (schema: SchemaSimple) => { + Json.obj( + ("schema", schema.rawSchema.toOption.asJson), + ("format", schema.schemaFormat.asJson), + ("engine", schema.schemaEngine.asJson), + ("source", schema.schemaSource.asJson) + ) + } + override implicit val decodeSchema: Decoder[SchemaSimple] = + (cursor: HCursor) => + for { + schema <- cursor.downField("schema").as[Option[String]] + + schemaFormat <- cursor + .downField("schemaFormat") + .as[ShaclFormat] + + schemaEngine <- + cursor + .downField("schemaEngine") + .as[SchemaW] + + schemaSource <- cursor + .downField("schemaSource") + .as[SchemaSource] + .orElse(Right(SchemaSource.defaultSchemaSource)) + + decoded = SchemaSimple.emptySchema.copy( + schemaPre = schema, + schemaFormat, + schemaEngine, + schemaSource + ) + + } yield decoded +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 8d196fa1..7f7071d5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -2,19 +2,21 @@ package es.weso.rdfshape.server.api.routes.schema.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultSchemaEngineName import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import 
es.weso.rdfshape.server.api.format.dataFormats.SchemaFormat +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.{ + ShExFormat, + ShaclFormat +} import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.routes.schema.logic.Schema -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations._ -import es.weso.rdfshape.server.api.utils.OptEitherF._ +import es.weso.rdfshape.server.api.routes.schema.logic.operations.SchemaInfo +import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson -import es.weso.schema._ +import es.weso.schema.{Schemas, ShExSchema} import io.circe.Json +import io.circe.syntax.EncoderOps import org.http4s._ import org.http4s.circe._ import org.http4s.client.Client @@ -39,8 +41,8 @@ class SchemaService(client: Client[IO]) /** Returns a JSON array with the accepted schema engines for ShEx */ case GET -> Root / `api` / `verb` / "engines" => - val engines = Schemas.availableSchemaNames - val json = Json.fromValues(engines.map(str => Json.fromString(str))) + val engineNames = Schemas.availableSchemaNames + val json = Json.fromValues(engineNames.map(Json.fromString)) Ok(json) /** Returns a JSON array with the accepted schema engines for SHACL @@ -66,269 +68,260 @@ class SchemaService(client: Client[IO]) */ case GET -> Root / `api` / `verb` / "formats" :? SchemaEngineParameter(optSchemaEngine) => - val schemaEngine = optSchemaEngine.getOrElse(Schemas.defaultSchemaName) - val res = Schemas - .lookupSchema(schemaEngine) - .attempt - .map( - _.fold( - _ => - errorResponseJson( - s"Schema engine: $schemaEngine not found. 
Available engines = ${Schemas.availableSchemaNames - .mkString(",")}", - NotFound - ), - schema => - Ok(Json.fromValues(schema.formats.toList.map(Json.fromString))) - ) + val maybeFormats = for { + schema <- Schemas.lookupSchema( + optSchemaEngine.getOrElse(Schemas.defaultSchemaName) + ) + formats = schema match { + case ShExSchema(_) => ShExFormat.availableFormats + case _ => ShaclFormat.availableFormats + } + } yield Json.fromValues( + formats.map(format => Json.fromString(format.name)) + ) + + // Handle errors + maybeFormats + .flatMap(Ok(_)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) ) - res.flatten - /** Returns a JSON array with the accepted triggerModes + /** Returns a JSON array with the accepted Trigger Modes */ case GET -> Root / `api` / `verb` / "triggerModes" => - val triggerModes = ValidationTrigger.triggerValues.map(_._1) - val json = Json.fromValues(triggerModes.map(Json.fromString)) + val json = Json.fromValues( + List(TriggerModeType.SHAPEMAP, TriggerModeType.TARGET_DECLARATIONS).map( + Json.fromString + ) + ) Ok(json) /** Obtain information about an schema. * Receives a JSON object with the input schema information: - * - schema [String]: Raw schema data - * - schemaUrl [String]: Url containing the schema - * - schemaFile [File Object]: File containing schema + * - schema [String]: Schema data (raw, URL containing the schema or File with the schema) + * - schemaSource [String]: Identifies the source of the schema (raw, URL, file...) * - schemaFormat [String]: Format of the schema - * - schemaEngine [String]: Engine used to process the schema - * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) 
- * Returns a JSON object with the schema information: - * - schemaType [String]: Type of the schema - * - schemaEngine [String]: Engine of the schema - * - wellFormed [Boolean]: Whether if the schema is well formed or not - * - shapes [Array]: Array of the shapes in the schema - * - shapesPrefixMap [Array]: Array of the prefixes in the schema - * - prefix [String]: Prefix key - * - uri [String]: Prefix URI - * - errors [Array]: Array of errors in the schema + * - schemaEngine [String]: Engine used to process the schema (ignored for ShEx) + * Returns a JSON object with the operation results. See [[SchemaInfo.encodeSchemaInfoOperation]]. */ // TODO: show errors in a friendlier way in the client's UI case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => { val partsMap = PartsMap(m.parts) - logger.info(s"POST info partsMap. $partsMap") - val r: IO[Json] = for { - schemaPair <- Schema.mkSchema(partsMap, None) - (schema, sp) = schemaPair - } yield { - schemaInfo(schema).toJson - } + for { - e <- r.attempt - v <- e.fold( - t => { - errorResponseJson( - t.getMessage, - BadRequest - ) - }, - Ok(_) + // Get the schema from the partsMap + eitherSchema <- Schema.mkSchema(partsMap) + response <- eitherSchema.fold( + // If there was an error parsing the schema, return it + err => errorResponseJson(err, InternalServerError), + // Else, try and compute the schema info + schema => + SchemaInfo + .schemaInfo(schema) + .flatMap(info => Ok(info.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) ) - } yield v + + } yield response } - } - /** Convert a given schema to another accepted format. 
- * Receives a JSON object with the input schema information: - * - schema [String]: Raw schema data - * - schemaUrl [String]: Url containing the schema - * - schemaFile [File Object]: File containing schema - * - schemaFormat [String]: Format of the schema - * - targetSchemaFormat [String]: Desired format after conversion of the schema - * - schemaEngine [String]: Engine used to process the schema - * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) - * Returns a JSON object with the converted schema information: - * - message [String]: Informational message on success - * - schema [String]: Original input schema - * - schemaFormat [String]: Format of the original schema - * - schemaEngine [String]: Engine of the conversion - * - targetSchemaFormat [String]: Format of the output schema - * - result [String]: Output schema - * - shapeMap [String]: Output shapemap, if any - */ - case req @ POST -> Root / `api` / `verb` / "convert" => - req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - logger.info(s"POST info partsMap. $partsMap") - val r: IO[Json] = for { - schemaPair <- Schema.mkSchema(partsMap, None) - (schema, sp) = schemaPair + /** Convert a given schema to another accepted format. + * Receives a JSON object with the input schema information: + * - schema [String]: Raw schema data + * - schemaUrl [String]: Url containing the schema + * - schemaFile [File Object]: File containing schema + * - schemaFormat [String]: Format of the schema + * - targetSchemaFormat [String]: Desired format after conversion of the schema + * - schemaEngine [String]: Engine used to process the schema + * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) 
+ * Returns a JSON object with the converted schema information: + * - message [String]: Informational message on success + * - schema [String]: Original input schema + * - schemaFormat [String]: Format of the original schema + * - schemaEngine [String]: Engine of the conversion + * - targetSchemaFormat [String]: Format of the output schema + * - result [String]: Output schema + * - shapeMap [String]: Output shapemap, if any + */ + // case req @ POST -> Root / `api` / `verb` / "convert" => + // req.decode[Multipart[IO]] { m => + // { + // val partsMap = PartsMap(m.parts) + // logger.info(s"POST info partsMap. $partsMap") + // val r: IO[Json] = for { + // schemaPair <- SchemaSimple.mkSchema(partsMap, None) + // (schema, sp) = schemaPair + // + // targetSchemaFormat <- optEither2f( + // sp.targetSchemaFormat, + // SchemaFormat.fromString + // ) + // converted <- convertSchema( + // schema, + // sp.schemaPre, + // sp.schemaFormat, + // sp.schemaEngine.getOrElse(defaultSchemaEngineName), + // targetSchemaFormat, + // sp.targetSchemaEngine + // ) + // } yield { + // converted.toJson + // } + // for { + // e <- r.attempt + // v <- e.fold( + // t => errorResponseJson(t.getMessage, InternalServerError), + // Ok(_) + // ) + // } yield v + // } + // } - targetSchemaFormat <- optEither2f( - sp.targetSchemaFormat, - SchemaFormat.fromString - ) - converted <- convertSchema( - schema, - sp.schema, - sp.schemaFormat, - sp.schemaEngine.getOrElse(defaultSchemaEngineName), - targetSchemaFormat, - sp.targetSchemaEngine - ) - } yield { - converted.toJson - } - for { - e <- r.attempt - v <- e.fold( - t => errorResponseJson(t.getMessage, InternalServerError), - Ok(_) - ) - } yield v - } - } + /** Convert a given schema to a UML visualization using PlantUML. 
+ * Receives a JSON object with the input schema information: + * - schema [String]: Raw schema data + * - schemaUrl [String]: Url containing the schema + * - schemaFile [File Object]: File containing schema + * - schemaFormat [String]: Format of the schema + * - schemaEngine [String]: Engine used to process the schema + * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) + * Returns a JSON object with the converted schema information: + * - schemaType [String]: Type of the schema + * - schemaEngine [String]: Engine of the schema + * - svg [String]: Array of the shapes in the schema + * - plantUml [String]: Array of the shapes in the schema + */ + // case req @ POST -> Root / `api` / `verb` / "visualize" => + // req.decode[Multipart[IO]] { m => + // { + // val partsMap = PartsMap(m.parts) + // val r: IO[Json] = for { + // schemaPair <- SchemaSimple.mkSchema(partsMap, None) + // (schema, _) = schemaPair + // v <- schemaVisualize(schema) + // } yield { + // v + // } + // for { + // e <- r.attempt + /* v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) */ + // } yield v + // } + // } - /** Convert a given schema to a UML visualization using PlantUML. - * Receives a JSON object with the input schema information: - * - schema [String]: Raw schema data - * - schemaUrl [String]: Url containing the schema - * - schemaFile [File Object]: File containing schema - * - schemaFormat [String]: Format of the schema - * - schemaEngine [String]: Engine used to process the schema - * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) 
- * Returns a JSON object with the converted schema information: - * - schemaType [String]: Type of the schema - * - schemaEngine [String]: Engine of the schema - * - svg [String]: Array of the shapes in the schema - * - plantUml [String]: Array of the shapes in the schema - */ - case req @ POST -> Root / `api` / `verb` / "visualize" => - req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - val r: IO[Json] = for { - schemaPair <- Schema.mkSchema(partsMap, None) - (schema, _) = schemaPair - v <- schemaVisualize(schema) - } yield { - v - } - for { - e <- r.attempt - v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) - } yield v - } - } + // TODO: test and include in the client + // case req @ POST -> Root / `api` / `verb` / "cytoscape" => + // req.decode[Multipart[IO]] { m => + // { + // val partsMap = PartsMap(m.parts) + // logger.info(s"POST info partsMap. $partsMap") + // val r: IO[Json] = for { + // schemaPair <- SchemaSimple.mkSchema(partsMap, None) + // (schema, _) = schemaPair + // } yield { + // schemaCytoscape(schema) + // } + // for { + // e <- r.attempt + /* v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) */ + // } yield v + // } + // } - // TODO: test and include in the client - case req @ POST -> Root / `api` / `verb` / "cytoscape" => - req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - logger.info(s"POST info partsMap. $partsMap") - val r: IO[Json] = for { - schemaPair <- Schema.mkSchema(partsMap, None) - (schema, _) = schemaPair - } yield { - schemaCytoscape(schema) - } - for { - e <- r.attempt - v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) - } yield v - } + // TODO: Enhance API response + /** Validates RDF data against a given schema-shapemap. 
+ * Receives a JSON object with the input data, schema and shapemap information: + * - data [String]: RDF data + * - dataUrl [String]: Url containing the RDF data + * - dataFile [File Object]: File containing RDF data + * - dataFormat [String]: Format of the RDF data + * - inference [String]: Inference to be applied + * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) + * - endpoint [String]: Additional endpoint to serve as a source of data + * - schema [String]: Raw schema data + * - schemaUrl [String]: Url containing the schema + * - schemaFile [File Object]: File containing the schema + * - schemaFormat [String]: Format of the schema + * - schemaEngine [String]: Engine used to process the schema + * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) + * - triggerMode [String]: Validation trigger mode + * - shapeMap [String]: Raw shapemap data + * - shapeMapUrl [String]: Url containing the shapemap + * - shapeMapFile [File Object]: File containing the shapemap + * - shapeMapFormat [String]: Format of the shapemap + * - activeShapeMapTab [String]: Identifies the source of the shapemap (raw, URL, file...) + * Returns a JSON object with the converted schema information: + * - valid [Boolean]: Whether the data is at least partially valid or not + * - message [String]: Informational message + * - validationReport [String]: Additional validation information + * - schema [String]: Original input schema + * - nodesPrefixMap [Object]: Key/value structure with the data prefixes + * - shapesPrefixMap [Object]: Key/value structure with the schema prefixes + * - shapeMap [Array]: Array containing the validation results for each node. 
Each result has: + * - node [String]: Full name of the affected node + * - shape [String]: Full name of the affected shape + * - status [String]: Whether this node conforms this shape + * - appInfo [Object]: Additional information on why the node conforms or not + * - errors [Array]: Array of errors in the validation + */ + /* TODO: redo */ + // case req @ POST -> Root / `api` / `verb` / "validate" => + // req.decode[Multipart[IO]] { m => + // { + // val partsMap = PartsMap(m.parts) + // val r = for { + // dataPair <- DataSingle.getData(partsMap, relativeBase) + // (resourceRdf, dp) = dataPair + // res <- for { + // emptyRes <- RDFAsJenaModel.empty + // vv <- (resourceRdf, emptyRes).tupled.use { case (rdf, builder) => + // for { + // schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) + // (schema, _) = schemaPair + /* maybeTriggerMode <- TriggerMode.mkTriggerMode(partsMap) */ + // newRdf <- applyInference(rdf, dp.inference) + // ret <- maybeTriggerMode match { + // case Left(err) => + // IO.raiseError( + // new RuntimeException( + // s"Could not obtain validation trigger: $err" + // ) + // ) + // case Right(triggerMode) => + // for { + // r <- io2f( + // schemaValidate( + // newRdf, + // schema, + // triggerMode, + // relativeBase, + // builder + // ) + // ) + // json <- io2f(schemaResult2json(r._1)) + // } yield json + // } + // } yield ret + // } + // } yield vv + // } yield res + // + // for { + // e <- r.attempt + // res <- e.fold( + // exc => errorResponseJson(exc.getMessage, BadRequest), + // json => Ok(json) + // ) + // } yield res + // } + // } } - - // TODO: Enhance API response - /** Validates RDF data against a given schema-shapemap. 
- * Receives a JSON object with the input data, schema and shapemap information: - * - data [String]: RDF data - * - dataUrl [String]: Url containing the RDF data - * - dataFile [File Object]: File containing RDF data - * - dataFormat [String]: Format of the RDF data - * - inference [String]: Inference to be applied - * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) - * - endpoint [String]: Additional endpoint to serve as a source of data - * - schema [String]: Raw schema data - * - schemaUrl [String]: Url containing the schema - * - schemaFile [File Object]: File containing the schema - * - schemaFormat [String]: Format of the schema - * - schemaEngine [String]: Engine used to process the schema - * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) - * - triggerMode [String]: Validation trigger mode - * - shapeMap [String]: Raw shapemap data - * - shapeMapUrl [String]: Url containing the shapemap - * - shapeMapFile [File Object]: File containing the shapemap - * - shapeMapFormat [String]: Format of the shapemap - * - activeShapeMapTab [String]: Identifies the source of the shapemap (raw, URL, file...) - * Returns a JSON object with the converted schema information: - * - valid [Boolean]: Whether the data is at least partially valid or not - * - message [String]: Informational message - * - validationReport [String]: Additional validation information - * - schema [String]: Original input schema - * - nodesPrefixMap [Object]: Key/value structure with the data prefixes - * - shapesPrefixMap [Object]: Key/value structure with the schema prefixes - * - shapeMap [Array]: Array containing the validation results for each node. 
Each result has: - * - node [String]: Full name of the affected node - * - shape [String]: Full name of the affected shape - * - status [String]: Whether this node conforms this shape - * - appInfo [Object]: Additional information on why the node conforms or not - * - errors [Array]: Array of errors in the validation - */ - /* TODO: redo */ - // case req @ POST -> Root / `api` / `verb` / "validate" => - // req.decode[Multipart[IO]] { m => - // { - // val partsMap = PartsMap(m.parts) - // val r = for { - // dataPair <- DataSingle.getData(partsMap, relativeBase) - // (resourceRdf, dp) = dataPair - // res <- for { - // emptyRes <- RDFAsJenaModel.empty - /* vv <- (resourceRdf, emptyRes).tupled.use { case (rdf, builder) => */ - // for { - // schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) - // (schema, _) = schemaPair - // maybeTriggerMode <- TriggerMode.mkTriggerMode(partsMap) - // newRdf <- applyInference(rdf, dp.inference) - // ret <- maybeTriggerMode match { - // case Left(err) => - // IO.raiseError( - // new RuntimeException( - // s"Could not obtain validation trigger: $err" - // ) - // ) - // case Right(triggerMode) => - // for { - // r <- io2f( - // schemaValidate( - // newRdf, - // schema, - // triggerMode, - // relativeBase, - // builder - // ) - // ) - // json <- io2f(schemaResult2json(r._1)) - // } yield json - // } - // } yield ret - // } - // } yield vv - // } yield res - // - // for { - // e <- r.attempt - // res <- e.fold( - // exc => errorResponseJson(exc.getMessage, BadRequest), - // json => Ok(json) - // ) - // } yield res - // } - // } } - private val relativeBase = ApiDefaults.relativeBase - } object SchemaService { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index 4a21801e..2f95a054 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala 
+++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -10,25 +10,22 @@ import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.{ } import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.rdfshape.server.utils.error.exceptions.JsonConversionException -import es.weso.rdfshape.server.utils.json.JsonUtils.maybeField import es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import es.weso.shapemaps.{ShapeMap => ShapeMapW} -import io.circe.Json +import io.circe.syntax.EncoderOps +import io.circe.{Decoder, Encoder, HCursor, Json} /** Data class representing a ShapeMap and its current source. * * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). - * @param shapeMapPre Shapemap contents, as received before being processed depending on the [[shapeMapSource]] - * @param shapeMapFormat Shapemap format - * @param targetShapeMapFormat Optionally, the shapemap target format (only for conversion operations) - * @param shapeMapSource Active source, used to know which source the shapemap comes from + * @param shapeMapPre Shapemap contents, as received before being processed depending on the [[source]] + * @param format Shapemap format + * @param source Active source, used to know which source the shapemap comes from */ sealed case class ShapeMap private ( private val shapeMapPre: Option[String], - shapeMapFormat: ShapeMapFormat, - targetShapeMapFormat: Option[ShapeMapFormat], - shapeMapSource: String + format: ShapeMapFormat, + source: ShapeMapSource ) extends LazyLogging { /** Given the (user input) for the shapeMap and its source, fetch the shapeMap contents using the input in the way the source needs it @@ -36,7 +33,7 @@ sealed case class ShapeMap private ( * * @return Optionally, a String containing the final text of the shapeMap 
query */ - lazy val rawShapeMap: Option[String] = shapeMapSource match { + lazy val rawShapeMap: Option[String] = source match { case ShapeMapSource.TEXT | ShapeMapSource.FILE => shapeMapPre case ShapeMapSource.URL => @@ -55,53 +52,64 @@ sealed case class ShapeMap private ( rawShapeMap match { case Some(shapeMapStr) => ShapeMapW - .fromString(shapeMapStr, shapeMapFormat.name) match { + .fromString(shapeMapStr, format.name) match { case Left(errorList) => Left(errorList.toList.mkString("\n")) case Right(shapeMap) => Right(shapeMap) } case None => Left("Cannot extract the ShapeMap from an empty instance") } } - - /** JSON representation of this shapemap to be used in API responses - * - * @return JSON information of the shapemap (raw content, format, JSON structure) or an - */ - @throws(classOf[JsonConversionException]) - lazy val shapeMapJson: Json = { - innerShapeMap match { - case Left(err) => throw JsonConversionException(err) - case Right(dataShapeMap) => - Json.fromFields( - maybeField("shapeMap", rawShapeMap, Json.fromString) ++ - maybeField( - "shapeMapFormat", - Some(shapeMapFormat), - (format: ShapeMapFormat) => Json.fromString(format.name) - ) ++ - maybeField( - "shapeMapJson", - Some(dataShapeMap.toJson), - identity[Json] - ) - ) - } - - } } private[api] object ShapeMap extends LazyLogging { /** Placeholder value used for the shapemap whenever an empty shapemap is issued/needed. 
*/ - private val emptyShapeMap = + val emptyShapeMap: ShapeMap = ShapeMap( shapeMapPre = None, - shapeMapFormat = ApiDefaults.defaultShapeMapFormat, - targetShapeMapFormat = None, - shapeMapSource = ShapeMapSource.defaultShapeMapSource + format = ApiDefaults.defaultShapeMapFormat, + source = ShapeMapSource.defaultShapeMapSource ) + /** JSON representation of this shapemap to be used in API responses + * + * @return JSON information of the shapemap (raw content, format, JSON structure) + */ + implicit val encodeShapeMap: Encoder[ShapeMap] = + (shapeMap: ShapeMap) => + Json.obj( + ("shapeMap", shapeMap.rawShapeMap.asJson), + ("format", shapeMap.format.asJson), + ("inner", shapeMap.innerShapeMap.toOption.map(_.toJson).asJson) + ) + + /** Decode JSON into [[ShapeMap]] instances + * + * @return [[ShapeMap]] instance created from JSON data + */ + implicit val decodeShapeMap: Decoder[ShapeMap] = + (cursor: HCursor) => + for { + shapeMap <- cursor.downField("shapeMap").as[Option[String]] + + shapeMapFormat <- cursor + .downField("shapeMapFormat") + .as[ShapeMapFormat] + + shapeMapSource <- cursor + .downField("shapeMapSource") + .as[ShapeMapSource] + .orElse(Right(ShapeMapSource.defaultShapeMapSource)) + + decoded = ShapeMap.emptyShapeMap.copy( + shapeMapPre = shapeMap, + format = shapeMapFormat, + source = shapeMapSource + ) + + } yield decoded + /** Given a request's parameters, try to extract a shapemap from them * * @param partsMap Request's parameters @@ -112,62 +120,27 @@ private[api] object ShapeMap extends LazyLogging { ): IO[Either[String, ShapeMap]] = { for { // Get data sent in que query - paramShapemap <- partsMap.optPartValue(ShapeMapParameter.name) - shapeMapFormat <- ShapeMapFormat.fromRequestParams( + paramShapeMap <- partsMap.optPartValue(ShapeMapParameter.name) + paramFormat <- ShapeMapFormat.fromRequestParams( ShapeMapFormatParameter.name, partsMap ) - targetShapeMapFormat <- ShapeMapFormat.fromRequestParams( - TargetShapeMapFormatParameter.name, - 
partsMap - ) - activeShapeMapSource <- partsMap.optPartValue( + + paramSource <- partsMap.optPartValue( ShapemapSourceParameter.name ) _ = logger.debug( - s"Getting ShapeMap from params. ShapeMap tab: $activeShapeMapSource" + s"Getting ShapeMap from params. ShapeMap tab: $paramSource" ) - // Create the shapemap depending on the client's selected method - maybeShapeMap <- mkShapeMap( - paramShapemap, - shapeMapFormat, - targetShapeMapFormat, - activeShapeMapSource + // Create the shapemap instance + shapeMap = ShapeMap( + shapeMapPre = paramShapeMap, + format = paramFormat.getOrElse(ApiDefaults.defaultShapeMapFormat), + source = paramSource.getOrElse(defaultShapeMapSource) ) - } yield maybeShapeMap + } yield shapeMap.innerShapeMap.map(_ => shapeMap) } - - /** Create a ShapeMap instance, given its source and format - * - * @param optShapeMapData Optionally, the contents of the shapemap - * @param optShapeMapFormat Optionally, the format of the shapemap - * @param optTargetShapeMapFormat Optionally, the target format of the shapemap (for conversions) - * @param optShapeMapSource Optionally, the indicator of the shapemap source (raw, url or file) - * @return A new ShapeMap based on the given parameters - */ - private[api] def mkShapeMap( - optShapeMapData: Option[String], - optShapeMapFormat: Option[ShapeMapFormat], - optTargetShapeMapFormat: Option[ShapeMapFormat], - optShapeMapSource: Option[ShapeMapSource] - ): IO[Either[String, ShapeMap]] = - for { - shapeMap <- IO { - ShapeMap( - shapeMapPre = optShapeMapData, - shapeMapFormat = - optShapeMapFormat.getOrElse(ApiDefaults.defaultShapeMapFormat), - targetShapeMapFormat = optTargetShapeMapFormat, - shapeMapSource = optShapeMapSource.getOrElse(defaultShapeMapSource) - ) - } - - result = shapeMap.rawShapeMap match { - case Some(_) => Right(shapeMap) - case None => Left("Could not build the shapeMap") - } - } yield result } diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index d61e4646..6d061a2a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -10,6 +10,7 @@ import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.mkShapeMap import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import io.circe._ +import io.circe.syntax.EncoderOps import org.http4s._ import org.http4s.circe._ import org.http4s.client.Client @@ -76,7 +77,7 @@ class ShapeMapService(client: Client[IO]) // Try to get JSON representation case Right(_) => Try { - shapeMap.shapeMapJson + shapeMap.asJson } match { case Failure(exc) => errorResponseJson(exc.getMessage, InternalServerError) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala new file mode 100644 index 00000000..d10db48c --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala @@ -0,0 +1,134 @@ +//package es.weso.rdfshape.server.api.routes.wikibase.logic +// +//import cats.effect._ +//import com.typesafe.scalalogging.LazyLogging +//import es.weso.rdf.RDFReasoner +//import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema +/* import + * es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.WdSchemaParameter */ +//import es.weso.rdfshape.server.api.utils.parameters.PartsMap +//import es.weso.rdfshape.server.wikibase._ +//import es.weso.schema.{Schemas, Schema => SchemaW} +//import 
org.http4s._ +//import org.http4s.client._ +//import org.http4s.dsl.io._ +// +//case class WikibaseSchema( +// maybeSchemaParam: Option[Schema], +// maybeEntitySchema: Option[String], +// schemaStr: Option[String], +// wikibase: Wikibase = Wikidata +//) { +// +// def getSchema( +// maybeData: Option[RDFReasoner], +// client: Client[IO] +// ): IO[(Option[String], Either[String, SchemaW])] = { +// (maybeSchemaParam, maybeEntitySchema) match { +// case (None, None) => +// IO.pure((None, Left(s"No values for entity schema or schema"))) +// case (Some(schemaParam), None) => schemaParam.getSchema() +// case (None, Some(entitySchema)) => +// schemaFromEntitySchema(entitySchema, client) +// case (Some(schemaParam), Some(entitySchema)) => +// schemaFromEntitySchema(entitySchema, client) +// +// } +// } +// +// def schemaFromEntitySchema( +// es: String, +// client: Client[IO] +// ): IO[(Option[String], Either[String, SchemaW])] = { +// val uriSchema = wikibase.schemaEntityUri(es) +// val r: IO[(SchemaW, String)] = for { +// strSchema <- deref(uriSchema, client) +// schema <- Schemas.fromString(strSchema, "ShEXC", "ShEx") +// } yield (schema, strSchema) +// r.attempt.map { +// case Left(t) => (None, Left(t.getMessage)) +// case Right(pair) => +// val (schema, str) = pair +// (Some(str), Right(schema)) +// } +// +// } +// +// private def deref(uri: Uri, client: Client[IO]): IO[String] = { +// val reqSchema: Request[IO] = Request(method = GET, uri = uri) +// client.expect[String](reqSchema) +// } +//} +// +//object WikibaseSchema extends LazyLogging { +// +// private[api] def mkSchema( +// partsMap: PartsMap, +// data: Option[RDFReasoner], +// client: Client[IO] +// ): IO[(SchemaW, WikibaseSchema)] = { +// val r: IO[(SchemaW, WikibaseSchema)] = for { +// sp <- mkWikibaseSchemaParam(partsMap) +// p <- sp.getSchema(data, client) +// (maybeStr, maybeSchema) = p +// res <- maybeSchema match { +// case Left(str) => +// IO.raiseError( +// new RuntimeException(s"Error obtaining 
wikibase parameters: $str") +// ) +// case Right(schema) => IO.pure((schema, sp.copy(schemaStr = maybeStr))) +// } +// } yield res +// r +// } +// +// /** Build a [[WikibaseSchema]] from request parameters +// * +// * @param partsMap Request parameters +// * @return Either the [[WikibaseSchema]] or an error constructing it +// */ +// private[api] def mkWikibaseSchemaParam( +// partsMap: PartsMap +// ): IO[Either[String, WikibaseSchema]] = +// for { +// // WD Schema param as sent by client +// paramWdSchema <- partsMap.optPartValue(WdSchemaParameter.name) +// // endpointStr <- partsMap.partValue("endpoint") +// // endpoint <- either2f(IRI.fromString(endpointStr)) +// maybeSchema <- Schema.mkSchema(partsMap) +// result <- (paramWdSchema, maybeSchema) match { +// case (None, Left(err)) => +// val msg = +// s"Could not user supplied param and missing wdschema param: $err" +// logger.error(msg) +// IO.pure(Left(msg)) +// +// case (None, Right(schema)) => +// IO.pure( +// Right( +// WikibaseSchema.empty.copy(maybeSchemaParam = Option(schema)) +// ) +// ) +// case (Some(wdSchema), Left(err)) => +// logger.error(s"Could not build user supplied schema: $err") +// IO.pure( +// Right( +// WikibaseSchema.empty.copy(maybeEntitySchema = Option(wdSchema)) +// ) +// ) +// case (Some(wdSchema), Right(schema)) => +// IO.pure( +// Right( +// WikibaseSchema.empty +// .copy( +// maybeSchemaParam = Option(schema), +// maybeEntitySchema = Option(wdSchema) +// ) +// ) +// ) +// } +// } yield result +// +// private[api] def empty: WikibaseSchema = +// WikibaseSchema(None, None, None) +//} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala deleted file mode 100644 index 58151820..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchemaParam.scala +++ /dev/null @@ 
-1,118 +0,0 @@ -package es.weso.rdfshape.server.api.routes.wikibase.logic - -import cats.effect._ -import es.weso.rdf.RDFReasoner -import es.weso.rdfshape.server.api.routes.schema.logic.Schema -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.WdSchemaParameter -import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.rdfshape.server.wikibase._ -import es.weso.schema.{Schemas, Schema => SchemaW} -import org.http4s._ -import org.http4s.client._ -import org.http4s.dsl.io._ - -case class WikibaseSchemaParam( - maybeSchemaParam: Option[Schema], - maybeEntitySchema: Option[String], - schemaStr: Option[String], - wikidata: Wikibase = Wikidata -) { - - def getSchema( - maybeData: Option[RDFReasoner], - client: Client[IO] - ): IO[(Option[String], Either[String, SchemaW])] = { - (maybeSchemaParam, maybeEntitySchema) match { - case (None, None) => - IO.pure((None, Left(s"No values for entity schema or schema"))) - case (Some(schemaParam), None) => schemaParam.getSchema(maybeData) - case (None, Some(entitySchema)) => - schemaFromEntitySchema(entitySchema, client) - case (Some(schemaParam), Some(entitySchema)) => - schemaFromEntitySchema(entitySchema, client) - - } - } - - def schemaFromEntitySchema( - es: String, - client: Client[IO] - ): IO[(Option[String], Either[String, SchemaW])] = { - val uriSchema = wikidata.schemaEntityUri(es) - val r: IO[(SchemaW, String)] = for { - strSchema <- deref(uriSchema, client) - schema <- Schemas.fromString(strSchema, "ShEXC", "ShEx") - } yield (schema, strSchema) - r.attempt.map { - case Left(t) => (None, Left(t.getMessage)) - case Right(pair) => - val (schema, str) = pair - (Some(str), Right(schema)) - } - - } - - private def deref(uri: Uri, client: Client[IO]): IO[String] = { - val reqSchema: Request[IO] = Request(method = GET, uri = uri) - client.expect[String](reqSchema) - } -} - -object WikibaseSchemaParam { - - private[api] def mkSchema( - partsMap: PartsMap, - data: Option[RDFReasoner], 
- client: Client[IO] - ): IO[(SchemaW, WikibaseSchemaParam)] = { - val r: IO[(SchemaW, WikibaseSchemaParam)] = for { - sp <- mkWikibaseSchemaParam(partsMap) - p <- sp.getSchema(data, client) - (maybeStr, maybeSchema) = p - res <- maybeSchema match { - case Left(str) => - IO.raiseError( - new RuntimeException(s"Error obtaining wikibase parameters: $str") - ) - case Right(schema) => IO.pure((schema, sp.copy(schemaStr = maybeStr))) - } - } yield res - r - } - - private[api] def mkWikibaseSchemaParam( - partsMap: PartsMap - ): IO[WikibaseSchemaParam] = - for { - maybeSchema <- partsMap.eitherPartValue(WdSchemaParameter.name) - // endpointStr <- partsMap.partValue("endpoint") - // endpoint <- either2f(IRI.fromString(endpointStr)) - maybeSchemaParam <- Schema.mkSchema(partsMap).attempt - result <- (maybeSchema, maybeSchemaParam) match { - case (Left(_), Right(sp)) => - ok_f(WikibaseSchemaParam.empty.copy(maybeSchemaParam = Some(sp))) - case (Right(s), Left(_)) => - ok_f(WikibaseSchemaParam.empty.copy(maybeEntitySchema = Some(s))) - case (Right(s), Right(sp)) => - ok_f( - WikibaseSchemaParam.empty - .copy(maybeSchemaParam = Some(sp), maybeEntitySchema = Some(s)) - ) - case (Left(s), Left(errSp)) => - err_f(s"Error building schema param:\n${errSp}\n${s}") - } - } yield result - - private[api] def empty: WikibaseSchemaParam = - WikibaseSchemaParam(None, None, None) - - // TODO: Move this code to es.weso.utils.IOUtils - private def ok_f[A](v: A): IO[A] = IO.pure(v) - - private def err_f[A](err: String): IO[A] = - IO.raiseError[A](new RuntimeException(err)) - - private def either2f[A](e: Either[String, A]): IO[A] = - e.fold(s => IO.raiseError(new RuntimeException(s)), IO.pure) - -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala index f979bea5..bc9b5c42 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala @@ -1,45 +1,14 @@ package es.weso.rdfshape.server.api.routes.wikibase.service -import cats.data._ import cats.effect._ -import cats.implicits._ import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.RDFAsJenaModel -import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaOperations.schemaResult2json -import es.weso.rdfshape.server.api.routes.wikibase.logic.WikibaseEntity.{ - uriToEntity, - uriToEntity2 -} -import es.weso.rdfshape.server.api.routes.wikibase.logic.{ - WikibaseEntity, - WikibaseSchemaParam -} -import es.weso.rdfshape.server.api.routes.wikibase.service.WikibaseServiceUtils.{ - convertEntities, - convertLanguages, - mkShexerParams -} -import es.weso.rdfshape.server.api.utils.OptEitherF.ioFromEither -import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ -import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson -import es.weso.schema.{Schema, ShapeMapTrigger} -import es.weso.schemaInfer.{InferOptions, SchemaInfer} -import es.weso.shapemaps.{Status => _, _} -import es.weso.utils.IOUtils._ -import es.weso.wikibaserdf._ -import io.circe._ +import es.weso.shapemaps.{Status => _} import org.http4s._ -import org.http4s.circe._ import org.http4s.client._ import org.http4s.client.middleware.FollowRedirect import org.http4s.dsl._ -import org.http4s.headers._ import org.http4s.implicits._ -import org.http4s.multipart._ /** API service to handle wikibase (and mostly wikidata) related operations * Acts as an intermediate proxy between clients and the MediaWiki API @@ -66,6 +35,9 @@ class 
WikibaseService(client: Client[IO]) */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { + // TODO: uncomment routes and refactor along with wikishape client + case _ => InternalServerError("Pending") + /** Search for wikidata entities using MediaWiki's API. Search based on entity ID * See https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities * Receives a Wikidata entity label and a language and fetches entities in Wikidata @@ -73,63 +45,63 @@ class WikibaseService(client: Client[IO]) * - language [String]: Response desired language * Returns a JSON object after querying MediaWiki's "wbgetentities" endpoint */ - case GET -> Root / `api` / `verb` / "entityLabel" :? - WdEntityParameter(entity) +& - LanguageParameter(language) => - val uri = wikidataUri - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "wbgetentities") - .withQueryParam("props", "labels") - .withQueryParam("ids", entity) - .withQueryParam("languages", language) - .withQueryParam("format", "json") - - logger.debug(s"wikidata searchEntity uri: ${uri.toString}") - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - either <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - resp <- Ok(either.fold(Json.fromString, identity)) - } yield resp + // case GET -> Root / `api` / `verb` / "entityLabel" :? 
+ // WdEntityParameter(entity) +& + // LanguageParameter(language) => + // val uri = wikidataUri + // .withPath(Uri.Path.unsafeFromString("/w/api.php")) + // .withQueryParam("action", "wbgetentities") + // .withQueryParam("props", "labels") + // .withQueryParam("ids", entity) + // .withQueryParam("languages", language) + // .withQueryParam("format", "json") + // + // logger.debug(s"wikidata searchEntity uri: ${uri.toString}") + // + // val req: Request[IO] = Request(method = GET, uri = uri) + // for { + // either <- client.run(req).use { + // case Status.Successful(r) => + // r.attemptAs[Json].leftMap(_.message).value + // case r => + // r.as[String] + // .map(b => + /* s"Request $req failed with status ${r.status.code} and body $b" */ + // .asLeft[Json] + // ) + // } + // resp <- Ok(either.fold(Json.fromString, identity)) + // } yield resp /** Search for wikidata schemas using MediaWiki's API. * Receives a Wikidata schema label and fetches schemas in Wikidata * - wdSchema [String]: Wikidata schema label * Returns a JSON object after manually querying the schema's page */ - case GET -> Root / `api` / `verb` / "schemaContent" :? - WdSchemaParameter(wdSchema) => - val uri = wikidataUri.withPath( - Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") - ) - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[String].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[String] - ) - } - json: Json = eitherValues.fold( - e => Json.fromFields(List(("error", Json.fromString(e)))), - s => Json.fromFields(List(("result", Json.fromString(s)))) - ) - resp <- Ok(json) - } yield resp + // case GET -> Root / `api` / `verb` / "schemaContent" :? 
+ // WdSchemaParameter(wdSchema) => + // val uri = wikidataUri.withPath( + /* Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") */ + // ) + // + // val req: Request[IO] = Request(method = GET, uri = uri) + // for { + // eitherValues <- client.run(req).use { + // case Status.Successful(r) => + // r.attemptAs[String].leftMap(_.message).value + // case r => + // r.as[String] + // .map(b => + /* s"Request $req failed with status ${r.status.code} and body $b" */ + // .asLeft[String] + // ) + // } + // json: Json = eitherValues.fold( + // e => Json.fromFields(List(("error", Json.fromString(e)))), + // s => Json.fromFields(List(("result", Json.fromString(s)))) + // ) + // resp <- Ok(json) + // } yield resp /** Search for entities in a wikibase using MediaWiki's API. Search based on entity labels. * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities @@ -141,49 +113,49 @@ class WikibaseService(client: Client[IO]) * - continue [Int]: Offset where to continue a search * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint */ - case GET -> Root / `api` / `verb` / "searchEntity" :? 
- EndpointParameter(maybeEndpoint) +& - LabelParameter(label) +& - LanguageParameter(language) +& - LimitParameter(maybelimit) +& - ContinueParameter(maybeContinue) => - val limit: String = maybelimit.getOrElse(defaultLimit.toString) - val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - - logger.debug(s"Wikibase entity search with endpoint: $endpoint") - - val uri = Uri - .unsafeFromString(endpoint) - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "wbsearchentities") - .withQueryParam("search", label) - .withQueryParam("language", language) - .withQueryParam("limit", limit) - .withQueryParam("continue", continue) - .withQueryParam("format", "json") - - logger.debug(s"wikidata searchEntity uri: $uri") - - val req: Request[IO] = Request(method = GET, uri = uri) - - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- convertEntities(json) - } yield converted - resp <- Ok(eitherResult.fold(Json.fromString, identity)) - } yield resp + // case GET -> Root / `api` / `verb` / "searchEntity" :? 
+ // EndpointParameter(maybeEndpoint) +& + // LabelParameter(label) +& + // LanguageParameter(language) +& + // LimitParameter(maybelimit) +& + // ContinueParameter(maybeContinue) => + // val limit: String = maybelimit.getOrElse(defaultLimit.toString) + /* val continue: String = maybeContinue.getOrElse(defaultContinue.toString) */ + // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + // + // logger.debug(s"Wikibase entity search with endpoint: $endpoint") + // + // val uri = Uri + // .unsafeFromString(endpoint) + // .withPath(Uri.Path.unsafeFromString("/w/api.php")) + // .withQueryParam("action", "wbsearchentities") + // .withQueryParam("search", label) + // .withQueryParam("language", language) + // .withQueryParam("limit", limit) + // .withQueryParam("continue", continue) + // .withQueryParam("format", "json") + // + // logger.debug(s"wikidata searchEntity uri: $uri") + // + // val req: Request[IO] = Request(method = GET, uri = uri) + // + // for { + // eitherValues <- client.run(req).use { + // case Status.Successful(r) => + // r.attemptAs[Json].leftMap(_.message).value + // case r => + // r.as[String] + // .map(b => + /* s"Request $req failed with status ${r.status.code} and body $b" */ + // .asLeft[Json] + // ) + // } + // eitherResult = for { + // json <- eitherValues + // converted <- convertEntities(json) + // } yield converted + // resp <- Ok(eitherResult.fold(Json.fromString, identity)) + // } yield resp /** Search for properties in a wikibase using MediaWiki's API. Search based on property labels. * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities @@ -195,51 +167,51 @@ class WikibaseService(client: Client[IO]) * - continue [Int]: Offset where to continue a search * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint. */ - case GET -> Root / `api` / `verb` / "searchProperty" :? 
- EndpointParameter(maybeEndpoint) +& - LabelParameter(label) +& - LanguageParameter(language) +& - LimitParameter(maybelimit) +& - ContinueParameter(maybeContinue) => - val limit: String = maybelimit.getOrElse(defaultLimit.toString) - val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - - logger.debug(s"Wikibase property search with endpoint: $endpoint") - - val uri = Uri - .fromString(endpoint) - .valueOr(throw _) - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "wbsearchentities") - .withQueryParam("search", label) - .withQueryParam("language", language) - .withQueryParam("limit", limit) - .withQueryParam("continue", continue) - .withQueryParam("type", "property") - .withQueryParam("format", "json") - - logger.debug(s"wikidata searchProperty uri: $uri") - - val req: Request[IO] = Request(method = GET, uri = uri) - - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- convertEntities(json) - } yield converted - resp <- Ok(eitherResult.fold(Json.fromString, identity)) - } yield resp + // case GET -> Root / `api` / `verb` / "searchProperty" :? 
+ // EndpointParameter(maybeEndpoint) +& + // LabelParameter(label) +& + // LanguageParameter(language) +& + // LimitParameter(maybelimit) +& + // ContinueParameter(maybeContinue) => + // val limit: String = maybelimit.getOrElse(defaultLimit.toString) + /* val continue: String = maybeContinue.getOrElse(defaultContinue.toString) */ + // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + // + // logger.debug(s"Wikibase property search with endpoint: $endpoint") + // + // val uri = Uri + // .fromString(endpoint) + // .valueOr(throw _) + // .withPath(Uri.Path.unsafeFromString("/w/api.php")) + // .withQueryParam("action", "wbsearchentities") + // .withQueryParam("search", label) + // .withQueryParam("language", language) + // .withQueryParam("limit", limit) + // .withQueryParam("continue", continue) + // .withQueryParam("type", "property") + // .withQueryParam("format", "json") + // + // logger.debug(s"wikidata searchProperty uri: $uri") + // + // val req: Request[IO] = Request(method = GET, uri = uri) + // + // for { + // eitherValues <- client.run(req).use { + // case Status.Successful(r) => + // r.attemptAs[Json].leftMap(_.message).value + // case r => + // r.as[String] + // .map(b => + /* s"Request $req failed with status ${r.status.code} and body $b" */ + // .asLeft[Json] + // ) + // } + // eitherResult = for { + // json <- eitherValues + // converted <- convertEntities(json) + // } yield converted + // resp <- Ok(eitherResult.fold(Json.fromString, identity)) + // } yield resp /** Search for lexemes in a wikibase using MediaWiki's API. Search based on lexeme labels. * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities @@ -251,90 +223,90 @@ class WikibaseService(client: Client[IO]) * - continue [Int]: Offset where to continue a search * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint. */ - case GET -> Root / `api` / `verb` / "searchLexeme" :? 
- EndpointParameter(maybeEndpoint) +& - LabelParameter(label) +& - LanguageParameter(language) +& - LimitParameter(maybelimit) +& - ContinueParameter(maybeContinue) => - val limit: String = maybelimit.getOrElse(defaultLimit.toString) - val continue: String = maybeContinue.getOrElse(defaultContinue.toString) - val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - - logger.debug(s"Wikibase lexeme search with endpoint: $endpoint") - - val uri = Uri - .fromString(endpoint) - .valueOr(throw _) - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "wbsearchentities") - .withQueryParam("search", label) - .withQueryParam("language", language) - .withQueryParam("limit", limit) - .withQueryParam("continue", continue) - .withQueryParam("type", "lexeme") - .withQueryParam("format", "json") - - logger.debug(s"wikidata searchLexeme uri: $uri") - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- convertEntities(json) - } yield converted - resp <- Ok(eitherResult.fold(Json.fromString, identity)) - } yield resp + // case GET -> Root / `api` / `verb` / "searchLexeme" :? 
+ // EndpointParameter(maybeEndpoint) +& + // LabelParameter(label) +& + // LanguageParameter(language) +& + // LimitParameter(maybelimit) +& + // ContinueParameter(maybeContinue) => + // val limit: String = maybelimit.getOrElse(defaultLimit.toString) + /* val continue: String = maybeContinue.getOrElse(defaultContinue.toString) */ + // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + // + // logger.debug(s"Wikibase lexeme search with endpoint: $endpoint") + // + // val uri = Uri + // .fromString(endpoint) + // .valueOr(throw _) + // .withPath(Uri.Path.unsafeFromString("/w/api.php")) + // .withQueryParam("action", "wbsearchentities") + // .withQueryParam("search", label) + // .withQueryParam("language", language) + // .withQueryParam("limit", limit) + // .withQueryParam("continue", continue) + // .withQueryParam("type", "lexeme") + // .withQueryParam("format", "json") + // + // logger.debug(s"wikidata searchLexeme uri: $uri") + // + // val req: Request[IO] = Request(method = GET, uri = uri) + // for { + // eitherValues <- client.run(req).use { + // case Status.Successful(r) => + // r.attemptAs[Json].leftMap(_.message).value + // case r => + // r.as[String] + // .map(b => + /* s"Request $req failed with status ${r.status.code} and body $b" */ + // .asLeft[Json] + // ) + // } + // eitherResult = for { + // json <- eitherValues + // converted <- convertEntities(json) + // } yield converted + // resp <- Ok(eitherResult.fold(Json.fromString, identity)) + // } yield resp /** Search for all the languages used in a wikibase instance. * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata. * Returns a JSON object with the array of languages returned by the endpoint. */ - case GET -> Root / `api` / `verb` / "languages" :? 
- EndpointParameter(maybeEndpoint) => - val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - logger.debug(s"Wikibase language search with endpoint: $endpoint") - - val uri = Uri - .fromString(endpoint) - .valueOr(throw _) - .withPath(Uri.Path.unsafeFromString("/w/api.php")) - .withQueryParam("action", "query") - .withQueryParam("meta", "wbcontentlanguages") - .withQueryParam("wbclcontext", "term") - .withQueryParam("wbclprop", "code|autonym") - .withQueryParam("format", "json") - - val req: Request[IO] = Request(method = GET, uri = uri) - for { - eitherValues <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - eitherResult = for { - json <- eitherValues - converted <- convertLanguages(json) - } yield converted - resp <- Ok( - eitherResult.fold(Json.fromString, identity) - ) - } yield resp + // case GET -> Root / `api` / `verb` / "languages" :? 
+ // EndpointParameter(maybeEndpoint) => + // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) + // logger.debug(s"Wikibase language search with endpoint: $endpoint") + // + // val uri = Uri + // .fromString(endpoint) + // .valueOr(throw _) + // .withPath(Uri.Path.unsafeFromString("/w/api.php")) + // .withQueryParam("action", "query") + // .withQueryParam("meta", "wbcontentlanguages") + // .withQueryParam("wbclcontext", "term") + // .withQueryParam("wbclprop", "code|autonym") + // .withQueryParam("format", "json") + // + // val req: Request[IO] = Request(method = GET, uri = uri) + // for { + // eitherValues <- client.run(req).use { + // case Status.Successful(r) => + // r.attemptAs[Json].leftMap(_.message).value + // case r => + // r.as[String] + // .map(b => + /* s"Request $req failed with status ${r.status.code} and body $b" */ + // .asLeft[Json] + // ) + // } + // eitherResult = for { + // json <- eitherValues + // converted <- convertLanguages(json) + // } yield converted + // resp <- Ok( + // eitherResult.fold(Json.fromString, identity) + // ) + // } yield resp /** Execute a given SPARQL query to a given SPARQL endpoint of a wikibase instance. * Receives a target endpoint and the query text. 
@@ -346,40 +318,40 @@ class WikibaseService(client: Client[IO]) * - results [Object]: Query results * - bindings: [Array]: Query results, each item being an object mapping each variable to its value */ - case req @ POST -> Root / `api` / `verb` / "query" => - req.decode[Multipart[IO]] { m => - { - val partsMap = PartsMap(m.parts) - for { - optQuery <- partsMap.optPartValue("query") - optEndpoint <- partsMap.optPartValue("endpoint") - endpoint = optEndpoint.getOrElse(wikidataQueryUri.toString()) - query = optQuery.getOrElse("") - req: Request[IO] = - Request( - method = GET, - uri = Uri - .fromString(endpoint) - .valueOr(throw _) - .withQueryParam("query", query) - ) - .withHeaders( - `Accept`(MediaType.application.`json`) - ) - eitherValue <- client.run(req).use { - case Status.Successful(r) => - r.attemptAs[Json].leftMap(_.message).value - case r => - r.as[String] - .map(b => - s"Request $req failed with status ${r.status.code} and body $b" - .asLeft[Json] - ) - } - resp <- Ok(eitherValue.fold(Json.fromString, identity)) - } yield resp - } - } + // case req @ POST -> Root / `api` / `verb` / "query" => + // req.decode[Multipart[IO]] { m => + // { + // val partsMap = PartsMap(m.parts) + // for { + // optQuery <- partsMap.optPartValue("query") + // optEndpoint <- partsMap.optPartValue("endpoint") + // endpoint = optEndpoint.getOrElse(wikidataQueryUri.toString()) + // query = optQuery.getOrElse("") + // req: Request[IO] = + // Request( + // method = GET, + // uri = Uri + // .fromString(endpoint) + // .valueOr(throw _) + // .withQueryParam("query", query) + // ) + // .withHeaders( + // `Accept`(MediaType.application.`json`) + // ) + // eitherValue <- client.run(req).use { + // case Status.Successful(r) => + // r.attemptAs[Json].leftMap(_.message).value + // case r => + // r.as[String] + // .map(b => + /* s"Request $req failed with status ${r.status.code} and body $b" */ + // .asLeft[Json] + // ) + // } + // resp <- Ok(eitherValue.fold(Json.fromString, identity)) + // } 
yield resp + // } + // } /** Attempts to extract an schema (ShEx) from a given entity present in wikidata. * Receives an entity URI: @@ -388,64 +360,64 @@ class WikibaseService(client: Client[IO]) * - entity [String]: URI of the entity whose information we searched * - result [String]: Extracted schema */ - case req @ POST -> Root / `api` / `verb` / "extract" => - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - val r: EitherT[IO, String, Response[IO]] = for { - label <- EitherT(partsMap.eitherPartValue("entity")) - info <- either2es[WikibaseEntity](uriToEntity(label)) - _ <- { - logger.debug(s"Extraction URI: ${info.uri}"); - ok_esf[Unit, IO](()) - } - strRdf <- io2es(redirectClient.expect[String](info.uri)) - eitherInferred <- io2es( - RDFAsJenaModel - .fromString(strRdf, "TURTLE") - .flatMap( - _.use(rdf => - for { - rdfSerialized <- rdf.serialize("TURTLE") - nodeSelector = RDFNodeSelector(IRI(label)) - inferred <- SchemaInfer.runInferSchema( - rdf, - nodeSelector, - "ShEx", - IRI(s"http://example.org/Shape_${info.localName}"), - InferOptions.defaultOptions.copy(maxFollowOn = 3) - ) - } yield inferred - ) - ) - ) - pair <- either2es[(Schema, ResultShapeMap)](eitherInferred) - shExCStr <- io2es({ - val (schema, _) = pair - schema.serialize("SHEXC") - }) - _ <- { - logger.trace(s"ShExC str: $shExCStr"); - ok_es[Unit](()) - } - resp <- io2es( - Ok( - Json.fromFields( - List( - ("entity", Json.fromString(label)), - ("result", Json.fromString(shExCStr)) - ) - ) - ) - ) - } yield resp - for { - either <- r.value - resp <- either.fold( - err => errorResponseJson(err, InternalServerError), - r => IO.pure(r) - ) - } yield resp - } + // case req @ POST -> Root / `api` / `verb` / "extract" => + // req.decode[Multipart[IO]] { m => + // val partsMap = PartsMap(m.parts) + // val r: EitherT[IO, String, Response[IO]] = for { + // label <- EitherT(partsMap.eitherPartValue("entity")) + // info <- either2es[WikibaseEntity](uriToEntity(label)) + // _ <- { + // 
logger.debug(s"Extraction URI: ${info.uri}"); + // ok_esf[Unit, IO](()) + // } + // strRdf <- io2es(redirectClient.expect[String](info.uri)) + // eitherInferred <- io2es( + // RDFAsJenaModel + // .fromString(strRdf, "TURTLE") + // .flatMap( + // _.use(rdf => + // for { + // rdfSerialized <- rdf.serialize("TURTLE") + // nodeSelector = RDFNodeSelector(IRI(label)) + // inferred <- SchemaInfer.runInferSchema( + // rdf, + // nodeSelector, + // "ShEx", + // IRI(s"http://example.org/Shape_${info.localName}"), + // InferOptions.defaultOptions.copy(maxFollowOn = 3) + // ) + // } yield inferred + // ) + // ) + // ) + // pair <- either2es[(Schema, ResultShapeMap)](eitherInferred) + // shExCStr <- io2es({ + // val (schema, _) = pair + // schema.serialize("SHEXC") + // }) + // _ <- { + // logger.trace(s"ShExC str: $shExCStr"); + // ok_es[Unit](()) + // } + // resp <- io2es( + // Ok( + // Json.fromFields( + // List( + // ("entity", Json.fromString(label)), + // ("result", Json.fromString(shExCStr)) + // ) + // ) + // ) + // ) + // } yield resp + // for { + // either <- r.value + // resp <- either.fold( + // err => errorResponseJson(err, InternalServerError), + // r => IO.pure(r) + // ) + // } yield resp + // } // TODO: This one doesn't work. It gives a timeout response /** Attempts to extract an schema (ShEx) from a given entity present in wikidata using "shexer". 
@@ -456,36 +428,36 @@ class WikibaseService(client: Client[IO]) * - entity [String]: URI of the entity whose information we searched * - result [String]: Extracted schema */ - case req @ POST -> Root / `api` / `verb` / "shexer" => - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - val r: EitherT[IO, String, Response[IO]] = for { - label <- EitherT(partsMap.eitherPartValue("entity")) - jsonParams <- either2es[Json](mkShexerParams(label)) - postRequest = Request[IO]( - method = POST, - uri = uri"http://156.35.94.158:8081/shexer" - ).withHeaders(`Content-Type`(MediaType.application.`json`)) - .withEntity[Json](jsonParams) - _ <- { - logger.debug(s"URI: ${jsonParams.spaces2}"); - ok_es[Unit](()) - } - result <- f2es(redirectClient.expect[Json](postRequest)) - _ <- { - logger.trace(s"Result\n${result.spaces2}"); - ok_es[Unit](()) - } - resp <- f2es(Ok(result)) - } yield resp - for { - either <- r.value - resp <- either.fold( - err => errorResponseJson(err, InternalServerError), - r => IO.pure(r) - ) - } yield resp - } + // case req @ POST -> Root / `api` / `verb` / "shexer" => + // req.decode[Multipart[IO]] { m => + // val partsMap = PartsMap(m.parts) + // val r: EitherT[IO, String, Response[IO]] = for { + // label <- EitherT(partsMap.eitherPartValue("entity")) + // jsonParams <- either2es[Json](mkShexerParams(label)) + // postRequest = Request[IO]( + // method = POST, + // uri = uri"http://156.35.94.158:8081/shexer" + // ).withHeaders(`Content-Type`(MediaType.application.`json`)) + // .withEntity[Json](jsonParams) + // _ <- { + // logger.debug(s"URI: ${jsonParams.spaces2}"); + // ok_es[Unit](()) + // } + // result <- f2es(redirectClient.expect[Json](postRequest)) + // _ <- { + // logger.trace(s"Result\n${result.spaces2}"); + // ok_es[Unit](()) + // } + // resp <- f2es(Ok(result)) + // } yield resp + // for { + // either <- r.value + // resp <- either.fold( + // err => errorResponseJson(err, InternalServerError), + // r => IO.pure(r) + // ) + // } 
yield resp + // } /** Validate entities in a wikibase using wikidata schemas or shape expressions. * Receives several data: @@ -498,54 +470,39 @@ class WikibaseService(client: Client[IO]) * - shape [String]: Shape of the schema which will be compared against the entity * Returns a JSON object with the results (pending). */ - case req @ POST -> Root / `api` / `verb` / "validate" => - logger.debug(s"Wikidata validate request: $req") - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - val r: IO[Response[IO]] = for { - eitherItem <- partsMap.eitherPartValue("entity") - _ <- { - logger.debug(eitherItem.toString); - IO.pure(()) - } - item <- ioFromEither(eitherItem) - _ <- { - logger.debug(item); - IO.pure(()) - } - info <- ioFromEither(uriToEntity2(item)) - _ <- { - logger.debug(info.toString); - IO.pure(()) - } - pair <- WikibaseSchemaParam.mkSchema(partsMap, None, client) - _ <- { - logger.debug(pair.toString()); - IO.pure(()) - } - (schema, wbp) = pair - iriItem <- ioFromEither(IRI.fromString(info.sourceUri)) - shapeMap <- ioFromEither(ShapeMap.empty.add(iriItem, Start)) - triggerMode = ShapeMapTrigger(shapeMap) - result <- for { - res1 <- WikibaseRDF.wikidata - res2 <- RDFAsJenaModel.empty - vv <- (res1, res2).tupled.use { case (rdf, builder) => - for { - r <- schema.validate(rdf, triggerMode, builder) - json <- schemaResult2json(r) - } yield json - } - } yield vv - resp <- Ok(result) - } yield resp - r.attempt.flatMap( - _.fold( - s => errorResponseJson(s.getMessage, InternalServerError), - IO.pure - ) - ) - } + // case req @ POST -> Root / `api` / `verb` / "validate" => + // logger.debug(s"Wikidata validate request: $req") + // req.decode[Multipart[IO]] { m => + // val partsMap = PartsMap(m.parts) + // val r: IO[Response[IO]] = for { + // eitherEntity <- partsMap.eitherPartValue("entity") + // item <- ioFromEither(eitherEntity) + // info <- ioFromEither(uriToEntity2(item)) + // pair <- WikibaseSchema.mkSchema(partsMap, None, client) + // + // 
(schema, wbp) = pair + // iriItem <- ioFromEither(IRI.fromString(info.sourceUri)) + // shapeMap <- ioFromEither(ShapeMap.empty.add(iriItem, Start)) + // triggerMode = ShapeMapTrigger(shapeMap) + // result <- for { + // res1 <- WikibaseRDF.wikidata + // res2 <- RDFAsJenaModel.empty + // vv <- (res1, res2).tupled.use { case (rdf, builder) => + // for { + /* validationResult <- schema.validate(rdf, triggerMode, builder) */ + /* // json <- schemaResult2json(validationResult) */ + /* } yield SchemaValidate.encodeValidationResult(validationResult) */ + // } + // } yield vv + // resp <- Ok(result) + // } yield resp + // r.attempt.flatMap( + // _.fold( + // s => errorResponseJson(s.getMessage, InternalServerError), + // IO.pure + // ) + // ) + // } } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala index 14f9852b..af305a67 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala @@ -8,6 +8,7 @@ private[service] class WikibaseServiceUtils {} /** Static utilities used by the Wikibase service */ +//noinspection HttpUrlsUsage,SpellCheckingInspection object WikibaseServiceUtils { /** For a given entity, create the JSON structure accepted by the Shexer API diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala index aa48f8dd..555ea4a9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -16,8 +16,6 @@ object IncomingRequestParameters { lazy val targetDataFormat = "targetDataFormat" lazy val schema = "schema" - lazy val schemaUrl = "schemaUrl" - lazy val schemaFile = "schemaFile" lazy val schemaFormat = "schemaFormat" lazy val schemaEngine = "schemaEngine" lazy val targetSchemaFormat = "targetSchemaFormat" @@ -30,10 +28,9 @@ object IncomingRequestParameters { lazy val node = "node" lazy val nodeSelector = "nodeSelector" - lazy val shapemap = "shapemap" - lazy val shape_map = "shape-map" - lazy val shapemapFormat = "shapemapFormat" - lazy val targetShapemapFormat = "targetShapemapFormat" + lazy val shapemap = "shapemap" + lazy val shape_map = "shape-map" + lazy val shapemapFormat = "shapemapFormat" lazy val query = "query" @@ -89,27 +86,16 @@ object IncomingRequestParameters { val name: String = targetDataFormat } - /** Parameter expected to contain raw schema data (URL encoded) + /** Parameter expected to contain schema contents (URL encoded) + * + * @note These contents may be raw data, a URL with the schema or a File with the schema. 
+ * The source of the schema is therefore specified by [[SchemaSourceParameter]] */ object SchemaParameter extends OptionalQueryParamDecoderMatcher[String](schema) { val name: String = schema } - /** Parameter expected to contain a URL where a validation schema is located - */ - object SchemaUrlParameter - extends OptionalQueryParamDecoderMatcher[String](schemaUrl) { - val name: String = schemaUrl - } - - /** Parameter expected to contain the contents a file where a validation schema is located - */ - object SchemaFileParameter - extends OptionalQueryParamDecoderMatcher[String](schemaFile) { - val name: String = schemaFile - } - /** Parameter expected to contain an schema format name, referencing the user's schema format */ object SchemaFormatParameter @@ -203,13 +189,6 @@ object IncomingRequestParameters { val name: String = shapemapFormat } - /** Parameter expected to contain a shapemap format name, referencing the target format of a conversion - */ - object TargetShapeMapFormatParameter - extends OptionalQueryParamDecoderMatcher[String](targetShapemapFormat) { - val name: String = targetShapemapFormat - } - /** Parameter expected to contain SPARQL query data contents (URL encoded) * * @note These contents may be raw data, a URL with the query or a File with the query. 
diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala index 53618e7d..94f44fd1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/json/JsonUtils.scala @@ -100,14 +100,48 @@ object JsonUtils extends Http4sDsl[IO] { /** Convert a given prefix map to JSON format for API operations * * @param prefixMap Input prefix map - * @return JSON representation of the prefix map + * @return JSON representation of the prefix map (as an object) + * @note Example return: { + * "schema": "", + * "xsd: "" + * } */ - def prefixMap2Json(prefixMap: PrefixMap): Json = { + def prefixMap2JsonObject(prefixMap: PrefixMap): Json = { Json.fromFields(prefixMap.pm.map { case (prefix, iri) => (prefix.str, Json.fromString(iri.getLexicalForm)) }) } + /** Convert a given prefix map to JSON format for API operations + * + * @param prefixMap Input prefix map + * @return JSON representation of the prefix map (as an array of items) + * @note Example return: + * [ + * { + * "prefixName": "schema", + * "prefixIRI": "", + * }, + * { + * "prefixName": "xsd", + * "prefixIRI": "", + * } + * ] + */ + def prefixMap2JsonArray(prefixMap: PrefixMap): Json = { + Json.fromValues(prefixMap.pm.map { case (prefix, iri) => + Json.fromFields( + List( + ( + "prefixName", + Json.fromString(if(prefix.str.isBlank) ":" else prefix.str) + ), + ("prefixIRI", Json.fromString(iri.toString())) + ) + ) + }) + } + /** @param iri IRI to be converted * @param prefixMap Optionally, the prefix map with the IRI to be converted * @return JSON representation of the IRI From 6afe0d71a09ae082e1db70275bbb773d456be89f Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Mon, 20 Dec 2021 11:45:29 +0100 Subject: [PATCH 28/32] Further documenting and refactoring --- build.sbt | 2 +- docs/api-usage/usage_examples.md | 8 +- 
.../server/api/definitions/ApiDefaults.scala | 6 +- .../rdfshape/server/api/format/Format.scala | 2 +- .../api/format/dataFormats/DataFormat.scala | 4 +- .../format/dataFormats/GraphicFormat.scala | 22 +- .../api/format/dataFormats/RdfFormat.scala | 24 +- .../schemaFormats/SchemaFormat.scala | 1 + .../data/logic/operations/DataConvert.scala | 87 ++--- .../data/logic/types/DataEndpoint.scala | 2 +- .../routes/data/logic/types/DataSingle.scala | 9 +- .../api/routes/data/service/DataService.scala | 16 +- .../endpoint/logic/query/SparqlQuery.scala | 4 +- .../routes/fetch/service/FetchService.scala | 21 +- .../schema/logic/aux/SchemaOperations.scala | 102 ------ .../logic/operations/SchemaConvert.scala | 346 +++++++++++++++--- .../logic/operations/SchemaValidate.scala | 5 +- .../schema/logic/trigger/TriggerMode.scala | 27 +- .../logic/trigger/TriggerShapeMap.scala | 62 +++- .../trigger/TriggerTargetDeclarations.scala | 25 +- .../routes/schema/logic/types/Schema.scala | 3 +- .../schema/logic/types/SchemaSimple.scala | 31 +- .../routes/schema/service/SchemaService.scala | 313 ++++++++-------- .../api/routes/shapemap/logic/ShapeMap.scala | 33 +- .../wikibase/service/WikibaseService.scala | 3 +- .../IncomingRequestParameters.scala | 48 +-- 26 files changed, 716 insertions(+), 490 deletions(-) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala diff --git a/build.sbt b/build.sbt index ae819a74..84ec7fc4 100644 --- a/build.sbt +++ b/build.sbt @@ -302,7 +302,7 @@ lazy val loggingVersion = "3.9.4" lazy val groovyVersion = "3.0.8" lazy val munitVersion = "0.7.27" lazy val munitEffectVersion = "1.0.6" -lazy val plantumlVersion = "8059" +lazy val plantumlVersion = "1.2021.14" lazy val scalajVersion = "2.4.2" lazy val scalatagsVersion = "0.9.4" // WESO dependencies diff --git a/docs/api-usage/usage_examples.md b/docs/api-usage/usage_examples.md index b63d21b8..d7c7cc74 100644 --- 
a/docs/api-usage/usage_examples.md +++ b/docs/api-usage/usage_examples.md @@ -14,7 +14,7 @@ curl https://api.rdfshape.weso.es/api/schema/validate -G --data-urlencode 'data= ## Validate (POST with file input) ``` -curl -X POST -F 'data=@data.ttl' -F 'schema=@data.shex' -F 'shapeMap=@data.shapeMap' -F 'triggerMode=ShapeMap' -F 'engine=ShEx' https://api.rdfshape.weso.es/api/schema/validate +curl -X POST -F 'data=@data.ttl' -F 'schema=@data.shex' -F 'shapeMap=@data.shapeMap' -F 'triggerMode=shapeMap' -F 'engine=ShEx' https://api.rdfshape.weso.es/api/schema/validate ``` ## Wikidata examples @@ -37,9 +37,9 @@ the [Cytoscape component](https://github.com/plotly/react-cytoscapejs). ``` curl -k -i -X POST -H "Content-type:multipart/form-data" 'https://api.rdfshape.weso.es/api/data/convert' \ - --form-string 'dataUrl=http://tb.plazi.org/GgServer/rdf/9D767B515A0BFFC3C0F7919FF301FC8D' \ - --form-string 'dataFormatUrl=rdf/xml' --form-string 'targetDataFormat=JSON' \ + --form-string 'data=http://tb.plazi.org/GgServer/rdf/9D767B515A0BFFC3C0F7919FF301FC8D' \ + --form-string 'dataFormat=rdf/xml' --form-string 'dataSource=byUrl' --form-string 'targetDataFormat=JSON' \ ``` Notice that as we are querying the @API_URL@ service, which requires -a SSL connection, we use option `-k`. \ No newline at end of file +an SSL connection, we use option `-k`. 
\ No newline at end of file diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index 73c6838c..de9956ee 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -8,7 +8,7 @@ import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.{ } import es.weso.rdfshape.server.api.format.dataFormats.{ DataFormat, - RDFFormat, + RdfFormat, ShapeMapFormat } import es.weso.rdfshape.server.api.routes.data.logic.DataSource @@ -19,13 +19,14 @@ import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.ShapeMapSource import es.weso.schema.{Schema, Schemas, ShapeMapTrigger} import es.weso.shapemaps.ShapeMap +import es.weso.utils.FileUtils /** Application-wide defaults */ case object ApiDefaults { val availableDataFormats: List[DataFormat] = DataFormat.availableFormats val defaultDataFormat: DataFormat = DataFormat.defaultFormat - val defaultRdfFormat: RDFFormat = RDFFormat.defaultFormat + val defaultRdfFormat: RdfFormat = RdfFormat.defaultFormat val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats val defaultSchemaFormat: SchemaFormat = ShaclFormat.defaultFormat val defaultSchemaFormatName: String = defaultSchemaFormat.name @@ -52,5 +53,6 @@ case object ApiDefaults { val defaultActiveShapeMapTab = "#shapeMapTextArea" val defaultShapeLabel: IRI = IRI("Shape") val relativeBase: Some[IRI] = Some(IRI("internal://base/")) + def localBase: IRI = IRI(FileUtils.currentFolderURL) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala index a080551d..4b7d63be 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/Format.scala @@ -86,7 +86,7 @@ trait FormatCompanion[F <: Format] extends LazyLogging { * @return Optionally, a new Format instance of type F with the format */ def fromRequestParams( - parameter: String, + parameter: String = "format", parameterMap: PartsMap ): IO[Option[F]] = { for { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala index ca4baff5..52e38947 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/DataFormat.scala @@ -4,7 +4,7 @@ import es.weso.rdfshape.server.api.format._ import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat import org.http4s.MediaType -/** Extension of the Format interface to represent RDF data formats +/** Extension of the Format interface to represent generic data formats (RDF, schema, shapeMaps...) 
*/ class DataFormat(formatName: String, formatMimeType: MediaType) extends Format { override val name: String = formatName @@ -16,7 +16,7 @@ class DataFormat(formatName: String, formatMimeType: MediaType) extends Format { object DataFormat extends FormatCompanion[DataFormat] { override lazy val availableFormats: List[DataFormat] = - (RDFFormat.availableFormats ++ + (RdfFormat.availableFormats ++ SchemaFormat.availableFormats ++ HtmlFormat.availableFormats ++ GraphicFormat.availableFormats ++ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala index 4f64c0d9..06f3dbbe 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/GraphicFormat.scala @@ -1,6 +1,6 @@ package es.weso.rdfshape.server.api.format.dataFormats -import es.weso.rdfshape.server.api.format.FormatCompanion +import es.weso.rdfshape.server.api.format.{Format, FormatCompanion} import org.http4s.MediaType /** Dummy class to differentiate formats used for graphical representations from the more generic DataFormat @@ -8,18 +8,20 @@ import org.http4s.MediaType * @see {@link DataFormat} */ sealed class GraphicFormat(formatName: String, formatMimeType: MediaType) - extends DataFormat(formatName, formatMimeType) {} + extends DataFormat(formatName, formatMimeType) { + def this(format: Format) = { + this(format.name, format.mimeType) + } +} /** Companion object with all RDFFormat static utilities */ object GraphicFormat extends FormatCompanion[GraphicFormat] { override lazy val availableFormats: List[GraphicFormat] = - List( - Svg, - Png - ) + List(Svg, Png, PS) override val defaultFormat: GraphicFormat = Svg + } /** Represents the mime-type "image/svg+xml" @@ -37,3 +39,11 @@ case object Png formatName = "PNG", formatMimeType 
= MediaType.image.png ) + +/** Represents the mime-type "application/ps" + */ +case object PS + extends GraphicFormat( + formatName = "PS", + formatMimeType = new MediaType("application", "ps") + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala index 6677a840..80ac22f5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/RdfFormat.scala @@ -7,14 +7,14 @@ import org.http4s.MediaType * * @see {@link DataFormat} */ -sealed class RDFFormat(formatName: String, formatMimeType: MediaType) - extends DataFormat(formatName, formatMimeType) {} +sealed class RdfFormat(formatName: String, formatMimeType: MediaType) + extends DataFormat(formatName, formatMimeType) /** Companion object with all RDFFormat static utilities */ -object RDFFormat extends FormatCompanion[RDFFormat] { +object RdfFormat extends FormatCompanion[RdfFormat] { - override lazy val availableFormats: List[RDFFormat] = + override lazy val availableFormats: List[RdfFormat] = List( Turtle, NTriples, @@ -24,13 +24,13 @@ object RDFFormat extends FormatCompanion[RDFFormat] { RdfXml, RdfJson ) - override val defaultFormat: RDFFormat = Turtle + override val defaultFormat: RdfFormat = Turtle } /** Represents the mime-type "text/turtle" */ case object Turtle - extends RDFFormat( + extends RdfFormat( formatName = "Turtle", formatMimeType = new MediaType("text", "turtle") ) @@ -38,7 +38,7 @@ case object Turtle /** Represents the mime-type "application/n-triples" */ case object NTriples - extends RDFFormat( + extends RdfFormat( formatName = "N-Triples", formatMimeType = new MediaType("application", "n-triples") ) @@ -46,7 +46,7 @@ case object NTriples /** Represents the mime-type "application/n-quads" */ case object NQuads - extends 
RDFFormat( + extends RdfFormat( formatName = "N-Quads", formatMimeType = new MediaType("application", "n-quads") ) @@ -54,7 +54,7 @@ case object NQuads /** Represents the mime-type "application/trig" */ case object Trig - extends RDFFormat( + extends RdfFormat( formatName = "TriG", formatMimeType = new MediaType("application", "trig") ) @@ -62,7 +62,7 @@ case object Trig /** Represents the mime-type "application/ld+json" */ case object JsonLd - extends RDFFormat( + extends RdfFormat( formatName = "JSON-LD", formatMimeType = new MediaType("application", "ld+json") ) @@ -70,7 +70,7 @@ case object JsonLd /** Represents the mime-type "application/rdf+xml" */ case object RdfXml - extends RDFFormat( + extends RdfFormat( formatName = "RDF/XML", formatMimeType = new MediaType("application", "rdf+xml") ) @@ -78,7 +78,7 @@ case object RdfXml /** Represents the mime-type "application/json" */ case object RdfJson - extends RDFFormat( + extends RdfFormat( formatName = "RDF/JSON", formatMimeType = MediaType.application.json ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/SchemaFormat.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/SchemaFormat.scala index b9bce5e8..1af48658 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/SchemaFormat.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/format/dataFormats/schemaFormats/SchemaFormat.scala @@ -5,6 +5,7 @@ import es.weso.rdfshape.server.api.format.{Format, FormatCompanion} import org.http4s.MediaType /** Dummy class to differentiate shapemap formats from the more generic DataFormat + * * @see {@link DataFormat} */ class SchemaFormat(formatName: String, formatMimeType: MediaType) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala index 2b8c8807..e43cba68 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataConvert.scala @@ -5,11 +5,15 @@ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.NONE import es.weso.rdf.jena.RDFAsJenaModel import es.weso.rdf.sgraph.{RDF2SGraph, RDFDotPreferences} -import es.weso.rdfshape.server.api.format.dataFormats.DataFormat +import es.weso.rdfshape.server.api.format.dataFormats.{ + DataFormat, + Dot, + GraphicFormat, + RdfFormat, + Json => JsonFormat +} import es.weso.rdfshape.server.api.routes.data.logic.operations.DataConvert.successMessage import es.weso.rdfshape.server.api.routes.data.logic.types.{Data, DataSingle} -import es.weso.utils.IOUtils.either2io -import guru.nidi.graphviz.engine.Format import io.circe.syntax.EncoderOps import io.circe.{Encoder, Json} @@ -31,24 +35,11 @@ final case class DataConvert private ( */ private[api] object DataConvert extends LazyLogging { - /** List of graph format names - */ - private lazy val availableGraphFormatNames: immutable.Seq[String] = - availableGraphFormats.map(_.name) - /** List of available RDF format names (uppercase) */ private lazy val rdfDataFormatNames: immutable.Seq[String] = RDFAsJenaModel.availableFormats.map(_.toUpperCase) - /** List of available graph formats - */ - private lazy val availableGraphFormats = List( - GraphFormat("SVG", "application/svg", Format.SVG), - GraphFormat("PNG", "application/png", Format.PNG), - GraphFormat("PS", "application/ps", Format.PS) - ) - private val successMessage = "Conversion successful" /** Convert a [[DataConvert]] to its JSON representation @@ -71,7 +62,7 @@ private[api] object DataConvert extends LazyLogging { * * @param inputData Input conversion data * @param targetFormat Target 
format - * @return A new [[Data]] instance + * @return A new [[DataConvert]] instance with the conversion information */ def dataConvert( inputData: Data, @@ -91,51 +82,49 @@ private[api] object DataConvert extends LazyLogging { conversionResult <- rdf.use(rdfReasoner => { for { sgraph <- RDF2SGraph.rdf2sgraph(rdfReasoner) - convertedData <- targetFormat.name.toUpperCase match { + convertedData <- targetFormat match { // JSON: convert to JSON String and return a DataSingle with it - case "JSON" => + case JsonFormat => IO { DataSingle( dataPre = Option(sgraph.toJson.spaces2), - dataFormat = targetFormat, + dataFormat = JsonFormat, inference = targetInference, dataSource = inputData.dataSource ) } - case "DOT" => + case Dot => IO { DataSingle( dataPre = Option(sgraph.toDot(RDFDotPreferences.defaultRDFPrefs)), - dataFormat = targetFormat, + dataFormat = Dot, inference = targetInference, dataSource = inputData.dataSource ) } - case tFormat if rdfDataFormatNames.contains(tFormat) => - for { - data <- rdfReasoner.serialize(tFormat) - } yield DataSingle( - dataPre = Option(data), - dataFormat = targetFormat, - inference = targetInference, - dataSource = inputData.dataSource - ) - case tFormat if availableGraphFormatNames.contains(tFormat) => - for { - eitherFormat <- either2io(getTargetFormat(tFormat)) - dotStr = sgraph.toDot(RDFDotPreferences.defaultRDFPrefs) - data <- eitherFormat.fold( - err => IO.raiseError(new RuntimeException(err)), - _ => IO(dotStr) + case _ if RdfFormat.availableFormats.contains(targetFormat) => + rdfReasoner + .serialize(targetFormat.name) + .map(data => { + DataSingle( + dataPre = Option(data), + dataFormat = targetFormat, + inference = targetInference, + dataSource = inputData.dataSource + ) + }) + case _ if GraphicFormat.availableFormats.contains(targetFormat) => + IO { + DataSingle( + dataPre = + Option(sgraph.toDot(RDFDotPreferences.defaultRDFPrefs)), + dataFormat = targetFormat, + inference = targetInference, + dataSource = 
inputData.dataSource ) - } yield DataSingle( - dataPre = Option(data), - dataFormat = targetFormat, - inference = targetInference, - dataSource = inputData.dataSource - ) + } case t => IO.raiseError(new RuntimeException(s"Unsupported format: $t")) } @@ -143,14 +132,4 @@ private[api] object DataConvert extends LazyLogging { }) } yield conversionResult } - - private def getTargetFormat(str: String): Either[String, Format] = - str.toUpperCase match { - case "SVG" => Right(Format.SVG) - case "PNG" => Right(Format.PNG) - case "PS" => Right(Format.PS) - case _ => Left(s"Unsupported format $str") - } - - private case class GraphFormat(name: String, mime: String, fmt: Format) } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala index 7519a04b..ebeffe96 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataEndpoint.scala @@ -122,7 +122,7 @@ private[api] object DataEndpoint extends DataCompanion[DataEndpoint] { case Some(endpoint) => endpoint match { case endpointRegex(endpoint) => Some(endpoint) - case _ => None + case rawEndpoint => Some(rawEndpoint) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala index 5a831b47..7a8b3d32 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala @@ -6,7 +6,7 @@ import es.weso.rdf.jena._ import es.weso.rdf.nodes.IRI import es.weso.rdf.{InferenceEngine, NONE} import 
es.weso.rdfshape.server.api.definitions.ApiDefaults -import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, RDFFormat} +import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, RdfFormat} import es.weso.rdfshape.server.api.routes.data.logic.DataSource import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource import es.weso.rdfshape.server.api.routes.data.logic.aux.InferenceCodecs._ @@ -39,8 +39,8 @@ sealed case class DataSingle( * @return Either an error creating the raw data or a String containing the final text */ override lazy val rawData: Either[String, String] = - dataPre match { - case None => Left("Could not build the RDF from empty data") + dataPre.map(_.trim) match { + case None | Some("") => Left("Could not build the RDF from empty data") case Some(userData) => dataSource match { case DataSource.TEXT | // Raw text input by user @@ -153,6 +153,7 @@ private[api] object DataSingle DataFormatParameter.name, partsMap ) + paramInference <- partsMap.optPartValue(InferenceParameter.name) paramDataSource <- partsMap.optPartValue(DataSourceParameter.name) @@ -191,7 +192,7 @@ private[api] object DataSingle dataFormat <- cursor .downField("dataFormat") - .as[RDFFormat] + .as[RdfFormat] dataInference <- cursor diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index a3cfef20..5263c9cc 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -51,7 +51,7 @@ class DataService(client: Client[IO]) /** Returns a JSON array with the accepted input or output RDF data formats */ case GET -> Root / `api` / `verb` / "formats" / "input" => - val formats = RDFFormat.availableFormats ++ HtmlFormat.availableFormats + val formats = 
RdfFormat.availableFormats ++ HtmlFormat.availableFormats val formatNames = formats.map(_.name) val json = Json.fromValues(formatNames.map(Json.fromString)) Ok(json) @@ -59,7 +59,7 @@ class DataService(client: Client[IO]) /** Returns a JSON array with the available output RDF data formats */ case GET -> Root / `api` / `verb` / "formats" / "output" => - val formatNames = RDFFormat.availableFormats.map(_.name) + val formatNames = RdfFormat.availableFormats.map(_.name) val json = Json.fromValues(formatNames.map(Json.fromString)) Ok(json) @@ -102,9 +102,10 @@ class DataService(client: Client[IO]) /** Obtain information about an RDF source. * Receives a JSON object with the input RDF information: * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataFormat [String]: Format of the input RDF data * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it * - dataFormat [String]: Format of the RDF data - * - inference [String]: Inference to be applied + * - inference [String]: Inference to be applied to the data * Returns a JSON object with the operation results. See [[DataInfo.encodeDataInfoOperation]] */ case req @ POST -> Root / `api` / `verb` / "info" => @@ -141,10 +142,14 @@ class DataService(client: Client[IO]) /** Convert an RDF source into another format/syntax. * Receives a JSON object with the input RDF information: * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataFormat [String]: Format of the input RDF data * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it * - targetDataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied * Returns a JSON object with the operation results. See [[DataConvert.encodeDataConversionOperation]]. 
+ * @note The "convert" endpoint is invoked for data visualizations too, + * since these are just conversions to JSON, DOT, etc. later + * interpreted by the web client */ case req @ POST -> Root / `api` / `verb` / "convert" => req.decode[Multipart[IO]] { m => @@ -160,7 +165,6 @@ class DataService(client: Client[IO]) optTargetFormat = for { targetFormatStr <- optTargetFormatStr - // Standard data format or graphical format targetFormat <- DataFormat .fromString(targetFormatStr) .toOption @@ -203,8 +207,8 @@ class DataService(client: Client[IO]) /** Perform a SPARQL query on RDF data. * Receives a JSON object with the input RDF and query information: * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataFormat [String]: Format of the input RDF data * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it - * - dataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied * * - query [String]: SPARQL query data (raw, URL containing the data or File with the query) @@ -258,8 +262,8 @@ class DataService(client: Client[IO]) /** Attempt to extract a schema from an RDF source. * Receives a JSON object with the input RDF information: * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataFormat [String]: Format of the input RDF data * - dataSource [String]: Identifies the source of the data (raw, URL, file...) 
so that the server knows how to handle it - * - dataFormat [String]: Format of the RDF data * - inference [String]: Inference to be applied * - nodeSelector [String]: Node selector to use * Returns a JSON object with the extraction information (see [[DataExtract.encodeDataExtractOperation]] diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala index 9ad0b838..23ec05c4 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/query/SparqlQuery.scala @@ -31,8 +31,8 @@ sealed case class SparqlQuery private ( * @return Either an error building the query text or a String containing the final text of the SPARQL query */ lazy val rawQuery: Either[String, String] = - queryPre match { - case None => Left("Could not build the query from empty data") + queryPre.map(_.trim) match { + case None | Some("") => Left("Could not build the query from empty data") case Some(userQuery) => querySource match { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala index daddde51..3f830901 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/fetch/service/FetchService.scala @@ -6,12 +6,10 @@ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.UrlParameter import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson +import 
es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import org.http4s._ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl -import scalaj.http.Http - -import scala.util.{Failure, Success, Try} class FetchService() extends Http4sDsl[IO] with ApiService with LazyLogging { @@ -28,20 +26,9 @@ class FetchService() extends Http4sDsl[IO] with ApiService with LazyLogging { */ case GET -> Root / `api` / `verb` :? UrlParameter(url) => - Try { - Http(url).asString - } match { - case Success(res) if res.isSuccess => Ok(res.body) - case Success(res) => - errorResponseJson( - s"Could not fetch URL: status ${res.code}", - InternalServerError - ) - case Failure(exc) => - errorResponseJson( - s"Could not fetch URL: ${exc.getMessage}", - InternalServerError - ) + getUrlContents(url) match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right(content) => Ok(content) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala deleted file mode 100644 index 6a2eae66..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/aux/SchemaOperations.scala +++ /dev/null @@ -1,102 +0,0 @@ -package es.weso.rdfshape.server.api.routes.schema.logic.aux - -import cats.effect.IO -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.{InferenceEngine, RDFReasoner} -import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions -import es.weso.rdfshape.server.api.routes.schema.service.SchemaService -import es.weso.schema.Schema -import es.weso.uml.Schema2UML -import io.circe.Json - -/** Static utilities used by [[SchemaService]] - * to operate on schemas - */ -private[api] object SchemaOperations extends LazyLogging { - - /** Long value used as a "no time" value for errored validations - */ - private val NoTime = 0L - - 
/** @param schema Input schema - * @return JSON representation of the schema as a Cytoscape graph to be drawn on clients (or an error message) - */ - // TODO: return another status code on failure, so that clients can handle it - def schemaCytoscape(schema: Schema): Json = { - val eitherJson = for { - pair <- Schema2UML.schema2UML(schema) - } yield { - val (uml, _) = pair - uml.toJson - } - eitherJson.fold( - e => - Json.fromFields( - List( - ("error", Json.fromString(s"Error converting to schema 2 JSON: $e")) - ) - ), - identity - ) - } - - /** @param schema Input schema - * @return JSON representation of the schema as a Graphviz graph to be drawn on clients (or an error message) - */ - // TODO: return another status code on failure, so that clients can handle it - def schemaVisualize(schema: Schema): IO[Json] = for { - pair <- schema2SVG(schema) - } yield { - val (svg, plantuml) = pair - val info = schema.info - val fields: List[(String, Json)] = - List( - ("schemaType", Json.fromString(info.schemaName)), - ("schemaEngine", Json.fromString(info.schemaEngine)), - ("svg", Json.fromString(svg)), - ("plantUml", Json.fromString(plantuml)) - ) - Json.fromFields(fields) - } - - def schema2SVG(schema: Schema): IO[(String, String)] = { - val eitherUML = Schema2UML.schema2UML(schema) - eitherUML.fold( - err => { - val errMsg = s"Error in SVG conversion: $err" - logger.error(errMsg) - IO.raiseError(new RuntimeException(errMsg)) - // IO.pure((s"SVG conversion: $e", s"Error converting UML: $e")) - }, - pair => { - val (uml, _) = pair - logger.debug(s"UML converted: $uml") - (for { - str <- uml.toSVG(umlOptions) - } yield { - (str, uml.toPlantUML(umlOptions)) - }).handleErrorWith(e => - IO.raiseError( - new RuntimeException(s"SVG conversion error: ${e.getMessage}") - ) - ) - } - ) - } - - /** Apply inference - * - * @param rdf Data over which the inference should be applied - * @param inferenceEngine Inference engine to be applied - * @return The RDF data after applying the 
inference - * (or the intact data if no inference was provided) - */ - private[schema] def applyInference( - rdf: RDFReasoner, - inferenceEngine: Option[InferenceEngine] - ): IO[RDFReasoner] = inferenceEngine match { - case None => IO.pure(rdf) - case Some(engine) => - rdf.applyInference(engine) - } -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala index 3fc3b9f5..ce59c96f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala @@ -3,14 +3,28 @@ package es.weso.rdfshape.server.api.routes.schema.logic.operations import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.jena.RDFAsJenaModel +import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.definitions.UmlDefinitions.umlOptions import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat -import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource +import es.weso.rdfshape.server.api.format.dataFormats.{ + DataFormat, + GraphicFormat, + Svg, + Json => JsonFormat +} import es.weso.rdfshape.server.api.routes.schema.logic.types.{ Schema, SchemaSimple } -import es.weso.schema.{ShExSchema, ShaclexSchema, Schema => SchemaW} +import es.weso.schema.{ + JenaShacl, + ShExSchema, + ShaclTQ, + ShaclexSchema, + Schema => SchemaW +} import es.weso.shacl.converter.Shacl2ShEx +import es.weso.uml.{Schema2UML, UML} import io.circe.syntax._ import io.circe.{Encoder, Json} @@ -21,8 +35,8 @@ import io.circe.{Encoder, Json} */ final case class SchemaConvert private ( override val inputSchema: Schema, - targetFormat: Option[String], - targetEngine: Option[String], + targetFormat: 
SchemaFormat, + targetEngine: Option[SchemaW], result: Schema ) extends SchemaOperation(SchemaConvert.successMessage, inputSchema) @@ -38,78 +52,292 @@ private[api] object SchemaConvert extends LazyLogging { List( ("message", Json.fromString(schemaConvert.successMessage)), ("schema", schemaConvert.inputSchema.asJson), - ("result", schemaConvert.result.asJson) + ("result", schemaConvert.result.asJson), + ("targetDataFormat", schemaConvert.targetFormat.asJson) ) ) - /** Perform the actual conversion operation between Schema formats + /** Perform the actual conversion operation between Schema formats. * - * @param schema Input conversion schema - * @param targetFormat Target format - * @param targetEngine Target engine - * @return A new [[Schema]] instance + * @note Firstly, check the conversion target + * (another Schema, a Graphic visualization, etc.). + * Secondly, invoke the corresponding logic for each transformation + * @param inputSchema Input conversion schema + * @param targetFormat Target format + * @param optTargetEngine Target engine (discarded in visualizations) + * @return A new [[SchemaConvert]] instance with the conversion information */ def schemaConvert( - schema: Schema, - targetFormat: SchemaFormat, - targetEngine: Option[SchemaW] - ): IO[Schema] = { + inputSchema: Schema, + targetFormat: DataFormat, + optTargetEngine: Option[SchemaW] + ): IO[SchemaConvert] = { logger.info( - s"Schema conversion target format/engine: ${targetFormat.name}/${targetEngine - .map(_.name)}" + s"""Schema conversion targets: + - Format: ${targetFormat.name} + - Engine: ${optTargetEngine.map(_.name)}""" ) - // Check the schema engine - schema.engine match { - case Some(engine) => - engine match { - // Test that we are using shaclex schemas, specifically ShEx, - // which implements conversion - case shaclex: ShaclexSchema - if shaclex.getClass == classOf[ShExSchema] => - logger.debug("Schema conversion: SHACLEX -> SHEX") - Shacl2ShEx.shacl2ShEx( - schema = shaclex.schema, - 
nodesPrefixMap = Option(shaclex.pm) - ) match { - case Left(err) => - val msg = s"Error converting schema: $err" - logger.error(msg) - IO.raiseError(new RuntimeException(msg)) - case Right(newSchema) => - // ShapeMap generated here as well, but unneeded - val (schema, _) = newSchema - for { - emptySchemaBuilder <- RDFAsJenaModel.empty - rawString <- emptySchemaBuilder.use(builder => - es.weso.shex.Schema.serialize( - schema, - targetFormat.name, - None, - builder - ) - ) - } yield SchemaSimple( - schemaPre = Option(rawString), + // Check the format nature to see which logic to invoke + // 1) Schema to Schema conversion + if(SchemaFormat.availableFormats.contains(targetFormat)) { + schemaToSchema( + inputSchema, + new SchemaFormat(targetFormat), + optTargetEngine + ) + } + // 2) Schema to visualization + else if(GraphicFormat.availableFormats.contains(targetFormat)) { + schemaToVisualization(inputSchema, new GraphicFormat(targetFormat)) + } + // 3) Other conversions available + else + targetFormat match { + case JsonFormat => + schemaToJson(inputSchema) + case _ => + val msg = + s"Unavailable conversion from ${inputSchema.format.get.name} to ${targetFormat.name}" + logger.error(msg) + IO.raiseError(new RuntimeException(msg)) + } + + } + + /** * + * Auxiliary method for [[schemaConvert]] + * Convert a given Schema to another one, given the target engine and format + * + * @param inputSchema Input schema for conversion + * @param targetFormat Target format + * @param optTargetEngine Target engine + * @return A new [[SchemaConvert]] instance with the conversion information + */ + private def schemaToSchema( + inputSchema: Schema, + targetFormat: SchemaFormat, + optTargetEngine: Option[SchemaW] + ): IO[SchemaConvert] = { + // Get schema engine and target engine. 
If unavailable, throw errors + if(inputSchema.engine.isEmpty) { + throw new RuntimeException( + "Could not perform conversion, unknown input schema engine" + ) + } + if(optTargetEngine.isEmpty) { + throw new RuntimeException( + "Could not perform conversion, unknown target schema engine" + ) + } + + // Try to extract the inner-library schema from the user given schema + for { + maybeInnerSchema <- inputSchema.getSchema + innerSchema <- maybeInnerSchema match { + case Left(err) => IO.raiseError(new RuntimeException(err)) + case Right(schema) => IO.pure(schema) + } + + // Tuple with the input data and an empty representation of the output + conversionSchemas = ( + innerSchema, + optTargetEngine.get + ) + _ = logger.debug( + s"Schema conversion: ${conversionSchemas._1.name} -> ${conversionSchemas._2.name}" + ) + + result <- conversionSchemas match { + // If SHEX => SHEX or SHACL => SHACL, use the schema methods + case (schemaIn: SchemaW, schemaOut: SchemaW) + if schemaIn.getClass == schemaOut.getClass => + for { + rawOutputSchema <- schemaIn + .convert( + Option(targetFormat.name), + Option(schemaOut.name), + None + ) + outputSchema = SchemaSimple( + schemaPre = Option(rawOutputSchema.trim), + schemaFormat = targetFormat, + schemaEngine = schemaOut, + schemaSource = inputSchema.schemaSource + ) + } yield SchemaConvert( + inputSchema = inputSchema, + targetFormat = targetFormat, + targetEngine = Option(schemaOut), + result = outputSchema + ) + + // SHACL => SHEX, use implemented + case (shaclIn: ShaclexSchema, shexOut: ShExSchema) => + logger.debug("Schema conversion: SHACL(EX) -> SHEX") + for { + shexSchema <- shaclToShex(shaclIn, targetFormat) + outputSchema = shexSchema match { + case (_, schemaStr) => + SchemaSimple( + schemaPre = Option(schemaStr.trim), schemaFormat = targetFormat, - schemaEngine = engine, - schemaSource = SchemaSource.TEXT + schemaEngine = shexOut, + schemaSource = inputSchema.schemaSource ) } + } yield SchemaConvert( + inputSchema = inputSchema, 
+ targetFormat = targetFormat, + targetEngine = outputSchema.engine, + result = outputSchema + ) + // SHEX => SHACL, not implemented + case (_: ShExSchema, ShaclexSchema(_) | JenaShacl(_) | ShaclTQ(_)) => + IO.raiseError( + new RuntimeException("Not implemented: ShEx to SHACL") + ) + case (a, b) => + logger.error(s"${a.getClass.getName}, ${b.getClass.getName}") + IO.raiseError( + new RuntimeException( + "Could not perform conversion: invalid schema inputs" + ) + ) + } + } yield result + } - case _ => - IO.raiseError( - new RuntimeException( - "Could not perform conversion, unknown input schema engine" - ) + /** Auxiliary method with the logic to convert from SHACL schemas to SHEX schemas + * + * @param inputSchema [[ShaclexSchema]] used for the conversion + * @return [[ShExSchema]] resulting of the conversion, along with the raw schema String + */ + private def shaclToShex( + inputSchema: ShaclexSchema, + targetFormat: SchemaFormat + ): IO[(ShExSchema, String)] = { + Shacl2ShEx.shacl2ShEx( + schema = inputSchema.schema, + nodesPrefixMap = Option(inputSchema.pm) + ) match { + case Left(err) => + val msg = s"Error converting schema: $err" + logger.error(msg) + IO.raiseError(new RuntimeException(msg)) + case Right(newSchema) => + // ShapeMap generated here as well, but unneeded + val (schemaW, _) = newSchema + for { + emptySchemaBuilder <- RDFAsJenaModel.empty + targetFormatStr = targetFormat.name + rawString <- emptySchemaBuilder.use(builder => + es.weso.shex.Schema.serialize( + schemaW, + targetFormatStr, + None, + builder + ) + ) + result <- ShExSchema.fromString(rawString, targetFormatStr, None) + } yield (result, rawString) + } + } + + /** * + * Auxiliary method for [[schemaConvert]] + * Convert a given Schema to its JSON representation, for latter use + * individually or with cytoscape + * + * @param inputSchema Input schema for conversion + * @return A new [[SchemaConvert]] instance with the conversion information + */ + private def schemaToJson(inputSchema: 
Schema): IO[SchemaConvert] = + for { + innerSchema <- inputSchema.getSchema + jsonSchema = innerSchema + .flatMap(s => Schema2UML.schema2UML(s)) + .map(_._1.toJson) + + outputSchema = jsonSchema.map(jsonData => + SchemaSimple( + schemaPre = Option(jsonData.spaces2), + schemaFormat = new SchemaFormat(JsonFormat), + schemaEngine = inputSchema.engine.get, + schemaSource = inputSchema.schemaSource + ) + ) + + conversion <- outputSchema.fold( + err => IO.raiseError(new RuntimeException(err)), + schema => + IO.pure( + SchemaConvert( + inputSchema = inputSchema, + targetFormat = new SchemaFormat(JsonFormat), + targetEngine = None, + result = schema ) + ) + ) + } yield conversion + + /** * + * Auxiliary method for [[schemaConvert]] + * Convert a given Schema to its SVG representation, for latter use + * in clients + * + * @param inputSchema Input schema for conversion + * @return A new [[SchemaConvert]] instance with the conversion information + */ + private def schemaToVisualization( + inputSchema: Schema, + targetFormat: GraphicFormat + ): IO[SchemaConvert] = for { + + maybeInnerSchema <- inputSchema.getSchema + maybeUml: Either[String, (UML, List[String])] = maybeInnerSchema.flatMap( + s => Schema2UML.schema2UML(s) + ) + maybeResult: Either[String, IO[Schema]] = maybeUml.flatMap { + case (uml, _) => + targetFormat match { + case Svg => + Right( + uml + .toSVG(umlOptions) + .map(svg => { + SchemaSimple( + schemaPre = Option(svg.trim), + schemaFormat = new SchemaFormat(Svg), + schemaEngine = inputSchema.engine + .getOrElse(ApiDefaults.defaultSchemaEngine), + schemaSource = inputSchema.schemaSource + ) + }) + ) + + case _ => Left(s"Unsupported visualization: ${targetFormat.name}") } - case None => + } + + conversion <- maybeResult.fold( + err => IO.raiseError(new RuntimeException(err)), + _.map(resultSchema => + SchemaConvert( + inputSchema = inputSchema, + targetFormat = resultSchema.format.get, + targetEngine = None, + result = resultSchema + ) + ).handleErrorWith(err => 
IO.raiseError( new RuntimeException( - "Could not perform conversion, unspecified input schema engine" + s"Unexpected error during conversion: ${err.getMessage}" ) ) - } - } + ) + ) + + } yield conversion } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala index f22fb60b..af119786 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala @@ -55,7 +55,7 @@ private[api] object SchemaValidate extends LazyLogging { trigger match { // ShEx validation with Shapemap | // SHACL validation with target declarations - case TriggerShapeMap(_) | TriggerTargetDeclarations() => + case _: TriggerShapeMap | _: TriggerTargetDeclarations => for { innerSchema <- schema.getSchema result = innerSchema.flatMap(s => { @@ -89,6 +89,7 @@ private[api] object SchemaValidate extends LazyLogging { ) /** Convert a [[ValidationResult]] to its JSON representation + * * @note Exceptionally uses unsafeRun */ implicit val encodeValidationResult: Encoder[ValidationResult] = @@ -103,7 +104,7 @@ private[api] object SchemaValidate extends LazyLogging { * used for API responses * @note */ - implicit val encodeSchemaValidation: Encoder[SchemaValidate] = + implicit val encodeSchemaValidateOperation: Encoder[SchemaValidate] = (schemaValidate: SchemaValidate) => { // Convert ValidationResult to JSON diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala index 119ef73d..622a65ba 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerMode.scala @@ -2,7 +2,9 @@ package es.weso.rdfshape.server.api.routes.schema.logic.trigger import cats.effect.IO import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType._ +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.TriggerModeParameter import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.schema.ValidationTrigger @@ -16,6 +18,14 @@ trait TriggerMode { */ val triggerModeType: TriggerModeType + /** Optionally, the [[Data]] being validated in the validation using this trigger + */ + val data: Option[Data] + + /** Optionally, the [[Schema]] being used in the validation using this trigger + */ + val schema: Option[Schema] + /** Get the inner [[ValidationTrigger]], which is used internally for schema validations * * @return Either the inner [[ValidationTrigger]] logical model as used by WESO libraries, @@ -53,7 +63,9 @@ object TriggerMode extends TriggerModeCompanion[TriggerMode] { /** General implementation delegating on subclasses */ override def mkTriggerMode( - partsMap: PartsMap + partsMap: PartsMap, + data: Option[Data] = None, + schema: Option[Schema] = None ): IO[Either[String, TriggerMode]] = for { /* 1. 
Make some checks on the parameters to distinguish between @@ -65,10 +77,11 @@ object TriggerMode extends TriggerModeCompanion[TriggerMode] { case Some(triggerModeStr) => triggerModeStr match { // ShapeMap: ShEx validation - case SHAPEMAP => TriggerShapeMap.mkTriggerMode(partsMap) + case SHAPEMAP => + TriggerShapeMap.mkTriggerMode(partsMap, data, schema) // TargetDecls: SHACL validation case TARGET_DECLARATIONS => - TriggerTargetDeclarations.mkTriggerMode(partsMap) + TriggerTargetDeclarations.mkTriggerMode(partsMap, data, schema) // Invalid value received for trigger mode case _ => IO.pure(Left("Invalid value received for trigger mode")) } @@ -96,7 +109,13 @@ private[schema] trait TriggerModeCompanion[T <: TriggerMode] /** Given a request's parameters, try to extract an instance of [[TriggerMode]] (type [[T]]) from them * * @param partsMap Request's parameters + * @param data Optionally, the [[Data]] being validated in the validation using this trigger + * @param schema Optionally, the [[Schema]] being used in the validation using this trigger * @return Either the [[TriggerMode]] instance or an error message */ - def mkTriggerMode(partsMap: PartsMap): IO[Either[String, T]] + def mkTriggerMode( + partsMap: PartsMap, + data: Option[Data], + schema: Option[Schema] + ): IO[Either[String, T]] } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala index e3abfdc2..f8fd86ca 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala @@ -1,8 +1,12 @@ package es.weso.rdfshape.server.api.routes.schema.logic.trigger import cats.effect.IO +import cats.effect.unsafe.implicits.global import com.typesafe.scalalogging.LazyLogging 
+import es.weso.rdf.PrefixMap +import es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType.TriggerModeType +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.schema.{ShapeMapTrigger, ValidationTrigger} @@ -16,7 +20,9 @@ import io.circe.{Decoder, Encoder, HCursor, Json} * @param shapeMap Inner shapemap associated to the [[TriggerShapeMap()]] */ sealed case class TriggerShapeMap private ( - shapeMap: ShapeMap + shapeMap: ShapeMap, + override val data: Option[Data], + override val schema: Option[Schema] ) extends TriggerMode with LazyLogging { @@ -42,15 +48,55 @@ private[api] object TriggerShapeMap * @return Either the trigger mode or an error message */ def mkTriggerMode( - partsMap: PartsMap + partsMap: PartsMap, + data: Option[Data], + schema: Option[Schema] ): IO[Either[String, TriggerShapeMap]] = { + // Get data prefix map, if possible + val dataPrefixMap: Option[PrefixMap] = data + .map(data => + for { + rdf <- data.toRdf() + pm <- rdf.use(_.getPrefixMap) + } yield pm + ) + .map( + _.handleErrorWith(_ => + IO.raiseError( + new RuntimeException("Could not process the data provided") + ) + ) + .unsafeRunSync() + ) + + // Get schema prefix map, if possible + val schemaPrefixMap: Option[PrefixMap] = schema + .map(schema => + for { + schemaModel <- schema.getSchema + pm = schemaModel.map(_.pm).toOption + } yield pm + ) + .flatMap( + _.handleErrorWith(_ => + IO.raiseError( + new RuntimeException("Could not process the schema provided") + ) + ).unsafeRunSync() + ) + + // Form the shapemap and complete the trigger instance for { // Get companion shapemap from params - maybeShapeMap <- ShapeMap.mkShapeMap(partsMap) + maybeShapeMap <- ShapeMap.mkShapeMap( + partsMap, + nodesPrefixMap = dataPrefixMap, + shapesPrefixMap = 
schemaPrefixMap + ) // Create TriggerMode instance maybeTriggerMode = maybeShapeMap.map(shapeMap => - TriggerShapeMap(shapeMap) + TriggerShapeMap(shapeMap, data, schema) ) } yield maybeTriggerMode @@ -60,13 +106,17 @@ private[api] object TriggerShapeMap (tsm: TriggerShapeMap) => Json.obj( ("type", tsm.triggerModeType.asJson), - ("shapeMap", tsm.shapeMap.asJson) + ("shapeMap", tsm.shapeMap.asJson), + ("data", tsm.data.asJson), + ("schema", tsm.schema.asJson) ) override implicit val decodeTriggerMode: Decoder[TriggerShapeMap] = (cursor: HCursor) => for { shapeMap <- cursor.downField("shapeMap").as[ShapeMap] - decoded = TriggerShapeMap(shapeMap) + data <- cursor.downField("data").as[Option[Data]] + schema <- cursor.downField("schema").as[Option[Schema]] + decoded = TriggerShapeMap(shapeMap, data, schema) } yield decoded } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala index 6953ae60..b3c0fe02 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerTargetDeclarations.scala @@ -2,7 +2,9 @@ package es.weso.rdfshape.server.api.routes.schema.logic.trigger import cats.effect.IO import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.data.logic.types.Data import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType.TriggerModeType +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.schema.{TargetDeclarations, ValidationTrigger} import io.circe.syntax.EncoderOps @@ -11,8 +13,10 @@ import io.circe.{Decoder, Encoder, HCursor, Json} /** Data class representing a 
validation trigger enabled by target declarations, * for SHACL validations. */ -sealed case class TriggerTargetDeclarations private () - extends TriggerMode +sealed case class TriggerTargetDeclarations private ( + override val data: Option[Data], + override val schema: Option[Schema] +) extends TriggerMode with LazyLogging { override val triggerModeType: TriggerModeType = @@ -32,16 +36,25 @@ private[api] object TriggerTargetDeclarations * @return Either the trigger mode or an error message */ def mkTriggerMode( - partsMap: PartsMap + partsMap: PartsMap, + data: Option[Data], + schema: Option[Schema] ): IO[Either[String, TriggerTargetDeclarations]] = - IO.pure(Right(TriggerTargetDeclarations())) + IO.pure(Right(TriggerTargetDeclarations(data, schema))) override implicit val encodeTriggerMode: Encoder[TriggerTargetDeclarations] = (tsm: TriggerTargetDeclarations) => Json.obj( - ("type", tsm.triggerModeType.asJson) + ("type", tsm.triggerModeType.asJson), + ("data", tsm.data.asJson), + ("schema", tsm.schema.asJson) ) override implicit val decodeTriggerMode: Decoder[TriggerTargetDeclarations] = - (_: HCursor) => Decoder.resultInstance.pure(TriggerTargetDeclarations()) + (cursor: HCursor) => + for { + data <- cursor.downField("data").as[Option[Data]] + schema <- cursor.downField("schema").as[Option[Schema]] + decoded = TriggerTargetDeclarations(data, schema) + } yield decoded } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala index bfedd20a..a891e184 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala @@ -21,7 +21,8 @@ trait Schema { /** Default URI obtained from current folder */ - lazy val base: Option[IRI] = ApiDefaults.relativeBase + lazy val base: 
Option[IRI] = Some(ApiDefaults.localBase) + // ApiDefaults.relativeBase /** Source where the schema comes from */ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala index e64ddf14..6c5f33b6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/SchemaSimple.scala @@ -40,21 +40,22 @@ sealed case class SchemaSimple( * * @return Either an error creating the raw data or a String containing the final schema text */ - override lazy val rawSchema: Either[String, String] = schemaPre match { - case None => Left("Could not build the Schema from empty data") - case Some(userSchema) => - schemaSource match { - case SchemaSource.TEXT | SchemaSource.FILE => Right(userSchema) - case SchemaSource.URL => - getUrlContents(userSchema) - - case other => - val msg = s"Unknown schema source: $other" - logger.warn(msg) - Left(msg) - - } - } + override lazy val rawSchema: Either[String, String] = + schemaPre.map(_.trim) match { + case None | Some("") => Left("Could not build the Schema from empty data") + case Some(userSchema) => + schemaSource match { + case SchemaSource.TEXT | SchemaSource.FILE => Right(userSchema) + case SchemaSource.URL => + getUrlContents(userSchema) + + case other => + val msg = s"Unknown schema source: $other" + logger.warn(msg) + Left(msg) + + } + } // Override and make publicly available the trait properties override val format: Option[SchemaFormat] = Option(schemaFormat) override val engine: Option[SchemaW] = Option(schemaEngine) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 
7f7071d5..9d5c04be 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -3,18 +3,30 @@ package es.weso.rdfshape.server.api.routes.schema.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api +import es.weso.rdfshape.server.api.format.dataFormats.DataFormat import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.{ ShExFormat, ShaclFormat } import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.rdfshape.server.api.routes.schema.logic.operations.SchemaInfo -import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerModeType +import es.weso.rdfshape.server.api.routes.data.logic.types.Data +import es.weso.rdfshape.server.api.routes.schema.logic.aux.SchemaAdapter +import es.weso.rdfshape.server.api.routes.schema.logic.operations.{ + SchemaConvert, + SchemaInfo, + SchemaValidate +} +import es.weso.rdfshape.server.api.routes.schema.logic.trigger.{ + TriggerMode, + TriggerModeType, + TriggerShapeMap +} import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson -import es.weso.schema.{Schemas, ShExSchema} +import es.weso.schema.{Schemas, ShExSchema, Result => ValidationResult} import io.circe.Json import io.circe.syntax.EncoderOps import org.http4s._ @@ -103,9 +115,8 @@ class SchemaService(client: Client[IO]) * - schemaSource [String]: Identifies the source of the schema (raw, URL, file...) 
* - schemaFormat [String]: Format of the schema * - schemaEngine [String]: Engine used to process the schema (ignored for ShEx) - * Returns a JSON object with the operation results. See [[SchemaInfo.encodeSchemaInfoOperation]]. + * Returns a JSON object with the operation results. See [[SchemaInfo.encodeSchemaInfoOperation]]. */ - // TODO: show errors in a friendlier way in the client's UI case req @ POST -> Root / `api` / `verb` / "info" => req.decode[Multipart[IO]] { m => { @@ -129,149 +140,153 @@ class SchemaService(client: Client[IO]) } yield response } + } - /** Convert a given schema to another accepted format. - * Receives a JSON object with the input schema information: - * - schema [String]: Raw schema data - * - schemaUrl [String]: Url containing the schema - * - schemaFile [File Object]: File containing schema - * - schemaFormat [String]: Format of the schema - * - targetSchemaFormat [String]: Desired format after conversion of the schema - * - schemaEngine [String]: Engine used to process the schema - * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) - * Returns a JSON object with the converted schema information: - * - message [String]: Informational message on success - * - schema [String]: Original input schema - * - schemaFormat [String]: Format of the original schema - * - schemaEngine [String]: Engine of the conversion - * - targetSchemaFormat [String]: Format of the output schema - * - result [String]: Output schema - * - shapeMap [String]: Output shapemap, if any - */ - // case req @ POST -> Root / `api` / `verb` / "convert" => - // req.decode[Multipart[IO]] { m => - // { - // val partsMap = PartsMap(m.parts) - // logger.info(s"POST info partsMap. 
$partsMap") - // val r: IO[Json] = for { - // schemaPair <- SchemaSimple.mkSchema(partsMap, None) - // (schema, sp) = schemaPair - // - // targetSchemaFormat <- optEither2f( - // sp.targetSchemaFormat, - // SchemaFormat.fromString - // ) - // converted <- convertSchema( - // schema, - // sp.schemaPre, - // sp.schemaFormat, - // sp.schemaEngine.getOrElse(defaultSchemaEngineName), - // targetSchemaFormat, - // sp.targetSchemaEngine - // ) - // } yield { - // converted.toJson - // } - // for { - // e <- r.attempt - // v <- e.fold( - // t => errorResponseJson(t.getMessage, InternalServerError), - // Ok(_) - // ) - // } yield v - // } - // } + /** Convert a given schema to another accepted format (this includes + * graphic formats for visualizations). + * * Receives a JSON object with the input schema information: + * - schema [String]: Schema data (raw, URL containing the schema or File with the schema) + * - schemaSource [String]: Identifies the source of the schema (raw, URL, file...) + * - schemaFormat [String]: Format of the schema + * - schemaEngine [String]: Engine used to process the schema (ignored for ShEx) + * - targetSchemaFormat [String]: Desired format after conversion of the schema + * Returns a JSON object with the operation results. See [[SchemaConvert.encodeSchemaConvertOperation]]. + */ + case req @ POST -> Root / `api` / `verb` / "convert" => + req.decode[Multipart[IO]] { m => + { + val partsMap = PartsMap(m.parts) + for { + // Get the schema from the partsMap + eitherSchema <- Schema.mkSchema(partsMap) + // Get the target schema format + optTargetFormatStr <- partsMap.optPartValue( + TargetSchemaFormatParameter.name + ) + optTargetFormat = for { + targetFormatStr <- optTargetFormatStr + targetFormat <- DataFormat + .fromString(targetFormatStr) + .toOption + } yield targetFormat - /** Convert a given schema to a UML visualization using PlantUML. 
- * Receives a JSON object with the input schema information: - * - schema [String]: Raw schema data - * - schemaUrl [String]: Url containing the schema - * - schemaFile [File Object]: File containing schema - * - schemaFormat [String]: Format of the schema - * - schemaEngine [String]: Engine used to process the schema - * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) - * Returns a JSON object with the converted schema information: - * - schemaType [String]: Type of the schema - * - schemaEngine [String]: Engine of the schema - * - svg [String]: Array of the shapes in the schema - * - plantUml [String]: Array of the shapes in the schema - */ - // case req @ POST -> Root / `api` / `verb` / "visualize" => - // req.decode[Multipart[IO]] { m => - // { - // val partsMap = PartsMap(m.parts) - // val r: IO[Json] = for { - // schemaPair <- SchemaSimple.mkSchema(partsMap, None) - // (schema, _) = schemaPair - // v <- schemaVisualize(schema) - // } yield { - // v - // } - // for { - // e <- r.attempt - /* v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) */ - // } yield v - // } - // } + // Get the target engine + optTargetEngineStr <- partsMap.optPartValue( + TargetSchemaEngineParameter.name + ) + optTargetEngine = for { + targetEngineStr <- optTargetEngineStr + targetEngine <- SchemaAdapter.schemaEngineFromString( + targetEngineStr + ) + } yield targetEngine - // TODO: test and include in the client - // case req @ POST -> Root / `api` / `verb` / "cytoscape" => - // req.decode[Multipart[IO]] { m => - // { - // val partsMap = PartsMap(m.parts) - // logger.info(s"POST info partsMap. 
$partsMap") - // val r: IO[Json] = for { - // schemaPair <- SchemaSimple.mkSchema(partsMap, None) - // (schema, _) = schemaPair - // } yield { - // schemaCytoscape(schema) - // } - // for { - // e <- r.attempt - /* v <- e.fold(t => errorResponseJson(t.getMessage, BadRequest), Ok(_)) */ - // } yield v - // } - // } + // Abort if no valid target format, else continue + response <- optTargetFormat match { + case None => + errorResponseJson( + "Empty or invalid target format for conversion", + BadRequest + ) + case Some(targetFormat) => + eitherSchema match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right(schema) => + SchemaConvert + .schemaConvert( + schema, + targetFormat, + optTargetEngine + ) + .flatMap(result => Ok(result.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + } + } yield response + } + } + + /** Validates RDF data against a given schema-shapemap. + * Receives a JSON object with the input data, schema and shapemap information: + * + * - data [String]: RDF data (raw, URL containing the data or File with the data) + * - dataSource [String]: Identifies the source of the data (raw, URL, file...) so that the server knows how to handle it + * - dataFormat [String]: Format of the RDF data + * - inference [String]: Inference to be applied to the data + * + * - schema [String]: Schema data (raw, URL containing the schema or File with the schema) + * - schemaSource [String]: Identifies the source of the schema (raw, URL, file...) + * - schemaFormat [String]: Format of the schema + * - schemaEngine [String]: Engine used to process the schema (ignored for ShEx) + * + * - shapeMap [String]: ShapeMap data (raw, URL containing the shapeMap or File with the shapeMap) + * - shapeMapSource [String]: Identifies the source of the shapeMap (raw, URL, file...) 
+ * - shapeMapFormat [String]: Format of the shapemap + * + * - endpoint [String]: Additional endpoint to serve as a source of data + * - triggerMode [String]: Validation trigger mode + * + * Returns a JSON object with the operation results. See [[SchemaValidate.encodeSchemaValidateOperation]] and [[ValidationResult.toJson()]]. + * + * @note When obtaining the trigger mode from the parameters, + * if the [[TriggerMode]] is shapeMap, the corresponding [[ShapeMap]] + * object will be embedded in the resulting [[TriggerShapeMap]] + */ + case req @ POST -> Root / `api` / `verb` / "validate" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + val response = for { + /* Get the data, schema and trigger-mode from the partsMap. + * If the trigger-mode is "shapeMap", the shapemap will be embedded in + * the trigger object */ + + eitherData <- Data.mkData(partsMap) + + eitherSchema <- Schema.mkSchema(partsMap) + + eitherTriggerMode <- TriggerMode + .mkTriggerMode( + partsMap, + eitherData.toOption, + eitherSchema.toOption + ) + + // Contains either the first error encountered or the validation + eitherValidationData = for { + data <- eitherData + schema <- eitherSchema + trigger <- eitherTriggerMode + } yield (data, schema, trigger) + + response <- eitherValidationData match { + case Left(err) => errorResponseJson(err, InternalServerError) + case Right((data, schema, trigger)) => + SchemaValidate + .schemaValidate(data, schema, trigger) + .flatMap(result => Ok(result.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + + } yield response + response.handleErrorWith(err => + err.getMessage match { + case msg: String => + errorResponseJson( + msg, + InternalServerError + ) + case _ => + errorResponseJson( + SchemaServiceError.couldNotValidateData, + InternalServerError + ) + } + ) - // TODO: Enhance API response - /** Validates RDF data against a given schema-shapemap. 
- * Receives a JSON object with the input data, schema and shapemap information: - * - data [String]: RDF data - * - dataUrl [String]: Url containing the RDF data - * - dataFile [File Object]: File containing RDF data - * - dataFormat [String]: Format of the RDF data - * - inference [String]: Inference to be applied - * - activeDataTab [String]: Identifies the source of the data (raw, URL, file...) - * - endpoint [String]: Additional endpoint to serve as a source of data - * - schema [String]: Raw schema data - * - schemaUrl [String]: Url containing the schema - * - schemaFile [File Object]: File containing the schema - * - schemaFormat [String]: Format of the schema - * - schemaEngine [String]: Engine used to process the schema - * - activeSchemaTab [String]: Identifies the source of the schema (raw, URL, file...) - * - triggerMode [String]: Validation trigger mode - * - shapeMap [String]: Raw shapemap data - * - shapeMapUrl [String]: Url containing the shapemap - * - shapeMapFile [File Object]: File containing the shapemap - * - shapeMapFormat [String]: Format of the shapemap - * - activeShapeMapTab [String]: Identifies the source of the shapemap (raw, URL, file...) - * Returns a JSON object with the converted schema information: - * - valid [Boolean]: Whether the data is at least partially valid or not - * - message [String]: Informational message - * - validationReport [String]: Additional validation information - * - schema [String]: Original input schema - * - nodesPrefixMap [Object]: Key/value structure with the data prefixes - * - shapesPrefixMap [Object]: Key/value structure with the schema prefixes - * - shapeMap [Array]: Array containing the validation results for each node. 
Each result has: - * - node [String]: Full name of the affected node - * - shape [String]: Full name of the affected shape - * - status [String]: Whether this node conforms this shape - * - appInfo [Object]: Additional information on why the node conforms or not - * - errors [Array]: Array of errors in the validation - */ - /* TODO: redo */ - // case req @ POST -> Root / `api` / `verb` / "validate" => - // req.decode[Multipart[IO]] { m => // { // val partsMap = PartsMap(m.parts) // val r = for { @@ -284,7 +299,7 @@ class SchemaService(client: Client[IO]) // schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) // (schema, _) = schemaPair /* maybeTriggerMode <- TriggerMode.mkTriggerMode(partsMap) */ - // newRdf <- applyInference(rdf, dp.inference) + /* newRdf <- applyInference(rdf, dp.inference) */ // ret <- maybeTriggerMode match { // case Left(err) => // IO.raiseError( @@ -319,9 +334,9 @@ class SchemaService(client: Client[IO]) // ) // } yield res // } - // } } } + } object SchemaService { @@ -334,3 +349,9 @@ object SchemaService { def apply(client: Client[IO]): SchemaService = new SchemaService(client) } + +private object SchemaServiceError extends Enumeration { + type SchemaServiceError = String + val couldNotValidateData: SchemaServiceError = + "Unknown error validating the data provided. Check the inputs." 
+} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index 2f95a054..dd326d99 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -2,6 +2,7 @@ package es.weso.rdfshape.server.api.routes.shapemap.logic import cats.effect.IO import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.PrefixMap import es.weso.rdfshape.server.api.definitions.ApiDefaults import es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.{ @@ -18,12 +19,16 @@ import io.circe.{Decoder, Encoder, HCursor, Json} /** Data class representing a ShapeMap and its current source. * * @note Invalid initial data is accepted, but may cause exceptions when operating with it (like converting to JSON). 
- * @param shapeMapPre Shapemap contents, as received before being processed depending on the [[source]] - * @param format Shapemap format - * @param source Active source, used to know which source the shapemap comes from + * @param shapeMapPre Shapemap contents, as received before being processed depending on the [[source]] + * @param nodesPrefixMap Prefix mappings of the data referenced in the shapemap + * @param shapesPrefixMap Prefix mappings of the ShEx schema referenced in the shapemap + * @param format Shapemap format + * @param source Active source, used to know which source the shapemap comes from */ sealed case class ShapeMap private ( private val shapeMapPre: Option[String], + private val nodesPrefixMap: PrefixMap = PrefixMap.empty, + private val shapesPrefixMap: PrefixMap = PrefixMap.empty, format: ShapeMapFormat, source: ShapeMapSource ) extends LazyLogging { @@ -49,14 +54,22 @@ sealed case class ShapeMap private ( * @return A ShapeMap instance used by WESO libraries in validation */ lazy val innerShapeMap: Either[String, ShapeMapW] = { - rawShapeMap match { + rawShapeMap.map(_.trim) match { + case None | Some("") => + Left("Cannot extract the ShapeMap from an empty instance") case Some(shapeMapStr) => + println(nodesPrefixMap.pm) ShapeMapW - .fromString(shapeMapStr, format.name) match { + .fromString( + shapeMapStr, + format.name, + base = None, + nodesPrefixMap, + shapesPrefixMap + ) match { case Left(errorList) => Left(errorList.toList.mkString("\n")) case Right(shapeMap) => Right(shapeMap) } - case None => Left("Cannot extract the ShapeMap from an empty instance") } } } @@ -116,11 +129,14 @@ private[api] object ShapeMap extends LazyLogging { * @return Either the shapemap or an error message */ def mkShapeMap( - partsMap: PartsMap + partsMap: PartsMap, + nodesPrefixMap: Option[PrefixMap] = None, + shapesPrefixMap: Option[PrefixMap] = None ): IO[Either[String, ShapeMap]] = { for { // Get data sent in que query paramShapeMap <- 
partsMap.optPartValue(ShapeMapParameter.name) + paramFormat <- ShapeMapFormat.fromRequestParams( ShapeMapFormatParameter.name, partsMap @@ -137,10 +153,11 @@ private[api] object ShapeMap extends LazyLogging { // Create the shapemap instance shapeMap = ShapeMap( shapeMapPre = paramShapeMap, + nodesPrefixMap = nodesPrefixMap.getOrElse(PrefixMap.empty), + shapesPrefixMap = shapesPrefixMap.getOrElse(PrefixMap.empty), format = paramFormat.getOrElse(ApiDefaults.defaultShapeMapFormat), source = paramSource.getOrElse(defaultShapeMapSource) ) - } yield shapeMap.innerShapeMap.map(_ => shapeMap) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala index bc9b5c42..f20f44a0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala @@ -2,6 +2,7 @@ package es.weso.rdfshape.server.api.routes.wikibase.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.ApiService import es.weso.shapemaps.{Status => _} import org.http4s._ @@ -36,7 +37,7 @@ class WikibaseService(client: Client[IO]) def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { // TODO: uncomment routes and refactor along with wikishape client - case _ => InternalServerError("Pending") + case GET -> Root / `api` / `verb` => InternalServerError("Pending") /** Search for wikidata entities using MediaWiki's API. 
Search based on entity ID * See https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala index 555ea4a9..bd960f26 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -11,16 +11,16 @@ object IncomingRequestParameters { // String constants representing each parameter name expected by the server lazy val data = "data" - lazy val compoundData = "compoundData" + lazy val dataCompound = "dataCompound" lazy val dataFormat = "dataFormat" - lazy val targetDataFormat = "targetDataFormat" + lazy val dataTargetFormat = "dataTargetFormat" lazy val schema = "schema" lazy val schemaFormat = "schemaFormat" lazy val schemaEngine = "schemaEngine" - lazy val targetSchemaFormat = "targetSchemaFormat" - lazy val targetSchemaEngine = "targetSchemaEngine" - lazy val inference = "inference" + lazy val targetSchemaFormat = "schemaTargetFormat" + lazy val targetSchemaEngine = "schemaTargetEngine" + lazy val dataInference = "dataInference" lazy val triggerMode = "triggerMode" lazy val shape = "shape" @@ -28,9 +28,8 @@ object IncomingRequestParameters { lazy val node = "node" lazy val nodeSelector = "nodeSelector" - lazy val shapemap = "shapemap" - lazy val shape_map = "shape-map" - lazy val shapemapFormat = "shapemapFormat" + lazy val shapeMap = "shapeMap" + lazy val shapeMapFormat = "shapeMapFormat" lazy val query = "query" @@ -38,7 +37,7 @@ object IncomingRequestParameters { lazy val dataSource = "dataSource" lazy val schemaSource = "schemaSource" - lazy val shapemapSource = "shapemapSource" + lazy val shapeMapSource = "shapeMapSource" lazy val querySource = "querySource" 
lazy val wdEntity = "wdEntity" @@ -68,8 +67,8 @@ object IncomingRequestParameters { /** Parameter expected to contain a compound of RDF data (URL encoded), formed by 2 or more RDF sources */ object CompoundDataParameter - extends OptionalQueryParamDecoderMatcher[String](compoundData) { - val name: String = compoundData + extends OptionalQueryParamDecoderMatcher[String](dataCompound) { + val name: String = dataCompound } /** Parameter expected to contain an RDF format name, referencing the user's data format @@ -82,8 +81,8 @@ object IncomingRequestParameters { /** Parameter expected to contain an RDF format name, referencing the target format of a conversion */ object TargetDataFormatParameter - extends OptionalQueryParamDecoderMatcher[String](targetDataFormat) { - val name: String = targetDataFormat + extends OptionalQueryParamDecoderMatcher[String](dataTargetFormat) { + val name: String = dataTargetFormat } /** Parameter expected to contain schema contents (URL encoded) @@ -127,8 +126,8 @@ object IncomingRequestParameters { /** Parameter expected to contain the inference applied in data validations */ object InferenceParameter - extends OptionalQueryParamDecoderMatcher[String](inference) { - val name: String = inference + extends OptionalQueryParamDecoderMatcher[String](dataInference) { + val name: String = dataInference } /** Parameter expected to contain the trigger mode present applied in data validations @@ -171,22 +170,15 @@ object IncomingRequestParameters { * The source of the data is therefore specified by [[ShapemapSourceParameter]] */ object ShapeMapParameter - extends OptionalQueryParamDecoderMatcher[String](shapemap) { - val name: String = shapemap - } - - /** Alternative parameter with the same uses as [[ShapeMapParameter]] - */ - object ShapeMapParameterAlt - extends OptionalQueryParamDecoderMatcher[String](shape_map) { - val name: String = shape_map + extends OptionalQueryParamDecoderMatcher[String](shapeMap) { + val name: String = shapeMap } /** 
Parameter expected to contain a shapemap format name, referencing the user's shapemap format */ object ShapeMapFormatParameter - extends OptionalQueryParamDecoderMatcher[String](shapemapFormat) { - val name: String = shapemapFormat + extends OptionalQueryParamDecoderMatcher[String](shapeMapFormat) { + val name: String = shapeMapFormat } /** Parameter expected to contain SPARQL query data contents (URL encoded) @@ -226,8 +218,8 @@ object IncomingRequestParameters { * in shapemap-related operations */ object ShapemapSourceParameter - extends OptionalQueryParamDecoderMatcher[String](shapemapSource) { - val name: String = shapemapSource + extends OptionalQueryParamDecoderMatcher[String](shapeMapSource) { + val name: String = shapeMapSource } /** Parameter expected to contain a valid identifier of the source of the data sent by the client (currently raw data, a URL or a file) From 098e0ad403becdd089e7cef6c44924854e6193ff Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Thu, 13 Jan 2022 18:19:21 +0100 Subject: [PATCH 29/32] Implemented ShapeMapInfo operation. 
--- build.sbt | 40 ++++--- .../es/weso/rdfshape/server/Server.scala | 24 ++-- .../api/routes/data/logic/types/Data.scala | 3 +- .../routes/data/logic/types/DataSingle.scala | 2 +- .../logic/operations/SchemaConvert.scala | 2 +- .../schema/logic/operations/SchemaInfo.scala | 2 +- .../api/routes/shapemap/logic/ShapeMap.scala | 2 +- .../logic/operations/ShapeMapInfo.scala | 104 ++++++++++++++++++ .../logic/operations/ShapeMapOperation.scala | 20 ++++ .../shapemap/service/ShapeMapService.scala | 49 ++++----- .../api/utils/parameters/PartsMap.scala | 4 +- 11 files changed, 181 insertions(+), 71 deletions(-) create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapInfo.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala diff --git a/build.sbt b/build.sbt index 84ec7fc4..7af369cf 100644 --- a/build.sbt +++ b/build.sbt @@ -139,9 +139,6 @@ lazy val unidocSettings: Seq[Def.Setting[_]] = Seq( "-private" ) ) - -ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) -ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) // Shared publish settings for all modules. 
lazy val publishSettings = Seq( organization := "es.weso", @@ -186,6 +183,12 @@ lazy val resolverSettings = Seq( Resolver.sonatypeRepo("snapshots") ) ) +// "sbt-github-actions" plugin settings +val JavaCIVersion = "adopt@1.11" +val ScalaCIVersion = "2.13.6" +ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) +ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) + // Shared settings for the BuildInfo Plugin // See https://github.com/sbt/sbt-buildinfo lazy val buildInfoSettings = Seq( @@ -257,7 +260,6 @@ lazy val server = project http4sBlazeClient, http4sEmberClient, http4sCirce, - scalatags, umlShaclex, shexs, shaclex, @@ -290,26 +292,25 @@ lazy val docs = project ) lazy val MUnitFramework = new TestFramework("munit.Framework") /* DEPENDENCY versions */ -lazy val http4sVersion = "1.0.0-M23" -lazy val catsVersion = "2.6.1" +lazy val http4sVersion = "1.0.0-M30" +lazy val catsVersion = "2.7.0" /* ------------------------------------------------------------------------- */ -lazy val mongodbVersion = "4.3.2" +lazy val mongodbVersion = "4.4.0" lazy val any23Version = "2.4" -lazy val rdf4jVersion = "3.7.3" +lazy val rdf4jVersion = "3.7.4" lazy val graphvizJavaVersion = "0.18.1" -lazy val logbackVersion = "1.2.6" +lazy val logbackVersion = "1.2.10" lazy val loggingVersion = "3.9.4" lazy val groovyVersion = "3.0.8" lazy val munitVersion = "0.7.27" -lazy val munitEffectVersion = "1.0.6" +lazy val munitEffectVersion = "1.0.7" lazy val plantumlVersion = "1.2021.14" lazy val scalajVersion = "2.4.2" -lazy val scalatagsVersion = "0.9.4" // WESO dependencies -lazy val shaclexVersion = "0.1.103-SNAPSHOT" -lazy val shexsVersion = "0.1.97" +lazy val shaclexVersion = "0.1.103ult" +lazy val shexsVersion = "0.1.105" lazy val umlShaclexVersion = "0.0.82" -lazy val wesoUtilsVersion = "0.1.99" +lazy val wesoUtilsVersion = "0.2.2" // Dependency modules lazy val http4sDsl = "org.http4s" %% "http4s-dsl" % http4sVersion lazy val http4sBlazeServer = @@ -326,8 
+327,9 @@ lazy val any23_core = "org.apache.any23" % "apache-any23-core" % any23Versio lazy val any23_api = "org.apache.any23" % "apache-any23-api" % any23Version lazy val any23_scraper = "org.apache.any23.plugins" % "apache-any23-html-scraper" % "2.3" -lazy val rdf4j_runtime = "org.eclipse.rdf4j" % "rdf4j-runtime" % rdf4jVersion -lazy val graphvizJava = "guru.nidi" % "graphviz-java" % graphvizJavaVersion +lazy val rdf4j_runtime = "org.eclipse.rdf4j" % "rdf4j-runtime" % rdf4jVersion +lazy val graphvizJava = "guru.nidi" % "graphviz-java" % graphvizJavaVersion +//noinspection SbtDependencyVersionInspection lazy val plantuml = "net.sourceforge.plantuml" % "plantuml" % plantumlVersion lazy val logbackClassic = "ch.qos.logback" % "logback-classic" % logbackVersion lazy val scalaLogging = @@ -336,13 +338,9 @@ lazy val groovy = "org.codehaus.groovy" % "groovy" % groovyVersion lazy val munit = "org.scalameta" %% "munit" % munitVersion lazy val munitEffect = "org.typelevel" %% "munit-cats-effect-3" % munitEffectVersion -lazy val scalaj = "org.scalaj" %% "scalaj-http" % scalajVersion -lazy val scalatags = "com.lihaoyi" %% "scalatags" % scalatagsVersion +lazy val scalaj = "org.scalaj" %% "scalaj-http" % scalajVersion // WESO dependencies lazy val shexs = "es.weso" %% "shexs" % shexsVersion lazy val shaclex = "es.weso" %% "shaclex" % shaclexVersion lazy val umlShaclex = "es.weso" %% "umlshaclex" % umlShaclexVersion lazy val wesoUtils = "es.weso" %% "utilstest" % wesoUtilsVersion -// "sbt-github-actions" plugin settings -val JavaCIVersion = "adopt@1.11" -val ScalaCIVersion = "2.13.6" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index 758aad2b..d568f8a6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -21,12 +21,11 @@ import org.http4s.blaze.client.BlazeClientBuilder 
import org.http4s.blaze.server.BlazeServerBuilder import org.http4s.client.Client import org.http4s.implicits.http4sKleisliResponseSyntaxOptionT -import org.http4s.server.middleware.{CORS, CORSConfig, Logger} +import org.http4s.server.middleware.{CORS, CORSPolicy, Logger} import org.http4s.{HttpApp, HttpRoutes} import java.util.concurrent.TimeUnit import javax.net.ssl.SSLContext -import scala.concurrent.ExecutionContext.global import scala.concurrent.duration._ import scala.language.postfixOps import scala.util.{Failure, Success, Try} @@ -91,7 +90,7 @@ private class Server( */ private def stream(sslContext: Option[SSLContext]): Stream[IO, ExitCode] = { for { - client <- BlazeClientBuilder[IO](global) + client <- BlazeClientBuilder[IO] .withRequestTimeout(requestTimeout.minute) .withIdleTimeout(idleTimeout.minute) .stream @@ -112,7 +111,7 @@ private class Server( sslContext: Option[SSLContext] = None ): BlazeServerBuilder[IO] = { - val baseServer = BlazeServerBuilder[IO](global) + val baseServer = BlazeServerBuilder[IO] .bindHttp(port, ip) .withIdleTimeout(idleTimeout.minutes) .withResponseHeaderTimeout(requestTimeout.minute) @@ -167,11 +166,10 @@ object Server { /** Application's CORS configuration */ - private val corsConfiguration = CORSConfig.default - .withAnyOrigin(true) - .withAnyMethod(true) - .withAllowCredentials(true) - .withMaxAge(new FiniteDuration(1, TimeUnit.DAYS)) + private val corsConfiguration: CORSPolicy = + CORS.policy.withAllowOriginAll.withAllowMethodsAll + .withAllowCredentials(true) + .withMaxAge(new FiniteDuration(1, TimeUnit.DAYS)) // Act as a server factory @@ -195,8 +193,8 @@ object Server { /** Configure the http4s application to use the specified sources as API routes */ - private def routesService(client: Client[IO]): HttpRoutes[IO] = - CORS( + private def routesService(client: Client[IO]): HttpRoutes[IO] = { + corsConfiguration.apply( APIService(client).routes <+> DataService(client).routes <+> SchemaService(client).routes <+> @@ 
-205,7 +203,7 @@ object Server { WikibaseService(client).routes <+> EndpointService(client).routes <+> PermalinkService(client).routes <+> - FetchService(client).routes, - corsConfiguration + FetchService(client).routes ) + } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala index 1e1ade25..4272e5c6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/Data.scala @@ -59,12 +59,13 @@ object Data extends DataCompanion[Data] { /** Dummy implementation meant to be overridden * If called on a general [[Data]] instance, pattern match among the available data types to * use the correct implementation + * @note Defaults to [[DataSingle]]'s implementation */ implicit val decodeData: Decoder[Data] = (cursor: HCursor) => { this.getClass match { - case ds if ds == classOf[DataSingle] => DataSingle.decodeData(cursor) case de if de == classOf[DataEndpoint] => DataEndpoint.decodeData(cursor) case dc if dc == classOf[DataCompound] => DataCompound.decodeData(cursor) + case _ => DataSingle.decodeData(cursor) } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala index 7a8b3d32..85404e5f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala @@ -196,7 +196,7 @@ private[api] object DataSingle dataInference <- cursor - .downField("inference") + .downField("dataInference") .as[InferenceEngine] dataSource <- cursor diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala index ce59c96f..24e8b4e0 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaConvert.scala @@ -53,7 +53,7 @@ private[api] object SchemaConvert extends LazyLogging { ("message", Json.fromString(schemaConvert.successMessage)), ("schema", schemaConvert.inputSchema.asJson), ("result", schemaConvert.result.asJson), - ("targetDataFormat", schemaConvert.targetFormat.asJson) + ("targetSchemaFormat", schemaConvert.targetFormat.asJson) ) ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala index d06d419a..cb4d0beb 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaInfo.scala @@ -24,7 +24,7 @@ private[api] object SchemaInfo extends LazyLogging { private val successMessage = "Well formed Schema" - /** Given an input data, get information about it + /** Given an input schema, get information about it * * @param schema Input schema instance of any type * @return A [[SchemaInfo]] instance with the information of the input schema diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index dd326d99..c75bb204 100644 --- 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -94,7 +94,7 @@ private[api] object ShapeMap extends LazyLogging { Json.obj( ("shapeMap", shapeMap.rawShapeMap.asJson), ("format", shapeMap.format.asJson), - ("inner", shapeMap.innerShapeMap.toOption.map(_.toJson).asJson) + ("model", shapeMap.innerShapeMap.toOption.map(_.toJson).asJson) ) /** Decode JSON into [[ShapeMap]] instances diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapInfo.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapInfo.scala new file mode 100644 index 00000000..baf2f825 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapInfo.scala @@ -0,0 +1,104 @@ +package es.weso.rdfshape.server.api.routes.shapemap.logic.operations + +import cats.effect.IO +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdf.PrefixMap +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap +import es.weso.rdfshape.server.api.routes.shapemap.logic.operations.ShapeMapInfo.ShapeMapInfoResult +import es.weso.rdfshape.server.utils.json.JsonUtils.prefixMap2JsonArray +import es.weso.shapemaps.{ShapeMap => ShapeMapW} +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} + +/** Data class representing the output of a shapeMap-information operation + * + * @param inputShapeMap ShapeMap used as input of the operation + * @param result [[ShapeMapInfoResult]] containing the resulting schema information + */ +final case class ShapeMapInfo private ( + override val inputShapeMap: ShapeMap, + result: ShapeMapInfoResult +) extends ShapeMapOperation(ShapeMapInfo.successMessage, inputShapeMap) + +private[api] object ShapeMapInfo extends LazyLogging { + + private val 
successMessage = "Well formed ShapeMap" + + /** Given an input ShapeMap, get information about it + * + * @param shapeMap Input shapeMap instance of any type + * @return A [[ShapeMapInfo]] instance with the information of the input schema + */ + + def shapeMapInfo(shapeMap: ShapeMap): IO[ShapeMapInfo] = { + val inner = shapeMap.innerShapeMap + + inner match { + case Left(err) => IO.raiseError(new RuntimeException(err)) + case Right(shapeMapW) => + IO { + ShapeMapInfo( + inputShapeMap = shapeMap, + result = ShapeMapInfoResult( + shapeMap = shapeMap, + model = shapeMapW, + numberOfAssociations = shapeMapW.associations.length, + nodesPrefixMap = shapeMapW.nodesPrefixMap, + shapesPrefixMap = shapeMapW.shapesPrefixMap + ) + ) + } + } + + } + + /** Case class representing the results to be returned when performing a shapeMap-info operation + * + * @param shapeMap ShapeMap operated on + * @param model The inner model of the associations in the shapeMap in use + * @param numberOfAssociations Number of node-shape associations stated in the shapeMap + * @param nodesPrefixMap Prefix map for the nodes in the shapeMap + * @param shapesPrefixMap Prefix map for the shapes in the shapeMap + */ + final case class ShapeMapInfoResult private ( + shapeMap: ShapeMap, + model: ShapeMapW, + numberOfAssociations: Int, + nodesPrefixMap: PrefixMap, + shapesPrefixMap: PrefixMap + ) + + /** JSON encoder for [[ShapeMapInfoResult]] + */ + private implicit val encodeShapeMapInfoResult: Encoder[ShapeMapInfoResult] = + (shapeMapInfoResult: ShapeMapInfoResult) => + Json.fromFields( + List( + ( + "numberOfAssociations", + shapeMapInfoResult.numberOfAssociations.asJson + ), + ("format", shapeMapInfoResult.shapeMap.format.asJson), + ( + "nodesPrefixMap", + prefixMap2JsonArray(shapeMapInfoResult.nodesPrefixMap) + ), + ( + "shapesPrefixMap", + prefixMap2JsonArray(shapeMapInfoResult.shapesPrefixMap) + ) + ) + ) + + /** JSON encoder for [[ShapeMapInfoResult]] + */ + implicit val 
encodeShapeMapInfoOperation: Encoder[ShapeMapInfo] = + (shapeMapInfo: ShapeMapInfo) => + Json.fromFields( + List( + ("message", Json.fromString(shapeMapInfo.successMessage)), + ("shapeMap", shapeMapInfo.inputShapeMap.asJson), + ("result", shapeMapInfo.result.asJson) + ) + ) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala new file mode 100644 index 00000000..2f4b69cd --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala @@ -0,0 +1,20 @@ +package es.weso.rdfshape.server.api.routes.shapemap.logic.operations + +import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap + +/** General definition of operations that operate on [[ShapeMap]]s + * + * @param successMessage Message attached to the result of the operation + * @param inputShapeMap ShapeMap operated on + */ +private[operations] abstract class ShapeMapOperation( + val successMessage: String = ShapeMapOperation.successMessage, + val inputShapeMap: ShapeMap +) + +private[operations] object ShapeMapOperation { + + /** Dummy success message + */ + private val successMessage = "Operation completed successfully" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index 6d061a2a..3cf9e41e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -6,7 +6,7 @@ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat import 
es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap -import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap.mkShapeMap +import es.weso.rdfshape.server.api.routes.shapemap.logic.operations.ShapeMapInfo import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import io.circe._ @@ -17,8 +17,6 @@ import org.http4s.client.Client import org.http4s.dsl.Http4sDsl import org.http4s.multipart._ -import scala.util.{Failure, Success, Try} - /** API service to handle shapemap-related operations * * @param client HTTP4S client object @@ -59,34 +57,25 @@ class ShapeMapService(client: Client[IO]) req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) - val maybeShapeMap: IO[Either[String, ShapeMap]] = mkShapeMap(partsMap) - maybeShapeMap.attempt.flatMap( - _.fold( - // General exception - e => errorResponseJson(e.getMessage, InternalServerError), - { - // Error parsing the ShapeMap information sent - case Left(errorStr) => errorResponseJson(errorStr, BadRequest) - // Success parsing the ShapeMap information sent - case Right(shapeMap) => - shapeMap.innerShapeMap match { - // Error creating the inner ShapeMap instance from the data - case Left(errorStr) => - errorResponseJson(errorStr, InternalServerError) - // Success creating the inner ShapeMap instance from the data. 
- // Try to get JSON representation - case Right(_) => - Try { - shapeMap.asJson - } match { - case Failure(exc) => - errorResponseJson(exc.getMessage, InternalServerError) - case Success(json) => Ok(json) - } - } - } + for { + // Get the schema from the partsMap + eitherShapeMap <- ShapeMap.mkShapeMap( + partsMap + ) + response <- eitherShapeMap.fold( + // If there was an error parsing the schema, return it + err => errorResponseJson(err, InternalServerError), + // Else, try and compute the schema info + shapeMap => + ShapeMapInfo + .shapeMapInfo(shapeMap) + .flatMap(info => Ok(info.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) ) - ) + + } yield response } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala index 23628fc6..f0ec7640 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala @@ -4,7 +4,7 @@ import cats.effect.IO import cats.implicits._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.format.Format -import fs2.text.utf8Decode +import fs2.text.utf8.decode import org.http4s.multipart.Part /** Data class containing a map of a request's parameters with the form (param name: param content) @@ -51,7 +51,7 @@ case class PartsMap private (map: Map[String, Part[IO]]) { def optPartValue(key: String): IO[Option[String]] = map.get(key) match { case Some(part) => - part.body.through(utf8Decode).compile.foldMonoid.map(Some.apply) + part.body.through(decode).compile.foldMonoid.map(Some.apply) case None => IO.pure(None) } } From 84421166ad83284860bb02535b8da4228e3c8367 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Thu, 27 Jan 2022 10:17:36 +0100 Subject: [PATCH 30/32] Refactored wikibase service and others 
--- .../es/weso/rdfshape/server/Server.scala | 2 - .../server/api/definitions/ApiDefaults.scala | 75 +- .../api/definitions/ApiDefinitions.scala | 68 ++ .../data/logic/operations/DataOperation.scala | 4 +- .../routes/data/logic/types/DataSingle.scala | 6 +- .../api/routes/data/service/DataService.scala | 14 +- .../api/routes/endpoint/logic/Outgoing.scala | 62 +- .../endpoint/service/EndpointService.scala | 3 +- .../logic/operations/SchemaOperation.scala | 2 + .../logic/trigger/TriggerModeType.scala | 6 +- .../logic/trigger/TriggerShapeMap.scala | 2 +- .../routes/schema/logic/types/Schema.scala | 4 +- .../routes/schema/service/SchemaService.scala | 66 +- .../logic/operations/ShapeMapOperation.scala | 4 +- .../shapemap/service/ShapeMapService.scala | 6 +- .../api/routes/shex/service/ShExService.scala | 43 - .../wikibase/logic/WikibaseEntity.scala | 66 -- .../wikibase/logic/WikibaseSchema.scala | 134 ---- .../logic/model/wikibase/Wikibase.scala | 51 ++ .../logic/model/wikibase/Wikidata.scala | 53 ++ .../wikibase/objects/WikibaseObject.scala | 10 + .../objects/wikidata/WikidataEntity.scala | 24 + .../objects/wikidata/WikidataObject.scala | 42 + .../objects/wikidata/WikidataProperty.scala | 24 + .../objects/wikidata/WikidataSchema.scala | 24 + .../logic/operations/WikibaseOperation.scala | 129 +++ .../operations/WikibaseOperationDetails.scala | 202 +++++ .../operations/WikibaseOperationFormats.scala | 17 + .../operations/WikibaseOperationResult.scala | 35 + .../operations/get/WikibaseGetLabels.scala | 20 + .../operations/get/WikibaseGetOperation.scala | 87 ++ .../operations/get/WikibasePropTypes.scala | 25 + .../languages/WikibaseLanguages.scala | 112 +++ .../query/WikibaseQueryOperation.scala | 69 ++ .../schema/WikibaseSchemaContent.scala | 61 ++ .../schema/WikibaseSchemaExtract.scala | 125 +++ .../schema/WikibaseSchemaValidate.scala | 138 ++++ .../schema/WikibaseSheXerExtract.scala | 325 ++++++++ .../search/WikibaseSearchEntity.scala | 20 + 
.../search/WikibaseSearchLexeme.scala | 20 + .../search/WikibaseSearchOperation.scala | 123 +++ .../search/WikibaseSearchProperty.scala | 20 + .../search/WikibaseSearchTypes.scala | 18 + .../wikibase/service/WikibaseService.scala | 744 +++++++----------- .../service/WikibaseServiceUtils.scala | 147 ---- .../IncomingRequestParameters.scala | 32 +- .../api/utils/parameters/PartsMap.scala | 26 +- .../server/api/values/EndpointValue.scala | 7 - .../api/values/WikidataEntityValue.scala | 7 - .../{HTML2RDF.scala => HtmlToRdf.scala} | 16 +- .../rdfshape/server/streams/Streams.scala | 187 ----- .../server/utils/codec/CodecUtils.scala | 14 + .../rdfshape/server/wikibase/Wikibase.scala | 44 -- .../server/html2rdf/HTML2RDFTest.scala | 2 +- 54 files changed, 2325 insertions(+), 1242 deletions(-) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikibase.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikidata.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/WikibaseObject.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataEntity.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataObject.scala create mode 100644 
modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataProperty.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataSchema.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperation.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationFormats.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationResult.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/get/WikibaseGetLabels.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/get/WikibaseGetOperation.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/get/WikibasePropTypes.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/languages/WikibaseLanguages.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/query/WikibaseQueryOperation.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaContent.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaExtract.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaValidate.scala create mode 
100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSheXerExtract.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchEntity.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchLexeme.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchOperation.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchProperty.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchTypes.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala rename modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/{HTML2RDF.scala => HtmlToRdf.scala} (97%) delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala create mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/utils/codec/CodecUtils.scala delete mode 100644 modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala index d568f8a6..12ec404c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/Server.scala @@ -11,7 +11,6 @@ import 
es.weso.rdfshape.server.api.routes.fetch.service.FetchService import es.weso.rdfshape.server.api.routes.permalink.service.PermalinkService import es.weso.rdfshape.server.api.routes.schema.service.SchemaService import es.weso.rdfshape.server.api.routes.shapemap.service.ShapeMapService -import es.weso.rdfshape.server.api.routes.shex.service.ShExService import es.weso.rdfshape.server.api.routes.wikibase.service.WikibaseService import es.weso.rdfshape.server.utils.error.exceptions.SSLContextCreationException import es.weso.rdfshape.server.utils.error.{ExitCodes, SysUtils} @@ -198,7 +197,6 @@ object Server { APIService(client).routes <+> DataService(client).routes <+> SchemaService(client).routes <+> - ShExService(client).routes <+> ShapeMapService(client).routes <+> WikibaseService(client).routes <+> EndpointService(client).routes <+> diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala index de9956ee..9cb5c87e 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefaults.scala @@ -17,42 +17,63 @@ import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource.SchemaSource import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMapSource.ShapeMapSource -import es.weso.schema.{Schema, Schemas, ShapeMapTrigger} +import es.weso.schema.{ + Schemas, + ShapeMapTrigger, + ValidationTrigger, + Schema => SchemaW +} import es.weso.shapemaps.ShapeMap -import es.weso.utils.FileUtils /** Application-wide defaults */ case object ApiDefaults { - val availableDataFormats: List[DataFormat] = DataFormat.availableFormats - val defaultDataFormat: DataFormat = 
DataFormat.defaultFormat - val defaultRdfFormat: RdfFormat = RdfFormat.defaultFormat - val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats - val defaultSchemaFormat: SchemaFormat = ShaclFormat.defaultFormat - val defaultSchemaFormatName: String = defaultSchemaFormat.name - val availableSchemaEngines: List[String] = Schemas.availableSchemaNames - val defaultSchemaEngine: Schema = Schemas.defaultSchema - val defaultSchemaEngineName: String = defaultSchemaEngine.name - val availableTriggerModes: List[String] = Schemas.availableTriggerModes - val defaultTriggerMode: String = ShapeMapTrigger(ShapeMap.empty).name - val availableInferenceEngines = List( - "NONE", - "RDFS", - "OWL" - ) // TODO: Obtain from RDFAsJenaModel.empty.map(_.availableInferenceEngines).unsafeRunSync - val defaultSchemaEmbedded = false + + /** [[DataFormat]] used when the format can be omitted or is needed but none was provided + */ + val defaultDataFormat: DataFormat = DataFormat.defaultFormat + + /** [[RdfFormat]] used when the format can be omitted or is needed but none was provided + */ + val defaultRdfFormat: RdfFormat = RdfFormat.defaultFormat + + /** [[SchemaFormat]] used when the format can be omitted or is needed but none was provided + */ + val defaultSchemaFormat: SchemaFormat = ShaclFormat.defaultFormat + + /** [[ShapeMapFormat]] used when the format can be omitted or is needed but none was provided + */ + val defaultShapeMapFormat: ShapeMapFormat = ShapeMapFormat.defaultFormat + + /** Schema engined ([[SchemaW]]) used when the engine can be omitted or is needed but none was provided + */ + val defaultSchemaEngine: SchemaW = Schemas.defaultSchema + + /** [[ValidationTrigger]] used when the trigger can be omitted or is needed but none was provided + */ + val defaultTriggerMode: ValidationTrigger = ShapeMapTrigger( + ShapeMap.empty + ) + + /** [[InferenceEngine]] used when the engine can be omitted or is needed but none was provided + */ val 
defaultInferenceEngine: InferenceEngine = NONE - val defaultInferenceEngineName: String = defaultInferenceEngine.name - val defaultDataSource: DataSource = DataSource.defaultDataSource + + /** [[DataSource]] used when the source can be omitted or is needed but none was provided + */ + val defaultDataSource: DataSource = DataSource.defaultDataSource + + /** [[SchemaSource]] used when the source can be omitted or is needed but none was provided + */ val defaultSchemaSource: SchemaSource = SchemaSource.defaultSchemaSource + + /** [[ShapeMapSource]] used when the source can be omitted or is needed but none was provided + */ val defaultShapeMapSource: ShapeMapSource = ShapeMapSource.defaultShapeMapSource - val defaultShapeMapFormat: ShapeMapFormat = ShapeMapFormat.defaultFormat - val availableShapeMapFormats: List[String] = ShapeMap.formats - val defaultActiveShapeMapTab = "#shapeMapTextArea" - val defaultShapeLabel: IRI = IRI("Shape") - val relativeBase: Some[IRI] = Some(IRI("internal://base/")) - def localBase: IRI = IRI(FileUtils.currentFolderURL) + /** [[IRI]] used when the shape label can be omitted or is needed but none was provided + */ + val defaultShapeLabel: IRI = IRI("Shape") } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala index af92658f..2db70fe5 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/definitions/ApiDefinitions.scala @@ -1,10 +1,78 @@ package es.weso.rdfshape.server.api.definitions +import es.weso.rdf.InferenceEngine +import es.weso.rdf.nodes.IRI +import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat +import es.weso.rdfshape.server.api.format.dataFormats.{ + DataFormat, + ShapeMapFormat +} +import es.weso.rdfshape.server.api.routes.schema.logic.trigger.{ + 
TriggerMode, + TriggerModeType +} +import es.weso.schema.{Schemas, Schema => SchemaW} +import es.weso.shapemaps.ShapeMap +import es.weso.utils.FileUtils +import org.http4s.Uri +import org.http4s.implicits.http4sLiteralsSyntax + /** Global definitions used in the API */ case object ApiDefinitions { + lazy val localBase: IRI = IRI(FileUtils.currentFolderURL) + /** API route inside the web server */ val api = "api" + + /** [[List]] of [[DataFormat]]s accepted by the application + */ + val availableDataFormats: List[DataFormat] = DataFormat.availableFormats + + /** [[List]] of [[SchemaFormat]]s accepted by the application + */ + val availableSchemaFormats: List[SchemaFormat] = SchemaFormat.availableFormats + + /** [[List]] of [[SchemaW]]s used by the application + */ + val availableSchemaEngines: List[SchemaW] = Schemas.availableSchemas + + /** [[List]] of [[ShapeMapFormat]]s accepted by the application + */ + val availableShapeMapFormats: List[ShapeMapFormat] = + ShapeMap.formats + .map(f => ShapeMapFormat.fromString(f)) + .filter(_.isRight) + .map(_.getOrElse(ShapeMapFormat.defaultFormat)) + + /** [[List]] of [[TriggerMode]]s accepted by the application, by name + * + * @note Must be coherent with [[es.weso.schema.Schemas.availableTriggerModes]] + */ + val availableTriggerModes: List[String] = + List(TriggerModeType.SHAPEMAP, TriggerModeType.TARGET_DECLARATIONS) + + /** [[List]] of [[InferenceEngine]]s accepted by the application + */ + val availableInferenceEngines: List[InferenceEngine] = + InferenceEngine.availableInferenceEngines + + /** [[IRI]] used as base for nodes created internally + */ + val relativeBase: Some[IRI] = Some(IRI("internal://base/")) + + /** [[Uri]] representation of Wikidata's base URL + */ + val wikidataUri: Uri = uri"https://www.wikidata.org" + + /** [[String]] representation of Wikidata's base URL + */ + val wikidataUrl: String = wikidataUri.renderString + + /** [[Uri]] representation of Wikidata's SPARQL endpoint + */ + val 
wikidataQueryUri: Uri = uri"https://query.wikidata.org/sparql" + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala index cac92bfd..f74f9393 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/operations/DataOperation.scala @@ -5,13 +5,15 @@ import es.weso.rdfshape.server.api.routes.data.logic.types.Data /** General definition of operations that operate on Data * * @param successMessage Message attached to the result of the operation - * @param inputData Data operated on + * @param inputData Data operated on */ private[operations] abstract class DataOperation( val successMessage: String = DataOperation.successMessage, val inputData: Data ) +/** Static utils for Data operations + */ private[operations] object DataOperation { /** Dummy success message diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala index 85404e5f..8cbca296 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/logic/types/DataSingle.scala @@ -12,7 +12,7 @@ import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource import es.weso.rdfshape.server.api.routes.data.logic.aux.InferenceCodecs._ import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ import es.weso.rdfshape.server.api.utils.parameters.PartsMap -import es.weso.rdfshape.server.html2rdf.HTML2RDF +import es.weso.rdfshape.server.html2rdf.HtmlToRdf import 
es.weso.rdfshape.server.utils.networking.NetworkingUtils.getUrlContents import io.circe._ import io.circe.syntax.EncoderOps @@ -94,8 +94,8 @@ sealed case class DataSingle( ): IO[Resource[IO, RDFAsJenaModel]] = { logger.debug(s"RDF from string with format: $format") val formatName = format.name - if(HTML2RDF.availableExtractorNames contains formatName) - IO(HTML2RDF.extractFromString(dataStr, formatName)) + if(HtmlToRdf.availableExtractorNames.contains(formatName)) + IO(HtmlToRdf.extractFromString(dataStr, formatName)) else for { baseIri <- mkBase(base) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index 5263c9cc..d93e5744 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -3,11 +3,11 @@ package es.weso.rdfshape.server.api.routes.data.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.definitions.ApiDefaults.{ - availableInferenceEngines, - defaultInferenceEngineName +import es.weso.rdfshape.server.api.definitions.ApiDefaults.defaultInferenceEngine +import es.weso.rdfshape.server.api.definitions.ApiDefinitions.{ + api, + availableInferenceEngines } -import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.dataFormats._ import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.ShExC import es.weso.rdfshape.server.api.routes.ApiService @@ -80,14 +80,14 @@ class DataService(client: Client[IO]) */ case GET -> Root / `api` / `verb` / "inferenceEngines" => val inferenceEngines = availableInferenceEngines - val json = Json.fromValues(inferenceEngines.map(Json.fromString)) + val json = + 
Json.fromValues(inferenceEngines.map(e => Json.fromString(e.name))) Ok(json) /** Returns the default inference engine used as a raw string */ case GET -> Root / `api` / `verb` / "inferenceEngines" / "default" => - val defaultInferenceEngine = defaultInferenceEngineName - Ok(Json.fromString(defaultInferenceEngine)) + Ok(Json.fromString(defaultInferenceEngine.name)) /** Returns a JSON array with the valid data sources that the server will accept when sent via [[DataSourceParameter]] */ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala index 805ab604..b214a45a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Outgoing.scala @@ -8,36 +8,9 @@ import es.weso.rdf.nodes.{IRI, RDFNode} import es.weso.rdf.triples.RDFTriple import es.weso.rdfshape.server.utils.numeric.NumericUtils import es.weso.utils.IOUtils.{ESIO, stream2es} -import io.circe.Json +import io.circe.{Encoder, Json} -case class Outgoing(node: IRI, endpoint: IRI, children: Children) { - def toJson: Json = Json.fromFields( - List( - ("node", Json.fromString(node.toString)), - ("endpoint", Json.fromString(endpoint.toString)), - ( - "children", - Json.fromValues( - children.m.map(pair => - Json.fromFields( - List( - ("pred", Json.fromString(pair._1.toString)), - ( - "values", - Json.fromValues( - pair._2.toList.map(value => - Json.fromString(value.node.toString) - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) -} +case class Outgoing(node: IRI, endpoint: IRI, children: Children) case class Children(m: Map[IRI, Vector[Value]]) @@ -97,4 +70,35 @@ object Outgoing { Outgoing(node, endpoint, Children(ts.foldLeft(zero)(cmb))) } + implicit val encode: Encoder[Outgoing] = (outgoing: Outgoing) => + Json.fromFields( + List( + ( + "node", + 
Json.fromString(outgoing.node.toString) + ), + ("endpoint", Json.fromString(outgoing.endpoint.toString)), + ( + "children", + Json.fromValues( + outgoing.children.m.map(pair => + Json.fromFields( + List( + ("pred", Json.fromString(pair._1.toString)), + ( + "values", + Json.fromValues( + pair._2.toList.map(value => + Json.fromString(value.node.toString) + ) + ) + ) + ) + ) + ) + ) + ) + ) + ) + } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index 628754a0..4a1879e1 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -24,6 +24,7 @@ import es.weso.rdfshape.server.api.utils.parameters.PartsMap import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.utils.IOUtils._ import io.circe.Json +import io.circe.syntax.EncoderOps import org.http4s._ import org.http4s.circe._ import org.http4s.client.Client @@ -147,7 +148,7 @@ class EndpointService(client: Client[IO]) eitherOutgoing <- getOutgoing(optEndpoint, optNode, optLimit).value resp <- eitherOutgoing.fold( (s: String) => errorResponseJson(s"Error: $s", InternalServerError), - (outgoing: Outgoing) => Ok(outgoing.toJson) + (outgoing: Outgoing) => Ok(outgoing.asJson) ) } yield resp diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala index 781114b7..9b2b7516 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaOperation.scala @@ -12,6 +12,8 @@ private[operations] abstract class SchemaOperation( val inputSchema: Schema ) +/** Static utils for Schema operations + */ private[operations] object SchemaOperation { /** Dummy success message diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala index 5265a637..c23ef164 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerModeType.scala @@ -3,11 +3,11 @@ package es.weso.rdfshape.server.api.routes.schema.logic.trigger /** Enumeration of the different possible Validation Triggers sent by the client. * The trigger sent indicates the API how to proceed with validations */ -private[schema] object TriggerModeType extends Enumeration { +private[api] object TriggerModeType extends Enumeration { type TriggerModeType = String - val SHAPEMAP = "shapeMap" - val TARGET_DECLARATIONS = "targetDecls" + val SHAPEMAP = "ShapeMap" + val TARGET_DECLARATIONS = "TargetDecls" val defaultSchemaSource: TriggerModeType = SHAPEMAP } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala index f8fd86ca..036b21a8 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/trigger/TriggerShapeMap.scala @@ -19,7 +19,7 @@ import io.circe.{Decoder, Encoder, HCursor, Json} * * @param shapeMap Inner shapemap 
associated to the [[TriggerShapeMap()]] */ -sealed case class TriggerShapeMap private ( +sealed case class TriggerShapeMap( shapeMap: ShapeMap, override val data: Option[Data], override val schema: Option[Schema] diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala index a891e184..e8cfaec7 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/types/Schema.scala @@ -3,7 +3,7 @@ package es.weso.rdfshape.server.api.routes.schema.logic.types import cats.effect.IO import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.nodes.IRI -import es.weso.rdfshape.server.api.definitions.ApiDefaults +import es.weso.rdfshape.server.api.definitions.ApiDefinitions import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.SchemaFormat import es.weso.rdfshape.server.api.routes.schema.logic.SchemaSource.SchemaSource import es.weso.rdfshape.server.api.utils.parameters.PartsMap @@ -21,7 +21,7 @@ trait Schema { /** Default URI obtained from current folder */ - lazy val base: Option[IRI] = Some(ApiDefaults.localBase) + lazy val base: Option[IRI] = Some(ApiDefinitions.localBase) // ApiDefaults.relativeBase /** Source where the schema comes from diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala index 9d5c04be..8c5b0795 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/service/SchemaService.scala @@ -2,6 +2,7 @@ package es.weso.rdfshape.server.api.routes.schema.service import 
cats.effect._ import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.definitions.ApiDefinitions import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.format.dataFormats.DataFormat import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.{ @@ -18,7 +19,6 @@ import es.weso.rdfshape.server.api.routes.schema.logic.operations.{ } import es.weso.rdfshape.server.api.routes.schema.logic.trigger.{ TriggerMode, - TriggerModeType, TriggerShapeMap } import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema @@ -52,9 +52,11 @@ class SchemaService(client: Client[IO]) /** Returns a JSON array with the accepted schema engines for ShEx */ - case GET -> Root / `api` / `verb` / "engines" => - val engineNames = Schemas.availableSchemaNames - val json = Json.fromValues(engineNames.map(Json.fromString)) + case GET -> Root / `api` / `verb` / "engines" / "shex" => + val shexSchemas = List(Schemas.shEx) + val json = Json.fromValues( + shexSchemas.map(_.name).map(str => Json.fromString(str)) + ) Ok(json) /** Returns a JSON array with the accepted schema engines for SHACL @@ -70,8 +72,8 @@ class SchemaService(client: Client[IO]) /** Returns the default schema format as a raw string */ case GET -> Root / `api` / `verb` / "engines" / "default" => - val schemaEngine = Schemas.defaultSchemaName - val json = Json.fromString(schemaEngine) + val schemaEngine = Schemas.defaultSchema + val json = Json.fromString(schemaEngine.name) Ok(json) /** Returns a JSON array with the accepted schema formats. 
@@ -85,7 +87,7 @@ class SchemaService(client: Client[IO]) optSchemaEngine.getOrElse(Schemas.defaultSchemaName) ) formats = schema match { - case ShExSchema(_) => ShExFormat.availableFormats + case _: ShExSchema => ShExFormat.availableFormats case _ => ShaclFormat.availableFormats } } yield Json.fromValues( @@ -103,7 +105,7 @@ class SchemaService(client: Client[IO]) */ case GET -> Root / `api` / `verb` / "triggerModes" => val json = Json.fromValues( - List(TriggerModeType.SHAPEMAP, TriggerModeType.TARGET_DECLARATIONS).map( + ApiDefinitions.availableTriggerModes.map( Json.fromString ) ) @@ -286,54 +288,6 @@ class SchemaService(client: Client[IO]) ) } ) - - // { - // val partsMap = PartsMap(m.parts) - // val r = for { - // dataPair <- DataSingle.getData(partsMap, relativeBase) - // (resourceRdf, dp) = dataPair - // res <- for { - // emptyRes <- RDFAsJenaModel.empty - // vv <- (resourceRdf, emptyRes).tupled.use { case (rdf, builder) => - // for { - // schemaPair <- Schema.mkSchema(partsMap, Some(rdf)) - // (schema, _) = schemaPair - /* maybeTriggerMode <- TriggerMode.mkTriggerMode(partsMap) */ - /* newRdf <- applyInference(rdf, dp.inference) */ - // ret <- maybeTriggerMode match { - // case Left(err) => - // IO.raiseError( - // new RuntimeException( - // s"Could not obtain validation trigger: $err" - // ) - // ) - // case Right(triggerMode) => - // for { - // r <- io2f( - // schemaValidate( - // newRdf, - // schema, - // triggerMode, - // relativeBase, - // builder - // ) - // ) - // json <- io2f(schemaResult2json(r._1)) - // } yield json - // } - // } yield ret - // } - // } yield vv - // } yield res - // - // for { - // e <- r.attempt - // res <- e.fold( - // exc => errorResponseJson(exc.getMessage, BadRequest), - // json => Ok(json) - // ) - // } yield res - // } } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala index 2f4b69cd..cd096541 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/operations/ShapeMapOperation.scala @@ -5,13 +5,15 @@ import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap /** General definition of operations that operate on [[ShapeMap]]s * * @param successMessage Message attached to the result of the operation - * @param inputShapeMap ShapeMap operated on + * @param inputShapeMap ShapeMap operated on */ private[operations] abstract class ShapeMapOperation( val successMessage: String = ShapeMapOperation.successMessage, val inputShapeMap: ShapeMap ) +/** Static utils for ShapeMap operations + */ private[operations] object ShapeMapOperation { /** Dummy success message diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala index 3cf9e41e..a910844f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/service/ShapeMapService.scala @@ -2,8 +2,8 @@ package es.weso.rdfshape.server.api.routes.shapemap.service import cats.effect._ import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.definitions.ApiDefinitions import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.format.dataFormats.ShapeMapFormat import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.shapemap.logic.ShapeMap import es.weso.rdfshape.server.api.routes.shapemap.logic.operations.ShapeMapInfo @@ 
-35,8 +35,8 @@ class ShapeMapService(client: Client[IO]) /** Returns a JSON array with the accepted shapeMap formats. */ case GET -> Root / `api` / `verb` / "formats" => - val formats = ShapeMapFormat.availableFormats.map(_.name) - val json = Json.fromValues(formats.map(str => Json.fromString(str))) + val formats = ApiDefinitions.availableShapeMapFormats + val json = Json.fromValues(formats.map(f => Json.fromString(f.name))) Ok(json) /** Obtain information about a shapeMap. diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala deleted file mode 100644 index 848e3d4c..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shex/service/ShExService.scala +++ /dev/null @@ -1,43 +0,0 @@ -package es.weso.rdfshape.server.api.routes.shex.service - -import cats.effect._ -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api -import es.weso.rdfshape.server.api.routes.ApiService -import es.weso.schema._ -import io.circe._ -import org.http4s._ -import org.http4s.circe._ -import org.http4s.client.Client -import org.http4s.dsl.Http4sDsl - -class ShExService(client: Client[IO]) - extends Http4sDsl[IO] - with ApiService - with LazyLogging { - - override val verb: String = "shEx" - - /** Describe the API routes handled by this service and the actions performed on each of them - */ - val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - - /** Returns a JSON array with the accepted ShEx schema formats - */ - case GET -> Root / `api` / `verb` / "formats" => - val formats = Schemas.availableFormats - val json = Json.fromValues(formats.map(str => Json.fromString(str))) - Ok(json) - } -} - -object ShExService { - - /** Service factory - * - * @param client Underlying http4s client - * @return A new ShEx Service - */ - def apply(client: Client[IO]): 
ShExService = - new ShExService(client) -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala deleted file mode 100644 index 76514796..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseEntity.scala +++ /dev/null @@ -1,66 +0,0 @@ -package es.weso.rdfshape.server.api.routes.wikibase.logic - -import cats.implicits._ -import com.typesafe.scalalogging.LazyLogging -import org.http4s.Uri -import org.http4s.implicits._ - -import scala.util.matching.Regex - -/** Data class containing the information to fetch a given WikibaseEntity - */ -private[wikibase] case class WikibaseEntity( - localName: String, - uri: Uri, - sourceUri: String -) - -/** Static utilities to aid in converting information to WikibaseEntity instances - */ -object WikibaseEntity extends LazyLogging { - - /** Create a WikibaseEntity instance from a wikidata URI - * - * @param entity String containing an entity unique URI in wikidata (e.g.: https://www.wikidata.org/wiki/Q14317) - * @return Either an instance of {@link es.weso.rdfshape.server.api.routes.wikibase.logic.WikibaseEntity} containing the entity information, or an error message - */ - def uriToEntity(entity: String): Either[String, WikibaseEntity] = { - val wdRegex = "http://www.wikidata.org/entity/(.*)".r - entity match { - case wdRegex(localName) => - val uri = - uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") - WikibaseEntity(localName, uri, entity).asRight[String] - case _ => - s"Entity: $entity doesn't match regular expression: $wdRegex" - .asLeft[WikibaseEntity] - } - } - - /** Create a WikibaseEntity instance from a wikidata URI (alternate) - * - * @param entity String containing an entity unique URI in wikidata (e.g.: https://www.wikidata.org/wiki/Q14317) - * @return Either an instance of 
{@link es.weso.rdfshape.server.api.routes.wikibase.logic.WikibaseEntity} containing the entity information, or an error message - */ - def uriToEntity2(entity: String): Either[String, WikibaseEntity] = { - val wdRegex: Regex = "<(http://www.wikidata.org/entity/(.*))>".r - entity match { - case wdRegex(_, _) => - val matches = wdRegex.findAllIn(entity) - logger.debug(s"Wikidata matches: ${matches.groupCount}") - if(matches.groupCount == 2) { - val localName = matches.group(2) - val sourceUri = matches.group(1) - val uri = - uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") - logger.debug(s"Wikidata item uri: $uri") - WikibaseEntity(localName, uri, sourceUri).asRight[String] - } else - s"Entity: $entity doesn't match regular expression: $wdRegex" - .asLeft[WikibaseEntity] - case _ => - s"Entity: $entity doesn't match regular expression: $wdRegex" - .asLeft[WikibaseEntity] - } - } -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala deleted file mode 100644 index d10db48c..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/WikibaseSchema.scala +++ /dev/null @@ -1,134 +0,0 @@ -//package es.weso.rdfshape.server.api.routes.wikibase.logic -// -//import cats.effect._ -//import com.typesafe.scalalogging.LazyLogging -//import es.weso.rdf.RDFReasoner -//import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema -/* import - * es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.WdSchemaParameter */ -//import es.weso.rdfshape.server.api.utils.parameters.PartsMap -//import es.weso.rdfshape.server.wikibase._ -//import es.weso.schema.{Schemas, Schema => SchemaW} -//import org.http4s._ -//import org.http4s.client._ -//import org.http4s.dsl.io._ -// -//case class WikibaseSchema( -// maybeSchemaParam: 
Option[Schema], -// maybeEntitySchema: Option[String], -// schemaStr: Option[String], -// wikibase: Wikibase = Wikidata -//) { -// -// def getSchema( -// maybeData: Option[RDFReasoner], -// client: Client[IO] -// ): IO[(Option[String], Either[String, SchemaW])] = { -// (maybeSchemaParam, maybeEntitySchema) match { -// case (None, None) => -// IO.pure((None, Left(s"No values for entity schema or schema"))) -// case (Some(schemaParam), None) => schemaParam.getSchema() -// case (None, Some(entitySchema)) => -// schemaFromEntitySchema(entitySchema, client) -// case (Some(schemaParam), Some(entitySchema)) => -// schemaFromEntitySchema(entitySchema, client) -// -// } -// } -// -// def schemaFromEntitySchema( -// es: String, -// client: Client[IO] -// ): IO[(Option[String], Either[String, SchemaW])] = { -// val uriSchema = wikibase.schemaEntityUri(es) -// val r: IO[(SchemaW, String)] = for { -// strSchema <- deref(uriSchema, client) -// schema <- Schemas.fromString(strSchema, "ShEXC", "ShEx") -// } yield (schema, strSchema) -// r.attempt.map { -// case Left(t) => (None, Left(t.getMessage)) -// case Right(pair) => -// val (schema, str) = pair -// (Some(str), Right(schema)) -// } -// -// } -// -// private def deref(uri: Uri, client: Client[IO]): IO[String] = { -// val reqSchema: Request[IO] = Request(method = GET, uri = uri) -// client.expect[String](reqSchema) -// } -//} -// -//object WikibaseSchema extends LazyLogging { -// -// private[api] def mkSchema( -// partsMap: PartsMap, -// data: Option[RDFReasoner], -// client: Client[IO] -// ): IO[(SchemaW, WikibaseSchema)] = { -// val r: IO[(SchemaW, WikibaseSchema)] = for { -// sp <- mkWikibaseSchemaParam(partsMap) -// p <- sp.getSchema(data, client) -// (maybeStr, maybeSchema) = p -// res <- maybeSchema match { -// case Left(str) => -// IO.raiseError( -// new RuntimeException(s"Error obtaining wikibase parameters: $str") -// ) -// case Right(schema) => IO.pure((schema, sp.copy(schemaStr = maybeStr))) -// } -// } yield res -// 
r -// } -// -// /** Build a [[WikibaseSchema]] from request parameters -// * -// * @param partsMap Request parameters -// * @return Either the [[WikibaseSchema]] or an error constructing it -// */ -// private[api] def mkWikibaseSchemaParam( -// partsMap: PartsMap -// ): IO[Either[String, WikibaseSchema]] = -// for { -// // WD Schema param as sent by client -// paramWdSchema <- partsMap.optPartValue(WdSchemaParameter.name) -// // endpointStr <- partsMap.partValue("endpoint") -// // endpoint <- either2f(IRI.fromString(endpointStr)) -// maybeSchema <- Schema.mkSchema(partsMap) -// result <- (paramWdSchema, maybeSchema) match { -// case (None, Left(err)) => -// val msg = -// s"Could not user supplied param and missing wdschema param: $err" -// logger.error(msg) -// IO.pure(Left(msg)) -// -// case (None, Right(schema)) => -// IO.pure( -// Right( -// WikibaseSchema.empty.copy(maybeSchemaParam = Option(schema)) -// ) -// ) -// case (Some(wdSchema), Left(err)) => -// logger.error(s"Could not build user supplied schema: $err") -// IO.pure( -// Right( -// WikibaseSchema.empty.copy(maybeEntitySchema = Option(wdSchema)) -// ) -// ) -// case (Some(wdSchema), Right(schema)) => -// IO.pure( -// Right( -// WikibaseSchema.empty -// .copy( -// maybeSchemaParam = Option(schema), -// maybeEntitySchema = Option(wdSchema) -// ) -// ) -// ) -// } -// } yield result -// -// private[api] def empty: WikibaseSchema = -// WikibaseSchema(None, None, None) -//} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikibase.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikibase.scala new file mode 100644 index 00000000..7e0e7d60 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikibase.scala @@ -0,0 +1,51 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase + +import 
es.weso.rdfshape.server.utils.codec.CodecUtils.uriEncoder +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} +import org.http4s._ +import org.http4s.implicits.http4sLiteralsSyntax + +/** Abstract representation of a wikibase instance + * + * @param name Given name of the wikibase instance + * @param baseUrl Base URL where the instance is deployed (e.g. [[https://www.wikidata.org/]]) + * @param queryUrl SPARQL query endpoint of the wikibase instance, where SPARQL queries are targeted + * It may vary depending on the instance. + */ +private[api] case class Wikibase( + name: Option[String] = Option("wikibase instance"), + baseUrl: Uri = uri"", + queryUrl: Uri = uri"" +) { + + /** Base API endpoint of the wikibase instance (e.g. [[https://www.wikidata.org/w/api.php]]) + * + * @note Unlike [[queryUrl]], this can be inferred from [[baseUrl]] + */ + lazy val apiUrl: Uri = baseUrl / "w" / "api.php" + + /** Given a schema identifier, return it's location inside the wikibase instance + * Default implementation is based on Wikidata's and should be overridden. 
+ * + * @param schema String representation of the schema identifier + * @return Uri where the schema can be accessed + */ + def schemaEntityUri(schema: String): Uri = + baseUrl / "wiki" / "Special:EntitySchemaText" / schema +} + +object Wikibase { + + /** JSON encoder for [[Wikibase]] + */ + implicit val encode: Encoder[Wikibase] = (wikibase: Wikibase) => + Json.fromFields( + List( + ("name", wikibase.name.asJson), + ("baseUrl", wikibase.baseUrl.asJson), + ("queryUrl", wikibase.queryUrl.asJson), + ("apiUrl", wikibase.apiUrl.asJson) + ) + ) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikidata.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikidata.scala new file mode 100644 index 00000000..fcad8d5a --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/Wikidata.scala @@ -0,0 +1,53 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase + +import es.weso.rdf.PrefixMap +import es.weso.rdf.nodes.IRI +import org.http4s.implicits.http4sLiteralsSyntax + +/** A sub-instance of the more general [[Wikibase]] class, + * configured to reference and access Wikidata + * + * @see [[https://www.wikidata.org/ Wikidata]] + */ +private[api] object Wikidata + extends Wikibase( + name = Option("wikidata"), + baseUrl = uri"https://www.wikidata.org", + queryUrl = uri"https://query.wikidata.org" / "sparql" + ) { + + /** @return List of tuples with all the prefixes used by Wikidata + * an their short-key values + */ + lazy val wikidataPrefixes: List[(String, String)] = { + //noinspection HttpUrlsUsage,SpellCheckingInspection + + List( + ("wikibase", "http://wikiba.se/ontology#"), + ("bd", "http://www.bigdata.com/rdf#"), + ("wd", "http://www.wikidata.org/entity/"), + ("wdt", "http://www.wikidata.org/prop/direct/"), + ("wdtn", "http://www.wikidata.org/prop/direct-normalized/"), + ("wds", 
"http://www.wikidata.org/entity/statement/"), + ("p", "http://www.wikidata.org/prop/"), + ("wdref", "http://www.wikidata.org/reference/"), + ("wdv", "http://www.wikidata.org/value/"), + ("ps", "http://www.wikidata.org/prop/statement/"), + ("psv", "http://www.wikidata.org/prop/statement/value/"), + ("psn", "http://www.wikidata.org/prop/statement/value-normalized/"), + ("pq", "http://www.wikidata.org/prop/qualifier/"), + ("pqv", "http://www.wikidata.org/prop/qualifier/value/"), + ("pqn", "http://www.wikidata.org/prop/qualifier/value-normalized/"), + ("pr", "http://www.wikidata.org/prop/reference/"), + ("prv", "http://www.wikidata.org/prop/reference/value/"), + ("prn", "http://www.wikidata.org/prop/reference/value-normalized/"), + ("wdno", "http://www.wikidata.org/prop/novalue/") + ) + } + + /** [[PrefixMap]] instance containing all Wikidata prefixes in [[wikidataPrefixes]] + */ + lazy val wikidataPrefixMap: PrefixMap = { + PrefixMap.fromMap(wikidataPrefixes.toMap.view.mapValues(IRI.apply).toMap) + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/WikibaseObject.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/WikibaseObject.scala new file mode 100644 index 00000000..31a0529d --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/WikibaseObject.scala @@ -0,0 +1,10 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects + +import org.http4s.Uri + +/** General class representing any object (entity, schema...) 
living in a + * wikibase instance + * + * @param entityUri URL where the object data can be found + */ +private[api] class WikibaseObject(val entityUri: Uri) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataEntity.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataEntity.scala new file mode 100644 index 00000000..595b26fa --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataEntity.scala @@ -0,0 +1,24 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.wikidata + +import org.http4s.Uri +import org.http4s.implicits.http4sLiteralsSyntax + +import scala.util.matching.Regex + +/** Data class representing a Wikidata entity + */ +case class WikidataEntity( + override val entityUri: Uri +) extends WikidataObject(entityUri) { + + override val wikidataRegex: Regex = + "(http(s)?://www.wikidata.org/entity/(.+))|(http(s)?://www.wikidata.org/wiki/(.+))".r + + checkUri() + + override val localName: String = + entityUri.renderString.split("/").last.stripSuffix("#") + + override val contentUri: Uri = + uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataObject.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataObject.scala new file mode 100644 index 00000000..36a0b900 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataObject.scala @@ -0,0 +1,42 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.wikidata + +import 
es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.WikibaseObject +import es.weso.rdfshape.server.utils.networking.NetworkingUtils +import org.http4s.Uri + +import scala.util.matching.Regex + +/** Abstract class representing any object (entity, schema...) living in + * Wikidata + */ +abstract class WikidataObject( + override val entityUri: Uri +) extends WikibaseObject(entityUri) { + + /** Either the raw contents of this object or the error + * occurred while retrieving them + */ + lazy val contents: Either[String, String] = + NetworkingUtils.getUrlContents(contentUri.renderString) + + /** Short name or identifier of the entity, e.g.: Q123, + * normally this is the last part of [[entityUri]] + */ + val localName: String + + /** URL where the entity data can be found in raw form + */ + val contentUri: Uri + + /** Regular expression used to recognize wikidata objects of the required type + */ + val wikidataRegex: Regex + + /** Assert the given [[entityUri]] is a valid uri complying with + * [[wikidataRegex]] + */ + def checkUri(): Unit = assume( + wikidataRegex.matches(entityUri.renderString), + s"Uri '${entityUri.renderString}' does not comply with '${wikidataRegex.regex}'" + ) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataProperty.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataProperty.scala new file mode 100644 index 00000000..7f01b4d9 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataProperty.scala @@ -0,0 +1,24 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.wikidata + +import org.http4s.Uri +import org.http4s.implicits.http4sLiteralsSyntax + +import scala.util.matching.Regex + +/** Data class representing a Wikidata schema + */ +case class 
WikidataProperty( + override val entityUri: Uri +) extends WikidataObject(entityUri) { + + override val wikidataRegex: Regex = + "http(s)?://www.wikidata.org/wiki/Property:(.+)".r + + checkUri() + + override val localName: String = + entityUri.renderString.split(":").last.stripSuffix("#") + + override val contentUri: Uri = + uri"https://www.wikidata.org" / "wiki" / "Special:EntityData" / (localName + ".ttl") +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataSchema.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataSchema.scala new file mode 100644 index 00000000..7acb6ea8 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/model/wikibase/objects/wikidata/WikidataSchema.scala @@ -0,0 +1,24 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.wikidata + +import org.http4s.Uri +import org.http4s.implicits.http4sLiteralsSyntax + +import scala.util.matching.Regex + +/** Data class representing a Wikidata schema + */ +case class WikidataSchema( + override val entityUri: Uri +) extends WikidataObject(entityUri) { + + override val wikidataRegex: Regex = + "http(s)?://www.wikidata.org/wiki/EntitySchema:(.+)".r + + checkUri() + + override val localName: String = + entityUri.renderString.split(":").last.stripSuffix("#") + + override val contentUri: Uri = + uri"https://www.wikidata.org" / "wiki" / "Special:EntitySchemaText" / localName +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperation.scala new file mode 100644 index 00000000..17a48ca5 --- /dev/null +++ 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperation.scala @@ -0,0 +1,129 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations + +import cats.effect.IO +import cats.implicits.catsSyntaxEitherId +import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase +import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.{ + Wikibase, + Wikidata +} +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperation.successMessage +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.query.WikibaseQueryOperation +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema.WikibaseSheXerExtract +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search.WikibaseSearchOperation +import org.http4s.Method.GET +import org.http4s._ +import org.http4s.client.Client + +/** General definition of operations that operate on wikibase + * + * @param successMessage Message attached to the result of the operation + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + */ +private[operations] abstract class WikibaseOperation( + val successMessage: String = successMessage, + val operationData: WikibaseOperationDetails, + val client: Client[IO] +) { + + /** Wikibase instance to be queried by this operation + * Given the data needed for the wikibase operation, configure the target wikibase instance. + * Pattern match along all possible operations and set the target's [[Wikibase.baseUrl]] and/or [[wikibase.Wikibase.queryUrl]] + * as needed for each operation. 
+ * + * For instance: a [[WikibaseSearchOperation]] operation will include the wikibase's base URL + * whereas a WikibaseQuery operation will include the wikibase's SPARQL query URL + * + * @note Initial check for Wikidata URLs + */ + lazy val targetWikibase: Wikibase = + if( + operationData.endpoint.equals(Wikidata.baseUrl) || operationData.endpoint + .equals(Wikidata.queryUrl) || operationData.endpoint + .equals(Wikidata.apiUrl) + ) Wikidata + else + this match { + /* In certain operations, assume the endpoint the client sent is the + * SPARQL endpoint of the wikibase instance */ + case _: WikibaseQueryOperation | _: WikibaseSheXerExtract => + Wikibase(queryUrl = operationData.endpoint) + /* We normally assume that the endpoint the client sent is the base URL + * of the wikibase instance */ + case _ => + Wikibase(baseUrl = operationData.endpoint) + } + + /** [[Uri]] containing the target URL to be queried by the operation. + * This property is to be overridden to fit each operation's needs. + */ + val targetUri: Uri + + /** Maximum amount of results queried in search operations + */ + val defaultLimit: Int = 20 + + /** Offset where to continue a search operation + */ + val defaultContinue = 0 + + /** Base request object that will be executed when performing the operation. 
+ * Meant to be overridden or complemented with additional configurations + */ + def request: Request[IO] = + Request(method = GET, uri = targetUri).withHeaders(Headers.empty) + + /** Makes the necessary requests against the [[targetWikibase]] + * as configured in the [[targetUri]] and returns the obtained JSON results + * + * @param decoder [[EntityDecoder]] required for deserializing the response + * @tparam R Desired type to which the response shall be decoded + * @return Either the Json response from the [[targetWikibase]] or a textual error + */ + + def performRequest[R](request: Request[IO] = request)(implicit + decoder: EntityDecoder[IO, R] + ): IO[Either[String, R]] = { + // Execute request and get results + for { + // Extract the request response + eitherResp <- client.run(request).use { + // Got a resource with a Response[IO] + case Status.Successful(response) => + response.attemptAs[R].leftMap(_.message).value + case failedResponse => + failedResponse + .attemptAs[String] + .fold( + decodeErr => decodeErr.message, + rawResponse => + s"Request $request failed with status ${failedResponse.status.code} " + + s"and body $rawResponse" + ) + .map(_.asLeft[R]) + } + } yield eitherResp + } + + /** Executes the operation against [[targetWikibase]] + * + * @return A [[WikibaseOperationResult]] instance with the operation results + */ + def performOperation: IO[WikibaseOperationResult] +} + +/** Static utilities for [[WikibaseOperation]] + */ +private[operations] object WikibaseOperation { + + /** Error message for operations currently limited to Wikidata instead + * of any wikibase instance + */ + val wikidataOnlyMessage = + "Cannot extract schemas from wikibase instances other than Wikidata" + + /** Dummy success message + */ + private val successMessage = "Operation completed successfully" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala new file mode 100644 index 00000000..ff1f87f6 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala @@ -0,0 +1,202 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations + +import cats.effect.IO +import cats.implicits.catsSyntaxEitherId +import com.typesafe.scalalogging.LazyLogging +import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.Wikidata +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperationFormats.WikibaseQueryFormat +import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters._ +import es.weso.rdfshape.server.api.utils.parameters.PartsMap +import es.weso.rdfshape.server.utils.codec.CodecUtils.uriEncoder +import io.circe.syntax.EncoderOps +import io.circe.{Encoder, Json} +import org.http4s.Uri +import org.http4s.implicits.http4sLiteralsSyntax + +/** Case class representing the data attached to the queries made to wikibase's API + * or SPARQL endpoint. Optional parameters may not be required in some operations. + * + * @param endpoint [[Uri]] to be used to access a resource in a wikibase instance + * @param payload Data accompanying the request to the wikibase + * @param searchLanguage Tell the wikibase the language used in a search operation. + * @param resultLanguages Filter the languages returned in queries with internationalized results + * (empty list returns all available languages). + * Each language must be represented by its language code. 
+ * @param limit Maximum amount of results queried in search operations + * @param continue Offset where to continue a search operation + * @param format Format in which results are requested + * @see [[https://www.mediawiki.org/wiki/Wikibase/API#API_documentation_and_Wikibase_modules]] + */ +case class WikibaseOperationDetails( + endpoint: Uri, + payload: String, + searchLanguage: Option[String], + resultLanguages: Option[List[String]], + limit: Option[Int], + continue: Option[Int], + format: Option[WikibaseQueryFormat] +) + +object WikibaseOperationDetails extends LazyLogging { + + /** Dummy empty query to be used when needed + */ + val emptyQuery: WikibaseOperationDetails = WikibaseOperationDetails( + endpoint = uri"", + payload = "", + searchLanguage = None, + resultLanguages = None, + limit = None, + continue = None, + format = None + ) + + /** Message to be logged/used when no endpoint was supplied + */ + val missingEndpointMessage = + "Missing endpoint for the wikibase operation, defaulting to Wikidata" + + /** Message to be logged/used when an unexpected error occurs + * processing the parameters + */ + val unprocessableParamsMessage = + "An unexpected error occurred while processing the request parameters" + + /** JSON encoder for [[WikibaseOperationDetails]] + * To be used for HTTP responses to clients + */ + implicit val encode: Encoder[WikibaseOperationDetails] = + (opDetails: WikibaseOperationDetails) => + Json.fromFields( + List( + ("endpoint", opDetails.endpoint.asJson), + ("payload", opDetails.payload.asJson), + ("searchLanguage", opDetails.searchLanguage.asJson), + ("resultLanguages", opDetails.resultLanguages.asJson), + ("limit", opDetails.limit.asJson), + ("continue", opDetails.continue.asJson), + ("format", opDetails.format.asJson) + ) + ) + + /** Given a GET request's parameters, try to extract an instance + * of [[WikibaseOperationDetails]] from them + * + * @param params Request's parameters + * @return Either the [[WikibaseOperationDetails]] 
instance or an error message + */ + def apply( + params: Map[String, String] + ): IO[Either[String, WikibaseOperationDetails]] = { + // 1. Check for the existence of basic parameters: endpoint + val wikibaseEndpoint = params.get(EndpointParameter.name) + val payload = params.get(WikibasePayloadParameter.name).map(_.strip()) + val queryMainData = (wikibaseEndpoint, payload.getOrElse("")) + + // 2. Fill in with the rest of data, optionally absent + val language = params.get(LanguageParameter.name).map(_.strip()) + val languages = params + .get(LanguagesParameter.name) + .map( + _.strip().split('|').toList + ) // "|" is the separating char for wikibase + val limit = params.get(LimitParameter.name).map(_.toInt) + val continue = params.get(ContinueParameter.name).map(_.toInt) + val format = params.get(WikibaseFormatParameter.name).map(_.strip()) + + IO { + queryMainData match { + case (Some(endpoint), payload) => + val endpointUri = Uri.fromString(endpoint.strip()) + endpointUri match { + case Left(parseErr) => + parseErr.details.asLeft[WikibaseOperationDetails] + case Right(uri) => + WikibaseOperationDetails( + endpoint = uri, + payload = payload, + searchLanguage = language, + resultLanguages = languages, + limit = limit, + continue = continue, + format = format + ).asRight[String] + } + + case (None, payload) => + logger.warn(missingEndpointMessage) + WikibaseOperationDetails( + endpoint = Wikidata.baseUrl, + payload = payload, + searchLanguage = language, + resultLanguages = languages, + limit = limit, + continue = continue, + format = format + ).asRight[String] + case _ => + logger.error(unprocessableParamsMessage) + Left(unprocessableParamsMessage) + } + } + } + + /** Given a POST request's parameters, try to extract an instance + * of [[WikibaseOperationDetails]] from them + * + * @param params Request's parameters + * @return Either the [[WikibaseOperationDetails]] instance or an error message + */ + def apply( + params: PartsMap + ): IO[Either[String, 
WikibaseOperationDetails]] = for { + // 1. Check for the existence of endpoint and payload + wikibaseEndpoint <- params.optPartValue(EndpointParameter.name) + payload <- params.optPartValue(WikibasePayloadParameter.name) + queryMainData = (wikibaseEndpoint, payload.getOrElse("")) + + // 2. Fill in with the rest of data, optionally absent + language <- params.optPartValue(LanguageParameter.name) + languages <- params.optPartValue(LanguagesParameter.name) + limit <- params.optPartValue(LimitParameter.name) + continue <- params.optPartValue(ContinueParameter.name) + format <- params.optPartValue(WikibaseFormatParameter.name) + + } yield queryMainData match { + case (Some(endpoint), payload) => + val endpointUri = Uri.fromString(endpoint.strip()) + endpointUri match { + case Left(parseErr) => + parseErr.details.asLeft[WikibaseOperationDetails] + case Right(uri) => + WikibaseOperationDetails( + endpoint = uri, + payload = payload.strip(), + searchLanguage = language.map(_.strip()), + resultLanguages = languages.map( + _.split('|').toList + ), + limit = limit.map(_.toInt), + continue = continue.map(_.toInt), + format = format.map(_.strip()) + ).asRight[String] + } + + case (None, payload) => + logger.warn(missingEndpointMessage) + WikibaseOperationDetails( + endpoint = Wikidata.baseUrl, + payload = payload.strip(), + searchLanguage = language.map(_.strip()), + resultLanguages = languages.map( + _.split('|').toList + ), + limit = limit.map(_.toInt), + continue = continue.map(_.toInt), + format = format.map(_.strip()) + ).asRight[String] + case _ => + logger.error(unprocessableParamsMessage) + Left(unprocessableParamsMessage) + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationFormats.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationFormats.scala new file mode 100644 index 00000000..76369146 --- /dev/null +++ 
// --- File: operations/WikibaseOperationFormats.scala ------------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations

/** Enumeration of the different formats that can be requested to wikibase's API.
 * The "fm" variants are the pretty-printed counterparts, meant for debugging.
 *
 * @see [[https://www.mediawiki.org/wiki/Wikibase/API#Request_Format]]
 */
private[api] object WikibaseOperationFormats extends Enumeration {
  type WikibaseQueryFormat = String

  val JSON    = "json"
  val JSON_FM = "jsonfm"
  val XML     = "xml"
  val XML_FM  = "xmlfm"

  /** Format used when the client does not request one explicitly */
  val default: WikibaseQueryFormat = JSON
}

// --- File: operations/WikibaseOperationResult.scala -------------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations

import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.Wikibase
import io.circe.syntax.EncoderOps
import io.circe.{Encoder, Json}

/** Results to be returned when performing a search operation
 * in a wikibase instance.
 *
 * @param operationData Input data operated on
 * @param wikibase      Target wikibase operated on
 * @param result        Results returned by the operation, ready for API responses
 */
final case class WikibaseOperationResult private (
    operationData: WikibaseOperationDetails,
    wikibase: Wikibase,
    result: Json
)

/** Static codec utilities for the results */
private[api] object WikibaseOperationResult {

  /** JSON encoder for [[WikibaseOperationResult]]s */
  implicit val encode: Encoder[WikibaseOperationResult] =
    (opResult: WikibaseOperationResult) =>
      Json.obj(
        ("operationData", opResult.operationData.asJson),
        ("wikibase", opResult.wikibase.asJson),
        ("result", opResult.result)
      )
}

// --- File: operations/get/WikibaseGetLabels.scala ---------------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.get

import cats.effect.IO
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperationDetails
import org.http4s.client.Client

/** A [[WikibaseGetOperation]] that only requests the labels of the
 * retrieved objects.
 *
 * @param operationData Data needed to perform the wikibase operation
 * @param client        [[Client]] object to be used in requests to wikibase
 * @note All derived operations are based on [[https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities]]
 */
sealed case class WikibaseGetLabels(
    override val operationData: WikibaseOperationDetails,
    override val client: Client[IO]
) extends WikibaseGetOperation(
      operationData,
      client,
      List(WikibasePropTypes.LABELS)
    )

// --- File: operations/get/WikibaseGetOperation.scala ------------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.get
import cats.effect.IO
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.get.WikibaseGetOperation.defaultResultLanguages
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.get.WikibasePropTypes.WikibasePropTypes
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{
  WikibaseOperation,
  WikibaseOperationDetails,
  WikibaseOperationFormats,
  WikibaseOperationResult
}
import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException
import io.circe.Json
import org.http4s.Uri
import org.http4s.circe.jsonDecoder
import org.http4s.client.Client

/** Common class for wikibase operations based on retrieving entities.
 * Given an input [[WikibaseOperationDetails]], get items from a wikibase instance.
 *
 * @param operationData Data needed to perform the wikibase operation
 * @param client        [[Client]] object to be used in requests to wikibase
 * @param props         Properties to be returned from the objects retrieved
 *                      (defined by sub-classes)
 * @note All derived operations are based on [[https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities]]
 */
private[wikibase] class WikibaseGetOperation(
    override val operationData: WikibaseOperationDetails,
    override val client: Client[IO],
    props: List[WikibasePropTypes] = WikibasePropTypes.default
) extends WikibaseOperation(
      WikibaseGetOperation.successMessage,
      operationData,
      client
    ) {

  /** Target URL in the targeted wikibase instance, prepared with the
   * "wbgetentities" action and the operation's payload.
   */
  override lazy val targetUri: Uri = {
    // Fall back to the defaults when the client did not specify these
    val requestedLanguages = operationData.resultLanguages
      .getOrElse(defaultResultLanguages)
      .mkString("|")
    val requestedFormat =
      operationData.format.getOrElse(WikibaseOperationFormats.JSON)

    targetWikibase.apiUrl
      .withQueryParam("action", "wbgetentities")
      .withQueryParam("props", props.mkString("|"))
      .withQueryParam("ids", operationData.payload)
      .withQueryParam("languages", requestedLanguages)
      .withQueryParam("format", requestedFormat)
  }

  /** Perform the request and wrap the wikibase response into a
   * [[WikibaseOperationResult]], raising service errors otherwise.
   */
  override def performOperation: IO[WikibaseOperationResult] =
    super.performRequest[Json]().flatMap {
      case Left(err) => IO.raiseError(WikibaseServiceException(err))
      case Right(jsonResults) =>
        IO.pure(
          WikibaseOperationResult(
            operationData = operationData,
            wikibase = targetWikibase,
            result = jsonResults
          )
        )
    }
}

object WikibaseGetOperation {
  private val successMessage = "Get entities executed successfully"

  /** Languages in which the results will be returned when none has been specified */
  private val defaultResultLanguages: List[String] = List("en")
}

// --- File: operations/get/WikibasePropTypes.scala ---------------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.get

/** Enumeration of the different properties of objects that can be requested to
 * wikibase's API in get operations.
+ * + * @see [[https://www.wikidata.org/w/api.php?action=help&modules=wbgetentitiess]] + */ +private[api] object WikibasePropTypes extends Enumeration { + type WikibasePropTypes = String + + val ALIASES = "aliases" + val CLAIMS = "claims" + val DATATYPE = "datatype" + val DESCRIPTIONS = "descriptions" + val INFO = "info" + val LABELS = "labels" + val SITELINKS = "sitelinks" + val SITELINKS_URLS = "sitelinks/urls" + + /** Default list of props request in get operations, mirroring the default used + * by Mediawiki's API + */ + val default: List[WikibasePropTypes] = + List(INFO, SITELINKS, ALIASES, LABELS, DESCRIPTIONS, CLAIMS, DATATYPE) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/languages/WikibaseLanguages.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/languages/WikibaseLanguages.scala new file mode 100644 index 00000000..461620de --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/languages/WikibaseLanguages.scala @@ -0,0 +1,112 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.languages + +import cats.effect.IO +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.languages.WikibaseLanguages.convertLanguages +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{ + WikibaseOperation, + WikibaseOperationDetails, + WikibaseOperationFormats, + WikibaseOperationResult +} +import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException +import io.circe.Json +import org.http4s.Uri +import org.http4s.circe.jsonDecoder +import org.http4s.client.Client + +/** Given an input [[WikibaseOperationDetails]], query a wikibase instance + * for all the languages present in it + * + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + * @note All derived 
operations are based on [[https://www.wikidata.org/w/api.php?action=help&modules=query%2Bwbcontentlanguages]] + */ +private[wikibase] case class WikibaseLanguages( + override val operationData: WikibaseOperationDetails, + override val client: Client[IO] +) extends WikibaseOperation( + WikibaseLanguages.successMessage, + operationData, + client + ) { + + /** Target URL in the targeted wikibase instance + */ + override lazy val targetUri: Uri = { + targetWikibase.apiUrl + .withQueryParam("action", "query") + .withQueryParam("meta", "wbcontentlanguages") + .withQueryParam("wbclcontext", "term") + .withQueryParam("wbclprop", "code|autonym") + .withQueryParam( + "format", + operationData.format + .getOrElse(WikibaseOperationFormats.JSON) + ) + } + + override def performOperation: IO[WikibaseOperationResult] = { + // Build the results item from the wikibase response, throwing errors + for { + eitherResponse <- super.performRequest[Json]() + result <- eitherResponse.flatMap(convertLanguages) match { + case Left(err) => IO.raiseError(WikibaseServiceException(err)) + case Right(jsonResults) => + IO { + WikibaseOperationResult( + operationData = operationData, + wikibase = targetWikibase, + result = jsonResults + ) + } + } + } yield result + } + +} + +object WikibaseLanguages { + private val successMessage = "Messages fetched successfully" + + /** Convert the response from Wikibase "wbcontentlanguages" to a more convenient JSON structure + * + * @param json Input JSON, as received from Wikibase + * @return Either a JSON representation of the languages in the Wikibase, or an error message + */ + private def convertLanguages(json: Json): Either[String, Json] = for { + languagesObj <- json.hcursor + .downField("query") + .downField("wbcontentlanguages") + .focus + .toRight(s"Error obtaining query/wbcontentlanguages at ${json.spaces2}") + keys <- languagesObj.hcursor.keys.toRight( + s"Error obtaining values from languages: ${languagesObj.spaces2}" + ) + converted = 
Json.fromValues( + keys.map(key => + Json.fromFields( + List( + ( + "label", + languagesObj.hcursor + .downField(key) + .downField("code") + .focus + .getOrElse(Json.Null) + ), + ( + "name", + languagesObj.hcursor + .downField(key) + .downField("autonym") + .focus + .getOrElse(Json.Null) + ) + ) + ) + ) + ) + } yield { + converted + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/query/WikibaseQueryOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/query/WikibaseQueryOperation.scala new file mode 100644 index 00000000..1863bbaf --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/query/WikibaseQueryOperation.scala @@ -0,0 +1,69 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.query + +import cats.effect.IO +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{ + WikibaseOperation, + WikibaseOperationDetails, + WikibaseOperationResult +} +import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException +import io.circe.Json +import org.http4s.circe.jsonDecoder +import org.http4s.client.Client +import org.http4s.headers.Accept +import org.http4s.{Headers, MediaType, Request, Uri} + +/** Common class for wikibase operations based on querying a SPARQL endpoint. + * Given an input [[WikibaseOperationDetails]], perform a query against a + * wikibase instance. 
+ * + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + * @note All derived operations are based on [[https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities]] + */ +private[wikibase] case class WikibaseQueryOperation( + override val operationData: WikibaseOperationDetails, + override val client: Client[IO] +) extends WikibaseOperation( + WikibaseQueryOperation.successMessage, + operationData, + client + ) { + + /** Target URL in the targeted wikibase instance. Already prepared with the + * endpoint and query. + */ + override lazy val targetUri: Uri = { + targetWikibase.queryUrl + .withQueryParam("query", operationData.payload) + } + + /** Request for this operation. + * Include the "Accept" header to get JSON responses + */ + override def request: Request[IO] = + super.request.withHeaders(Headers(Accept(MediaType.application.`json`))) + + override def performOperation: IO[WikibaseOperationResult] = { + // Build the results item from the wikibase response, throwing errors + for { + eitherResponse <- super.performRequest[Json]() + result <- eitherResponse match { + case Left(err) => IO.raiseError(WikibaseServiceException(err)) + case Right(jsonResults) => + IO { + WikibaseOperationResult( + operationData = operationData, + wikibase = targetWikibase, + result = jsonResults + ) + } + } + } yield result + } + +} + +object WikibaseQueryOperation { + private val successMessage = "Query executed successfully" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaContent.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaContent.scala new file mode 100644 index 00000000..ea99377f --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaContent.scala @@ 
-0,0 +1,61 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema + +import cats.effect.IO +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{ + WikibaseOperation, + WikibaseOperationDetails, + WikibaseOperationResult +} +import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException +import io.circe.syntax.EncoderOps +import org.http4s.Uri +import org.http4s.client.Client + +/** Given an input [[WikibaseOperationDetails]], search for schemas in a + * wikibase instance + * + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + */ +private[wikibase] case class WikibaseSchemaContent( + override val operationData: WikibaseOperationDetails, + override val client: Client[IO] +) extends WikibaseOperation( + WikibaseSchemaContent.successMessage, + operationData, + client + ) { + + /** Target URL in the targeted wikibase instance + */ + override lazy val targetUri: Uri = { + targetWikibase.baseUrl / + "wiki" / + "Special:EntitySchemaText" / + operationData.payload + } + + override def performOperation: IO[WikibaseOperationResult] = { + // Build the results item from the wikibase response, throwing errors + for { + eitherResponse <- super + .performRequest[String]() + result <- eitherResponse match { + case Left(err) => IO.raiseError(WikibaseServiceException(err)) + case Right(jsonResults) => + IO { + WikibaseOperationResult( + operationData = operationData, + wikibase = targetWikibase, + result = jsonResults.asJson + ) + } + } + } yield result + } + +} + +object WikibaseSchemaContent { + private val successMessage = "Schema contents fetched successfully" +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaExtract.scala new 
// --- File: operations/schema/WikibaseSchemaExtract.scala --------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema

import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import es.weso.rdf.jena.RDFAsJenaModel
import es.weso.rdf.nodes.IRI
import es.weso.rdfshape.server.api.format.dataFormats.Turtle
import es.weso.rdfshape.server.api.format.dataFormats.schemaFormats.ShExC
import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.Wikidata
import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.wikidata.WikidataEntity
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperation.wikidataOnlyMessage
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{
  WikibaseOperation,
  WikibaseOperationDetails,
  WikibaseOperationResult
}
import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException
import es.weso.schema.{Schemas, Schema => SchemaW}
import es.weso.schemaInfer.{InferOptions, SchemaInfer}
import es.weso.shapemaps.{RDFNodeSelector, ResultShapeMap}
import es.weso.utils.IOUtils.{either2es, io2es}
import io.circe.Json
import org.http4s.Uri
import org.http4s.client.Client
import org.http4s.implicits.http4sLiteralsSyntax

import scala.util.{Failure, Success, Try}

/** Given an input [[WikibaseOperationDetails]], attempt to extract a schema (ShEx)
 * from a given entity present in the target wikibase instance.
 *
 * @param operationData Data needed to perform the wikibase operation
 * @param client        [[Client]] object to be used in requests to wikibase
 * @note Only available for Wikidata
 * @note Should be passed a client with redirect support
 */
private[wikibase] case class WikibaseSchemaExtract(
    override val operationData: WikibaseOperationDetails,
    override val client: Client[IO]
) extends WikibaseOperation(
      WikibaseSchemaExtract.successMessage,
      operationData,
      client
    )
    with LazyLogging {

  // This operation builds its own requests, so the inherited target is unused
  override lazy val targetUri: Uri = uri""

  /** Download the entity's RDF, infer a schema for the entity node and
   * return the ShExC serialization wrapped in a [[WikibaseOperationResult]].
   */
  override def performOperation: IO[WikibaseOperationResult] = {
    val entityUri = operationData.payload

    // The extraction relies on Wikidata-specific URIs: reject other instances
    if(targetWikibase != Wikidata)
      IO.raiseError(WikibaseServiceException(wikidataOnlyMessage))
    else {
      val attempt = for {
        // Parse the Wikidata item from the URI submitted as payload
        wdEntity <- Try {
          WikidataEntity(Uri.unsafeFromString(entityUri))
        }
        extraction =
          for {

            // Raw RDF of the Wikidata entity as a Turtle String
            rawRdf <- io2es(client.expect[String](wdEntity.contentUri))

            // Run the schema inference over the entity node
            inferredEither <- io2es(
              RDFAsJenaModel
                .fromString(rawRdf, Turtle.name)
                .flatMap(
                  _.use(rdf =>
                    for {
                      // NOTE(review): serialization result is unused; kept for parity
                      rdfSerialized <- rdf.serialize(Turtle.name)
                      nodeSelector = RDFNodeSelector(IRI(entityUri))
                      inferred <- SchemaInfer.runInferSchema(
                        rdf,
                        nodeSelector,
                        Schemas.shEx.name,
                        IRI(s"http://example.org/Shape_${wdEntity.localName}"),
                        InferOptions.defaultOptions.copy(maxFollowOn = 3)
                      )
                    } yield inferred
                  )
                )
            )
            // Tuple (schema, shapemap) produced by the inference
            schemaAndMap <- either2es[(SchemaW, ResultShapeMap)](inferredEither)

            // ShExC text of the inferred schema, for the API response
            shExCStr <- io2es({
              val (inferredSchema, _) = schemaAndMap
              inferredSchema.serialize(ShExC.name.toUpperCase)
            })

          } yield WikibaseOperationResult(
            operationData = operationData,
            wikibase = targetWikibase,
            result = Json.fromFields(
              List(
                ("entity", Json.fromString(entityUri)),
                ("result", Json.fromString(shExCStr))
              )
            )
          )

      } yield extraction.value.flatMap {
        case Left(err)    => IO.raiseError(WikibaseServiceException(err))
        case Right(value) => IO.pure(value)
      }

      // Surface entity-parsing failures as service errors
      attempt match {
        case Failure(exception) =>
          IO.raiseError(WikibaseServiceException(exception.getMessage))
        case Success(value) => value
      }
    }
  }
}

object WikibaseSchemaExtract {
  private val successMessage = "Schema contents extracted successfully"
}

// --- File: operations/schema/WikibaseSchemaValidate.scala -------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema

import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import es.weso.rdf.NONE
import es.weso.rdf.nodes.IRI
import es.weso.rdfshape.server.api.format.dataFormats.{Compact, Turtle}
import es.weso.rdfshape.server.api.routes.data.logic.DataSource
import es.weso.rdfshape.server.api.routes.data.logic.types.DataSingle
import es.weso.rdfshape.server.api.routes.schema.logic.operations.SchemaValidate
import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerShapeMap
import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema
import es.weso.rdfshape.server.api.routes.shapemap.logic.{
  ShapeMap,
  ShapeMapSource
}
import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.Wikidata
import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.wikidata.WikidataEntity
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperation.wikidataOnlyMessage
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{
  WikibaseOperation,
  WikibaseOperationDetails,
  WikibaseOperationResult
}
import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException
import
es.weso.shapemaps.{Start, ShapeMap => ShapeMapW}
import io.circe.Json
import io.circe.syntax.EncoderOps
import org.http4s.Uri
import org.http4s.client.Client
import org.http4s.implicits.http4sLiteralsSyntax

import scala.util.{Failure, Success, Try}

/** Given an input [[WikibaseOperationDetails]] and a [[Schema]], attempt to
 * validate an entity in a wikibase against that schema / shape expression.
 *
 * @param operationData Data needed to perform the wikibase operation
 * @param client        [[Client]] object to be used in requests to wikibase
 * @param schema        [[Schema]] against which to validate entities
 * @note Only available for Wikidata
 */
private[wikibase] case class WikibaseSchemaValidate(
    override val operationData: WikibaseOperationDetails,
    override val client: Client[IO],
    schema: Schema
) extends WikibaseOperation(
      WikibaseSchemaValidate.successMessage,
      operationData,
      client
    )
    with LazyLogging {

  // This operation builds its own requests, so the inherited target is unused
  override lazy val targetUri: Uri = uri""

  /** Build the data + shapemap for the entity in the payload, run the
   * validation and wrap its results in a [[WikibaseOperationResult]].
   */
  override def performOperation: IO[WikibaseOperationResult] = {
    val entityUri = operationData.payload

    // Validation relies on Wikidata-specific URIs: reject other instances
    if(targetWikibase != Wikidata)
      IO.raiseError(WikibaseServiceException(wikidataOnlyMessage))
    else {
      val attempt = for {
        // Parse the Wikidata item from the URI submitted as payload
        wdEntity <- Try {
          WikidataEntity(Uri.unsafeFromString(entityUri))
        }
        // Data to validate: the entity's Turtle content, fetched by URL
        inputData = DataSingle(
          dataPre = Some(wdEntity.contentUri.renderString),
          dataFormat = Turtle,
          inference = NONE,
          dataSource = DataSource.URL
        )

        /* The schema model needed for validation was already passed to the
         * class */

        // Trigger mode: ShEx with a minimal "<entity>@start" shapemap
        validationOutcome = for {
          shapeMapModel <- ShapeMapW.empty.add(
            IRI(wdEntity.entityUri.renderString),
            Start
          )
          serializedShapeMap <- shapeMapModel.serialize(Compact.name)
          trigger = TriggerShapeMap(
            shapeMap = ShapeMap(
              shapeMapPre = Some(serializedShapeMap),
              nodesPrefixMap = shapeMapModel.nodesPrefixMap.addPrefixMap(
                Wikidata.wikidataPrefixMap
              ),
              shapesPrefixMap = shapeMapModel.shapesPrefixMap.addPrefixMap(
                Wikidata.wikidataPrefixMap
              ),
              format = Compact,
              source = ShapeMapSource.TEXT
            ),
            data = Some(inputData),
            schema = Some(schema)
          )
          result = SchemaValidate.schemaValidate(
            inputData,
            schema,
            trigger
          )
        } yield result
      } yield validationOutcome match {
        case Left(err)    => IO.raiseError(WikibaseServiceException(err))
        case Right(value) => value
      }

      // Surface entity-parsing failures as service errors; otherwise wrap results
      attempt match {
        case Failure(exception) =>
          IO.raiseError(WikibaseServiceException(exception.getMessage))
        case Success(value) =>
          value.map(validationResults =>
            WikibaseOperationResult(
              operationData = operationData,
              wikibase = targetWikibase,
              result = Json.fromFields(
                List(
                  ("entity", Json.fromString(entityUri)),
                  ("result", validationResults.asJson)
                )
              )
            )
          )
      }
    }
  }
}

object WikibaseSchemaValidate {
  private val successMessage = "Schema contents validated successfully"
}

// --- File: operations/schema/WikibaseSheXerExtract.scala --------------------
package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema

import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import es.weso.rdf.PrefixMap
import es.weso.rdfshape.server.api.format.dataFormats.{DataFormat, Turtle}
import es.weso.rdfshape.server.api.routes.data.logic.DataSource
import es.weso.rdfshape.server.api.routes.data.logic.DataSource.DataSource
import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.Wikidata
import es.weso.rdfshape.server.api.routes.wikibase.logic.model.wikibase.objects.wikidata.WikidataEntity
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema.WikibaseSheXerExtract.ShexerParams.{
  wikidataNamespaceQualifiers,
  wikidataProp31
}
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema.WikibaseSheXerExtract.{
  ShexerParams,
  mkShexerShapemap,
  shexerUri
}
import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{
  WikibaseOperation,
  WikibaseOperationDetails,
  WikibaseOperationResult
}
import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException
import io.circe.syntax.EncoderOps
import io.circe.{Encoder, Json}
import org.http4s.Method.POST
import org.http4s._
import org.http4s.circe._
import org.http4s.client.Client
import org.http4s.headers.{`Content-Type` => ContentType}
import org.http4s.implicits.http4sLiteralsSyntax

/** Given an input [[WikibaseOperationDetails]], attempt to extract a schema (ShEx)
 * from a given entity using SheXer.
 *
 * @param operationData Data needed to perform queries against a wikibase
 * @param client        [[Client]] object to be used in requests to wikibase
 * @note Only available for Wikidata
 * @note Should be passed a client with redirect support
 * @note [[operationData]] will contain the target entity as payload and
 *       the target Wikibase as endpoint
 * @see [[https://github.com/DaniFdezAlvarez/shexer]]
 */
private[wikibase] case class WikibaseSheXerExtract(
    override val operationData: WikibaseOperationDetails,
    override val client: Client[IO]
) extends WikibaseOperation(
      WikibaseSheXerExtract.successMessage,
      operationData,
      client
    )
    with LazyLogging {

  // This operation posts to SheXer directly, so the inherited target is unused
  override lazy val targetUri: Uri = uri""

  /** Build the SheXer parameters for the payload entity, POST them to the
   * SheXer service and wrap its response in a [[WikibaseOperationResult]].
   */
  override def performOperation: IO[WikibaseOperationResult] = {
    // SheXer extraction is only wired up for Wikidata entities
    if(targetWikibase != Wikidata)
      IO.raiseError(
        WikibaseServiceException(
          // Typo fixed in the user-facing message: "tha" -> "than"
          "Cannot extract schemas from wikibase instances other than Wikidata"
        )
      )
    else {
      // Make the SheXer params by scanning the data sent by the client
      val wdEntity = WikidataEntity(Uri.unsafeFromString(operationData.payload))
      val shexerParams = ShexerParams(
        graph = wdEntity.contentUri.renderString,
        graphSource = DataSource.URL,
        endpointGraph = targetWikibase.queryUrl,
        shapeMap = Some(mkShexerShapemap(wdEntity))
      )

      // Make the request to SheXer and wrap/raise its outcome
      val finalRequest = request.withEntity[Json](shexerParams.asJson)
      performRequest[Json](finalRequest).flatMap {
        case Left(err) => IO.raiseError(WikibaseServiceException(err))
        case Right(shexerResponse) =>
          IO.pure(
            WikibaseOperationResult(
              operationData = operationData,
              wikibase = targetWikibase,
              result = shexerResponse
            )
          )
      }
    }
  }

  /** Request for this operation.
   * Change method to POST, target to SheXer
   * and include the SheXer params as the entity to be processed.
   */
  override def request: Request[IO] =
    super.request
      .withMethod(POST)
      .withUri(shexerUri)
      .withHeaders(Headers(ContentType(MediaType.application.`json`)))

}

private[wikibase] object WikibaseSheXerExtract {

  /** SheXer deployment address */
  val shexerUri: Uri = uri"http://156.35.94.158:8081/shexer"
  private val successMessage = "Schema contents extracted successfully"

  /** Create the shapeMap parameter to be sent to SheXer given a known [[WikidataEntity]]
   *
   * @param entity  Wikidata entity being processed by SheXer
   * @param varName Name to be given to the user entity
   * @return A SPARQL-selector shapemap targeting the entity
   */
  def mkShexerShapemap(
      entity: WikidataEntity,
      varName: String = "userEntity"
  ): String =
    s"SPARQL 'SELECT DISTINCT ?$varName WHERE { VALUES ?$varName { wd:${entity.localName} } }'@<$varName> "

  /** Represent the JSON structure of
   * POST parameters accepted by the Shexer API.
   * Default values are provided for common uses
   *
   * @param graph              RDF content to be analyzed
   * @param graphSource        Source where the data comes from. Use URL
   *                           if [[graph]] is a URL pointing to raw data
   *                           Default: [[DataSource.TEXT]]
   * @param inputFormat        RDF syntax used.
   *                           Default: turtle
   * @param prefixes           Tuple of namespaces-prefixes. The pairs
   *                           provided will be used to parse the RDF
   *                           content and write the resulting shapes.
   * @param endpointGraph      URL of an SPARQL endpoint.
   * @param instantiationProp  Property used to links an instance with its class.
   *                           Default: "rdf:type"
   * @param namespacesToIgnore List of namespaces whose properties should be
   *                           ignored during the shexing process.
   *                           Default: none
   * @param queryDepth         Indicates the depth to generate queries
   *                           when targeting a SPARQL endpoint.
   *                           Currently it can be 1 or 2.
+ * Default: 1 + * @param keepLessSpecific It prefers to use "+" closures rather + * than exact cardinalities in the triple + * constraints + * Default: true + * @param acceptanceThreshold Number in [0,1] that indicates the + * minimum proportion of entities that + * should have a given feature for this + * to be accepted as a triple constraint + * in the produced shape. + * Default: 0 + * @param allClasses If True, it generates a shape for every + * elements with at least an instance + * in the considered graph. + * Default: false + * @param allCompliant If False, the shapes produced may not be + * compliant with all the entities considered + * to build them. This is because it won't + * use Kleene closures for any constraint. + * Default: true + * @param discardUselessConstraints If True, it keeps just the most possible + * specific constraint w.r.t. cardinality. + * Default: true + * @param inferNumericTypes If True, it tries to infer the numeric type (xsd:int, xsd:float..) of + * untyped numeric literals. + * Default, True + * @param shapeMap ShapeMap to associate nodes with shapes. + * It uses the same syntax of validation shape + * maps. + * Default: None + * @param disableComments When set to True, the shapes do not + * include comment with ratio of entities + * compliant with a triple constraint. + * Default: false + * @param namespaceQualifiers When a list with elements is provided, + * the properties in the namespaces specified + * are considered to be pointers to qualifier + * nodes. + * Default: None + * @param shapeQualifiers If True, a shape is generated for those + * nodes detected as qualifiers according to + * Wikidata data model and the properties + * pointing to them specified in [[namespaceQualifiers]]. 
+ * Default: false + * @see [[https://github.com/weso/shexerp3/blob/develop/ws/shexer_rest.py]] + */ + final case class ShexerParams( + graph: String, + graphSource: DataSource = DataSource.TEXT, + inputFormat: Option[DataFormat] = Some(Turtle), + prefixes: PrefixMap = Wikidata.wikidataPrefixMap, + endpointGraph: Uri = Wikidata.queryUrl, + instantiationProp: Uri = wikidataProp31, + namespacesToIgnore: List[String] = List(), + queryDepth: Int = 1, + acceptanceThreshold: Int = 0, + keepLessSpecific: Boolean = true, + allClasses: Boolean = false, + allCompliant: Boolean = true, + discardUselessConstraints: Boolean = true, + inferNumericTypes: Boolean = true, + shapeMap: Option[String], + disableComments: Boolean = false, + namespaceQualifiers: List[Uri] = wikidataNamespaceQualifiers, + shapeQualifiers: Boolean = false + ) + + //noinspection HttpUrlsUsage + object ShexerParams { + + /** [[Uri]] representing the property "rdf:type" + */ + private val rdfTypeProp = + uri"http://www.w3.org/1999/02/22-rdf-syntax-ns#type" + + /** [[Uri]] representing Wikidata's "instance of" property + * + * @see [[https://www.wikidata.org/wiki/Property:P31]] + */ + private val wikidataProp31: Uri = + uri"http://www.wikidata.org/prop/direct/P31" + + private val wikidataNamespaceQualifiers = List( + uri"http://www.wikidata.org/prop/" + ) + + /** Encoder to transform [[ShexerParams]] instances to JSON to be sent + * in requests + */ + implicit val encode: Encoder[ShexerParams] = (shexerParams: ShexerParams) => + { + val graphParam = shexerParams.graphSource match { + case DataSource.URL => "graph_url" + case _ => "raw_graph" + } + val baseParams: List[(String, Json)] = List( + (graphParam, Json.fromString(shexerParams.graph)), + ( + "input_format", + shexerParams.inputFormat.map(_.name.toLowerCase).asJson + ), + ( + "prefixes", + Json.fromFields( + shexerParams.prefixes.pm.map(prefixMapping => + (prefixMapping._1.str, Json.fromString(prefixMapping._2.str)) + ) + ) + ), + ( + 
"endpoint", + Json.fromString(shexerParams.endpointGraph.renderString) + ), + ( + "instantiation_prop", + Json.fromString(shexerParams.instantiationProp.renderString) + ), + ( + "ignore", + Json.fromValues( + shexerParams.namespacesToIgnore.map(Json.fromString) + ) + ), + ( + "query_depth", + Json.fromInt(shexerParams.queryDepth) + ), + ( + "keep_less_specific", + Json.fromBoolean(shexerParams.keepLessSpecific) + ), + ( + "all_classes", + Json.fromBoolean(shexerParams.allClasses) + ), + ( + "all_compliant", + Json.fromBoolean(shexerParams.allCompliant) + ), + ( + "discard_useless_constraints", + Json.fromBoolean(shexerParams.discardUselessConstraints) + ), + ( + "infer_untyped_nums", + Json.fromBoolean(shexerParams.inferNumericTypes) + ), + ( + "disable_comments", + Json.fromBoolean(shexerParams.disableComments) + ), + ( + "namespaces_for_qualifiers", + Json.fromValues( + shexerParams.namespaceQualifiers.map(ns => + Json.fromString(ns.renderString) + ) + ) + ), + ( + "shape_qualifiers_mode", + Json.fromBoolean(shexerParams.shapeQualifiers) + ), + ( + "shape_map", + shexerParams.shapeMap.asJson + ) + ) + + // Return final params as JSON object + Json.fromFields(baseParams).deepDropNullValues + } + + } +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchEntity.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchEntity.scala new file mode 100644 index 00000000..c7c5725b --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchEntity.scala @@ -0,0 +1,20 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search + +import cats.effect.IO +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperationDetails +import org.http4s.client.Client + +/** A [[WikibaseSearchOperation]] searching for entities in a wikibase 
instance. + * Resorts to [[https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities]] + * + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + */ +sealed case class WikibaseSearchEntity private ( + override val operationData: WikibaseOperationDetails, + override val client: Client[IO] +) extends WikibaseSearchOperation( + operationData, + client, + WikibaseSearchTypes.ITEM + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchLexeme.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchLexeme.scala new file mode 100644 index 00000000..330a0fd1 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchLexeme.scala @@ -0,0 +1,20 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search + +import cats.effect.IO +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperationDetails +import org.http4s.client.Client + +/** A [[WikibaseSearchOperation]] searching for lexemes in a wikibase instance. 
+ * Resorts to [[https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities]] + * + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + */ +sealed case class WikibaseSearchLexeme private ( + override val operationData: WikibaseOperationDetails, + override val client: Client[IO] +) extends WikibaseSearchOperation( + operationData, + client, + WikibaseSearchTypes.LEXEME + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchOperation.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchOperation.scala new file mode 100644 index 00000000..d376be0a --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchOperation.scala @@ -0,0 +1,123 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search + +import cats.effect.IO +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search.WikibaseSearchOperation.{ + convertEntities, + defaultSearchLanguage +} +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search.WikibaseSearchTypes.WikibaseSearchTypes +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{ + WikibaseOperation, + WikibaseOperationDetails, + WikibaseOperationFormats, + WikibaseOperationResult +} +import es.weso.rdfshape.server.utils.error.exceptions.WikibaseServiceException +import io.circe.Json +import org.http4s.Uri +import org.http4s.circe.jsonDecoder +import org.http4s.client.Client + +/** Common abstract class for wikibase operations based on searches: entities, properties, lexemes, etc. + * Given an input [[WikibaseOperationDetails]], perform a search in a wikibase instance. 
+ * + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + * @note All derived operations are based on [[https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities]] + */ +private[wikibase] abstract class WikibaseSearchOperation( + override val operationData: WikibaseOperationDetails, + override val client: Client[IO], + itemType: WikibaseSearchTypes +) extends WikibaseOperation( + WikibaseSearchOperation.successMessage, + operationData, + client + ) { + + /** Target URL in the targeted wikibase instance. Already prepared with the + * search action and given payload. + */ + override lazy val targetUri: Uri = { + targetWikibase.apiUrl + .withQueryParam("action", "wbsearchentities") + .withQueryParam("type", itemType) + .withQueryParam("search", operationData.payload) + .withQueryParam( + "language", + operationData.searchLanguage.getOrElse(defaultSearchLanguage) + ) + .withQueryParam("limit", operationData.limit.getOrElse(defaultLimit)) + .withQueryParam( + "continue", + operationData.continue.getOrElse(defaultContinue) + ) + .withQueryParam( + "format", + operationData.format + .getOrElse(WikibaseOperationFormats.JSON) + ) + } + + override def performOperation: IO[WikibaseOperationResult] = { + // Build the results item from the wikibase response, throwing errors + for { + eitherResponse <- super.performRequest[Json]() + result <- eitherResponse.flatMap(convertEntities) match { + case Left(err) => IO.raiseError(WikibaseServiceException(err)) + case Right(jsonResults) => + IO { + WikibaseOperationResult( + operationData = operationData, + wikibase = targetWikibase, + result = jsonResults + ) + } + } + } yield result + } + +} + +private[wikibase] object WikibaseSearchOperation { + private val successMessage = "Search executed successfully" + + /** Search language to be provided when none has been specified + */ + private val defaultSearchLanguage = "en" + + /** 
Convert the response from wikibase's "wbsearchentities" to a
+    * JSON array for API responses
+    *
+    * @param json Input JSON, as received from Wikibase
+    * @return Either a JSON representation of the entities in the Wikibase, or an error message
+    */
+  private[search] def convertEntities(json: Json): Either[String, Json] = for {
+    entities <- json.hcursor
+      .downField("search")
+      .values
+      .toRight("Error obtaining search value")
+    converted = Json.fromValues(
+      entities.map((value: Json) =>
+        Json.fromFields(
+          List(
+            (
+              "label",
+              value.hcursor.downField("label").focus.getOrElse(Json.Null)
+            ),
+            ("id", value.hcursor.downField("id").focus.getOrElse(Json.Null)),
+            (
+              "uri",
+              value.hcursor.downField("concepturi").focus.getOrElse(Json.Null)
+            ),
+            (
+              "descr",
+              value.hcursor.downField("description").focus.getOrElse(Json.Null)
+            )
+          )
+        )
+      )
+    )
+  } yield converted
+
+}
diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchProperty.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchProperty.scala
new file mode 100644
index 00000000..f544f179
--- /dev/null
+++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchProperty.scala
@@ -0,0 +1,20 @@
+package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search
+
+import cats.effect.IO
+import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.WikibaseOperationDetails
+import org.http4s.client.Client
+
+/** A [[WikibaseSearchOperation]] searching for properties in a wikibase instance.
+ * Resorts to [[https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities]] + * + * @param operationData Data needed to perform the wikibase operation + * @param client [[Client]] object to be used in requests to wikibase + */ +sealed case class WikibaseSearchProperty private ( + override val operationData: WikibaseOperationDetails, + override val client: Client[IO] +) extends WikibaseSearchOperation( + operationData, + client, + WikibaseSearchTypes.PROPERTY + ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchTypes.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchTypes.scala new file mode 100644 index 00000000..f1556836 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/search/WikibaseSearchTypes.scala @@ -0,0 +1,18 @@ +package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search + +/** Enumeration of the different types of objects that can be requested to + * wikibase's API in search operations. 
+ * + * @see [[https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities]] + */ +private[api] object WikibaseSearchTypes extends Enumeration { + type WikibaseSearchTypes = String + + val ITEM = "item" + val PROPERTY = "property" + val LEXEME = "lexeme" + val FORM = "form" + val SENSE = "sense" + + val default: WikibaseSearchTypes = ITEM +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala index f20f44a0..af64bb91 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala @@ -4,506 +4,354 @@ import cats.effect._ import com.typesafe.scalalogging.LazyLogging import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.ApiService +import es.weso.rdfshape.server.api.routes.schema.logic.operations.SchemaValidate +import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.get.WikibaseGetLabels +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.languages.WikibaseLanguages +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.query.WikibaseQueryOperation +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema.{ + WikibaseSchemaContent, + WikibaseSchemaExtract, + WikibaseSchemaValidate, + WikibaseSheXerExtract +} +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.search.{ + WikibaseSearchEntity, + WikibaseSearchLexeme, + WikibaseSearchProperty +} +import es.weso.rdfshape.server.api.routes.wikibase.logic.operations.{ + WikibaseOperationDetails, + WikibaseOperationResult +} +import es.weso.rdfshape.server.api.utils.parameters.PartsMap 
+import es.weso.rdfshape.server.utils.json.JsonUtils.errorResponseJson import es.weso.shapemaps.{Status => _} +import io.circe.syntax.EncoderOps import org.http4s._ -import org.http4s.client._ +import org.http4s.circe._ +import org.http4s.client.Client import org.http4s.client.middleware.FollowRedirect -import org.http4s.dsl._ -import org.http4s.implicits._ +import org.http4s.dsl.Http4sDsl +import org.http4s.multipart.Multipart /** API service to handle wikibase (and mostly wikidata) related operations * Acts as an intermediate proxy between clients and the MediaWiki API * * @param client HTTP4S client object */ +//noinspection DuplicatedCode class WikibaseService(client: Client[IO]) extends Http4sDsl[IO] with ApiService with LazyLogging { - override val verb: String = "wikidata" + override val verb: String = "wikibase" - val wikidataUrl = "https://www.wikidata.org" - val wikidataUri = uri"https://www.wikidata.org" - val wikidataEntityUrl = uri"https://www.wikidata.org/entity" - val apiUri = uri"/api/wikidata/entity" - val wikidataQueryUri: Uri = uri"https://query.wikidata.org/sparql" - val defaultLimit = 20 - val defaultContinue = 0 - val redirectClient: Client[IO] = FollowRedirect(3)(client) + /** [[Client]] used for some queries, needs to follows some redirects to work properly + */ + private val redirectClient: Client[IO] = FollowRedirect(3)(client) /** Describe the API routes handled by this service and the actions performed on each of them */ def routes: HttpRoutes[IO] = HttpRoutes.of[IO] { - // TODO: uncomment routes and refactor along with wikishape client - case GET -> Root / `api` / `verb` => InternalServerError("Pending") - - /** Search for wikidata entities using MediaWiki's API. 
Search based on entity ID - * See https://www.wikidata.org/w/api.php?action=help&modules=wbgetentities - * Receives a Wikidata entity label and a language and fetches entities in Wikidata - * - wdEntity [String]: Wikidata entity label - * - language [String]: Response desired language - * Returns a JSON object after querying MediaWiki's "wbgetentities" endpoint + /** Search for wikidata objects and return their labels in the given languages. + * Receives a JSON object with the input schema information: + * - endpoint [String]: Base URL of the target wikibase instance. Defaults to Wikidata's. + * - payload [String]: Entity identifier in the wikibase instance + * - languages [String]: Optionally, the languages of the results. Language + * codes separated by "|" + * Returns a JSON object with the results. See [[WikibaseOperationResult]] */ - // case GET -> Root / `api` / `verb` / "entityLabel" :? - // WdEntityParameter(entity) +& - // LanguageParameter(language) => - // val uri = wikidataUri - // .withPath(Uri.Path.unsafeFromString("/w/api.php")) - // .withQueryParam("action", "wbgetentities") - // .withQueryParam("props", "labels") - // .withQueryParam("ids", entity) - // .withQueryParam("languages", language) - // .withQueryParam("format", "json") - // - // logger.debug(s"wikidata searchEntity uri: ${uri.toString}") - // - // val req: Request[IO] = Request(method = GET, uri = uri) - // for { - // either <- client.run(req).use { - // case Status.Successful(r) => - // r.attemptAs[Json].leftMap(_.message).value - // case r => - // r.as[String] - // .map(b => - /* s"Request $req failed with status ${r.status.code} and body $b" */ - // .asLeft[Json] - // ) - // } - // resp <- Ok(either.fold(Json.fromString, identity)) - // } yield resp + case req @ GET -> Root / `api` / `verb` / "entityLabel" => + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(req.params) + // Create response + response <- operationDetails.fold( + err 
=> errorResponseJson(err, BadRequest), + opData => { + val op = WikibaseGetLabels(opData, client) + op.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response - /** Search for wikidata schemas using MediaWiki's API. - * Receives a Wikidata schema label and fetches schemas in Wikidata - * - wdSchema [String]: Wikidata schema label - * Returns a JSON object after manually querying the schema's page + /** Search for wikidata schemas using MediaWiki's API. Search based on lexeme labels. + * Receives a JSON object with the input schema information: + * - endpoint [String]: Base URL of the target wikibase instance. Defaults to Wikidata's. + * - payload [String]: Schema identifier in the wikibase instance + * Returns a JSON object with the results. See [[WikibaseOperationResult]] */ - // case GET -> Root / `api` / `verb` / "schemaContent" :? - // WdSchemaParameter(wdSchema) => - // val uri = wikidataUri.withPath( - /* Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$wdSchema") */ - // ) - // - // val req: Request[IO] = Request(method = GET, uri = uri) - // for { - // eitherValues <- client.run(req).use { - // case Status.Successful(r) => - // r.attemptAs[String].leftMap(_.message).value - // case r => - // r.as[String] - // .map(b => - /* s"Request $req failed with status ${r.status.code} and body $b" */ - // .asLeft[String] - // ) - // } - // json: Json = eitherValues.fold( - // e => Json.fromFields(List(("error", Json.fromString(e)))), - // s => Json.fromFields(List(("result", Json.fromString(s)))) - // ) - // resp <- Ok(json) - // } yield resp + case req @ GET -> Root / `api` / `verb` / "schemaContent" => + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(req.params) + // Create response + response <- operationDetails.fold( + err => errorResponseJson(err, BadRequest), + opData => { + val op = 
WikibaseSchemaContent(opData, client) + op.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response - /** Search for entities in a wikibase using MediaWiki's API. Search based on entity labels. - * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities - * Receives an entity label and a language and fetches entities in the wikibase whose endpoint was selected - * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata - * - label [String]: Label / keywords in the name of the entities searched - * - language [String]: Response desired language + /** Search for entities in a wikibase using MediaWiki's API. Search based on lexeme labels. + * Receives a JSON object with the input property information: + * - endpoint [String]: Base URL of the target wikibase instance. Defaults to Wikidata's. + * - payload [String]: Keywords for the search + * - language [String]: Language in which the search is conducted * - limit [Int]: Max number of results * - continue [Int]: Offset where to continue a search - * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint + * Returns a JSON object with the results. See [[WikibaseOperationResult]] + * + * @note see https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities */ - // case GET -> Root / `api` / `verb` / "searchEntity" :? 
- // EndpointParameter(maybeEndpoint) +& - // LabelParameter(label) +& - // LanguageParameter(language) +& - // LimitParameter(maybelimit) +& - // ContinueParameter(maybeContinue) => - // val limit: String = maybelimit.getOrElse(defaultLimit.toString) - /* val continue: String = maybeContinue.getOrElse(defaultContinue.toString) */ - // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - // - // logger.debug(s"Wikibase entity search with endpoint: $endpoint") - // - // val uri = Uri - // .unsafeFromString(endpoint) - // .withPath(Uri.Path.unsafeFromString("/w/api.php")) - // .withQueryParam("action", "wbsearchentities") - // .withQueryParam("search", label) - // .withQueryParam("language", language) - // .withQueryParam("limit", limit) - // .withQueryParam("continue", continue) - // .withQueryParam("format", "json") - // - // logger.debug(s"wikidata searchEntity uri: $uri") - // - // val req: Request[IO] = Request(method = GET, uri = uri) - // - // for { - // eitherValues <- client.run(req).use { - // case Status.Successful(r) => - // r.attemptAs[Json].leftMap(_.message).value - // case r => - // r.as[String] - // .map(b => - /* s"Request $req failed with status ${r.status.code} and body $b" */ - // .asLeft[Json] - // ) - // } - // eitherResult = for { - // json <- eitherValues - // converted <- convertEntities(json) - // } yield converted - // resp <- Ok(eitherResult.fold(Json.fromString, identity)) - // } yield resp + case req @ GET -> Root / `api` / `verb` / "searchEntity" => + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(req.params) + // Create response + response <- operationDetails.fold( + err => errorResponseJson(err, BadRequest), + opData => { + val searchOperation = WikibaseSearchEntity(opData, client) + searchOperation.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response 
/** Search for properties in a wikibase using MediaWiki's API. Search based on property labels. - * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities - * Receives a property label and a language and fetches properties in the wikibase whose endpoint was selected - * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata - * - label [String]: Label / keywords in the name of the properties searched - * - language [String]: Response desired language + * Receives a JSON object with the input property information: + * - endpoint [String]: Base URL of the target wikibase instance. Defaults to Wikidata + * - payload [String]: Keywords for the search + * - language [String]: Language in which the search is conducted * - limit [Int]: Max number of results * - continue [Int]: Offset where to continue a search - * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint. + * Returns a JSON object with the results. See [[WikibaseOperationResult]] + * + * @note see https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities */ - // case GET -> Root / `api` / `verb` / "searchProperty" :? 
- // EndpointParameter(maybeEndpoint) +& - // LabelParameter(label) +& - // LanguageParameter(language) +& - // LimitParameter(maybelimit) +& - // ContinueParameter(maybeContinue) => - // val limit: String = maybelimit.getOrElse(defaultLimit.toString) - /* val continue: String = maybeContinue.getOrElse(defaultContinue.toString) */ - // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - // - // logger.debug(s"Wikibase property search with endpoint: $endpoint") - // - // val uri = Uri - // .fromString(endpoint) - // .valueOr(throw _) - // .withPath(Uri.Path.unsafeFromString("/w/api.php")) - // .withQueryParam("action", "wbsearchentities") - // .withQueryParam("search", label) - // .withQueryParam("language", language) - // .withQueryParam("limit", limit) - // .withQueryParam("continue", continue) - // .withQueryParam("type", "property") - // .withQueryParam("format", "json") - // - // logger.debug(s"wikidata searchProperty uri: $uri") - // - // val req: Request[IO] = Request(method = GET, uri = uri) - // - // for { - // eitherValues <- client.run(req).use { - // case Status.Successful(r) => - // r.attemptAs[Json].leftMap(_.message).value - // case r => - // r.as[String] - // .map(b => - /* s"Request $req failed with status ${r.status.code} and body $b" */ - // .asLeft[Json] - // ) - // } - // eitherResult = for { - // json <- eitherValues - // converted <- convertEntities(json) - // } yield converted - // resp <- Ok(eitherResult.fold(Json.fromString, identity)) - // } yield resp + case req @ GET -> Root / `api` / `verb` / "searchProperty" => + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(req.params) + // Create response + response <- operationDetails.fold( + err => errorResponseJson(err, BadRequest), + opData => { + val searchOperation = WikibaseSearchProperty(opData, client) + searchOperation.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + 
errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response /** Search for lexemes in a wikibase using MediaWiki's API. Search based on lexeme labels. - * See https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities - * Receives a lexeme label and a language and fetches properties in the wikibase whose endpoint was selected - * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata - * - label [String]: Label / keywords in the name of the lexemes searched - * - language [String]: Response desired language + * Receives a JSON object with the input property information: + * - endpoint [String]: Base URL of the target wikibase instance. Defaults to Wikidata's. + * - payload [String]: Keywords for the search + * - language [String]: Language in which the search is conducted * - limit [Int]: Max number of results * - continue [Int]: Offset where to continue a search - * Returns a JSON object after querying MediaWiki's "wbsearchentities" endpoint. + * Returns a JSON object with the results. See [[WikibaseOperationResult]] + * + * @note see https://www.wikidata.org/w/api.php?action=help&modules=wbsearchentities */ - // case GET -> Root / `api` / `verb` / "searchLexeme" :? 
- // EndpointParameter(maybeEndpoint) +& - // LabelParameter(label) +& - // LanguageParameter(language) +& - // LimitParameter(maybelimit) +& - // ContinueParameter(maybeContinue) => - // val limit: String = maybelimit.getOrElse(defaultLimit.toString) - /* val continue: String = maybeContinue.getOrElse(defaultContinue.toString) */ - // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - // - // logger.debug(s"Wikibase lexeme search with endpoint: $endpoint") - // - // val uri = Uri - // .fromString(endpoint) - // .valueOr(throw _) - // .withPath(Uri.Path.unsafeFromString("/w/api.php")) - // .withQueryParam("action", "wbsearchentities") - // .withQueryParam("search", label) - // .withQueryParam("language", language) - // .withQueryParam("limit", limit) - // .withQueryParam("continue", continue) - // .withQueryParam("type", "lexeme") - // .withQueryParam("format", "json") - // - // logger.debug(s"wikidata searchLexeme uri: $uri") - // - // val req: Request[IO] = Request(method = GET, uri = uri) - // for { - // eitherValues <- client.run(req).use { - // case Status.Successful(r) => - // r.attemptAs[Json].leftMap(_.message).value - // case r => - // r.as[String] - // .map(b => - /* s"Request $req failed with status ${r.status.code} and body $b" */ - // .asLeft[Json] - // ) - // } - // eitherResult = for { - // json <- eitherValues - // converted <- convertEntities(json) - // } yield converted - // resp <- Ok(eitherResult.fold(Json.fromString, identity)) - // } yield resp + case req @ GET -> Root / `api` / `verb` / "searchLexeme" => + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(req.params) + // Create response + response <- operationDetails.fold( + err => errorResponseJson(err, BadRequest), + opData => { + val searchOperation = WikibaseSearchLexeme(opData, client) + searchOperation.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, 
InternalServerError) + ) + } + ) + } yield response /** Search for all the languages used in a wikibase instance. - * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata. - * Returns a JSON object with the array of languages returned by the endpoint. + * - endpoint [String]: Base URL of the target wikibase instance. Defaults to Wikidata. + * Returns a JSON object with the array of languages supported. + * See [[WikibaseOperationResult]] */ - // case GET -> Root / `api` / `verb` / "languages" :? - // EndpointParameter(maybeEndpoint) => - // val endpoint: String = maybeEndpoint.getOrElse(wikidataUrl) - // logger.debug(s"Wikibase language search with endpoint: $endpoint") - // - // val uri = Uri - // .fromString(endpoint) - // .valueOr(throw _) - // .withPath(Uri.Path.unsafeFromString("/w/api.php")) - // .withQueryParam("action", "query") - // .withQueryParam("meta", "wbcontentlanguages") - // .withQueryParam("wbclcontext", "term") - // .withQueryParam("wbclprop", "code|autonym") - // .withQueryParam("format", "json") - // - // val req: Request[IO] = Request(method = GET, uri = uri) - // for { - // eitherValues <- client.run(req).use { - // case Status.Successful(r) => - // r.attemptAs[Json].leftMap(_.message).value - // case r => - // r.as[String] - // .map(b => - /* s"Request $req failed with status ${r.status.code} and body $b" */ - // .asLeft[Json] - // ) - // } - // eitherResult = for { - // json <- eitherValues - // converted <- convertLanguages(json) - // } yield converted - // resp <- Ok( - // eitherResult.fold(Json.fromString, identity) - // ) - // } yield resp + case req @ GET -> Root / `api` / `verb` / "languages" => + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(req.params) + // Create response + response <- operationDetails.fold( + err => errorResponseJson(err, BadRequest), + opData => { + val languagesOperation = WikibaseLanguages(opData, client) + 
languagesOperation.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response /** Execute a given SPARQL query to a given SPARQL endpoint of a wikibase instance. * Receives a target endpoint and the query text. - * - endpoint [String]: SPARQL query endpoint. Defaults to Wikidata - * - query [String]: SPARQL query to be run + * - endpoint [String]: SPARQL query endpoint of the target wikibase instance. Defaults to Wikidata + * - payload [String]: SPARQL query to be run * Returns a JSON object with the query results: - * - head [Object]: Query metadata - * - vars: [Array]: Query variables - * - results [Object]: Query results - * - bindings: [Array]: Query results, each item being an object mapping each variable to its value + * (see [[WikibaseOperationResult]]) + * + * Query examples in [[https://www.wikidata.org/wiki/Wikidata:SPARQL_query_service/queries/examples]] */ - // case req @ POST -> Root / `api` / `verb` / "query" => - // req.decode[Multipart[IO]] { m => - // { - // val partsMap = PartsMap(m.parts) - // for { - // optQuery <- partsMap.optPartValue("query") - // optEndpoint <- partsMap.optPartValue("endpoint") - // endpoint = optEndpoint.getOrElse(wikidataQueryUri.toString()) - // query = optQuery.getOrElse("") - // req: Request[IO] = - // Request( - // method = GET, - // uri = Uri - // .fromString(endpoint) - // .valueOr(throw _) - // .withQueryParam("query", query) - // ) - // .withHeaders( - // `Accept`(MediaType.application.`json`) - // ) - // eitherValue <- client.run(req).use { - // case Status.Successful(r) => - // r.attemptAs[Json].leftMap(_.message).value - // case r => - // r.as[String] - // .map(b => - /* s"Request $req failed with status ${r.status.code} and body $b" */ - // .asLeft[Json] - // ) - // } - // resp <- Ok(eitherValue.fold(Json.fromString, identity)) - // } yield resp - // } - // } + case req @ POST -> Root / `api` / 
`verb` / "query" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(partsMap) + // Create response + response <- operationDetails.fold( + err => errorResponseJson(err, BadRequest), + opData => { + val queryOp = WikibaseQueryOperation(opData, client) + queryOp.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response + } /** Attempts to extract an schema (ShEx) from a given entity present in wikidata. - * Receives an entity URI: - * - entity [String]: Unique address of the entity in wikidata - * Returns a JSON object with the extraction results: - * - entity [String]: URI of the entity whose information we searched - * - result [String]: Extracted schema + * Receives an entity URI as payload. + * - endpoint [String]: Base URL of the target wikibase instance. Defaults to Wikidata. 
+ * - payload [String]: Unique URI of the entity in wikidata + * Returns a JSON object with the extracted schema: + * (see [[WikibaseOperationResult]]) */ - // case req @ POST -> Root / `api` / `verb` / "extract" => - // req.decode[Multipart[IO]] { m => - // val partsMap = PartsMap(m.parts) - // val r: EitherT[IO, String, Response[IO]] = for { - // label <- EitherT(partsMap.eitherPartValue("entity")) - // info <- either2es[WikibaseEntity](uriToEntity(label)) - // _ <- { - // logger.debug(s"Extraction URI: ${info.uri}"); - // ok_esf[Unit, IO](()) - // } - // strRdf <- io2es(redirectClient.expect[String](info.uri)) - // eitherInferred <- io2es( - // RDFAsJenaModel - // .fromString(strRdf, "TURTLE") - // .flatMap( - // _.use(rdf => - // for { - // rdfSerialized <- rdf.serialize("TURTLE") - // nodeSelector = RDFNodeSelector(IRI(label)) - // inferred <- SchemaInfer.runInferSchema( - // rdf, - // nodeSelector, - // "ShEx", - // IRI(s"http://example.org/Shape_${info.localName}"), - // InferOptions.defaultOptions.copy(maxFollowOn = 3) - // ) - // } yield inferred - // ) - // ) - // ) - // pair <- either2es[(Schema, ResultShapeMap)](eitherInferred) - // shExCStr <- io2es({ - // val (schema, _) = pair - // schema.serialize("SHEXC") - // }) - // _ <- { - // logger.trace(s"ShExC str: $shExCStr"); - // ok_es[Unit](()) - // } - // resp <- io2es( - // Ok( - // Json.fromFields( - // List( - // ("entity", Json.fromString(label)), - // ("result", Json.fromString(shExCStr)) - // ) - // ) - // ) - // ) - // } yield resp - // for { - // either <- r.value - // resp <- either.fold( - // err => errorResponseJson(err, InternalServerError), - // r => IO.pure(r) - // ) - // } yield resp - // } + case req @ POST -> Root / `api` / `verb` / "extract" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(partsMap) + // Create response + response <- operationDetails.fold( + 
err => errorResponseJson(err, BadRequest), + opData => { + val queryOp = WikibaseSchemaExtract(opData, redirectClient) + queryOp.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response + } - // TODO: This one doesn't work. It gives a timeout response - /** Attempts to extract an schema (ShEx) from a given entity present in wikidata using "shexer". - * See https://github.com/DaniFdezAlvarez/shexer - * Receives an entity URI: - * - entity [String]: Unique address of the entity in wikidata - * Returns a JSON object with the extraction results: - * - entity [String]: URI of the entity whose information we searched - * - result [String]: Extracted schema + // TODO: Needs exhaustive testing and client changes + /** Attempts to extract an schema (ShEx) from a given entity present in wikidata + * using SheXer. See [[https://github.com/DaniFdezAlvarez/shexer]]. + * Receives an entity URI as payload. + * - endpoint [String]: Base URL of the target wikibase instance. Should + * be left empty so it defaults to Wikidata. 
+ * - payload [String]: Unique URI of the entity in wikidata + * Returns a JSON object with the extracted schema: + * (see [[WikibaseOperationResult]]) */ - // case req @ POST -> Root / `api` / `verb` / "shexer" => - // req.decode[Multipart[IO]] { m => - // val partsMap = PartsMap(m.parts) - // val r: EitherT[IO, String, Response[IO]] = for { - // label <- EitherT(partsMap.eitherPartValue("entity")) - // jsonParams <- either2es[Json](mkShexerParams(label)) - // postRequest = Request[IO]( - // method = POST, - // uri = uri"http://156.35.94.158:8081/shexer" - // ).withHeaders(`Content-Type`(MediaType.application.`json`)) - // .withEntity[Json](jsonParams) - // _ <- { - // logger.debug(s"URI: ${jsonParams.spaces2}"); - // ok_es[Unit](()) - // } - // result <- f2es(redirectClient.expect[Json](postRequest)) - // _ <- { - // logger.trace(s"Result\n${result.spaces2}"); - // ok_es[Unit](()) - // } - // resp <- f2es(Ok(result)) - // } yield resp - // for { - // either <- r.value - // resp <- either.fold( - // err => errorResponseJson(err, InternalServerError), - // r => IO.pure(r) - // ) - // } yield resp - // } + case req @ POST -> Root / `api` / `verb` / "shexer" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + for { + // Get operation information sent by the user + operationDetails <- WikibaseOperationDetails(partsMap) + // Create response + response <- operationDetails.fold( + err => errorResponseJson(err, BadRequest), + opData => { + val queryOp = WikibaseSheXerExtract(opData, redirectClient) + queryOp.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + ) + } yield response + } - /** Validate entities in a wikibase using wikidata schemas or shape expressions. - * Receives several data: - * - endpoint [String]: Endpoint of the target wikibase instance. 
Defaults to Wikidata - * - entity [String]: URI of the entity to be validated - * - entitySchema [String]: (Wikidata schema only) Identifier of the wikidata schema to be used - * - schema [String]: (ShEx schema only) Raw contents of the schema supplied by the user - * - schemaFormat [String]: (ShEx schema only) Format of the schema supplied by the user - * - schemaEngine [String]: Schema engine to be used (defaults to ShEx) - * - shape [String]: Shape of the schema which will be compared against the entity - * Returns a JSON object with the results (pending). + // TODO: Needs exhaustive testing and client changes + /** Validate entities in wikidata using a given schema. + * Receives an entity URI as payload, as well as the parameters to create + * the ShEx schema against which to validate. + * - endpoint [String]: Base URL of the target wikibase instance. Should + * be left empty so it defaults to Wikidata. + * - payload [String]: Unique URI of the entity in wikidata + * - schema [String]: Schema data (raw, URL containing the schema or File with the schema) + * - schemaSource [String]: Identifies the source of the schema (raw, URL, file...) 
+ * - schemaEngine [String]: Format of the schema, should be ShEx if using wikidata schemas + * + * Returns a JSON object with the validation results: + * (see [[WikibaseOperationResult]] and + * [[SchemaValidate.encodeSchemaValidateOperation]]) */ - // case req @ POST -> Root / `api` / `verb` / "validate" => - // logger.debug(s"Wikidata validate request: $req") - // req.decode[Multipart[IO]] { m => - // val partsMap = PartsMap(m.parts) - // val r: IO[Response[IO]] = for { - // eitherEntity <- partsMap.eitherPartValue("entity") - // item <- ioFromEither(eitherEntity) - // info <- ioFromEither(uriToEntity2(item)) - // pair <- WikibaseSchema.mkSchema(partsMap, None, client) - // - // (schema, wbp) = pair - // iriItem <- ioFromEither(IRI.fromString(info.sourceUri)) - // shapeMap <- ioFromEither(ShapeMap.empty.add(iriItem, Start)) - // triggerMode = ShapeMapTrigger(shapeMap) - // result <- for { - // res1 <- WikibaseRDF.wikidata - // res2 <- RDFAsJenaModel.empty - // vv <- (res1, res2).tupled.use { case (rdf, builder) => - // for { - /* validationResult <- schema.validate(rdf, triggerMode, builder) */ - /* // json <- schemaResult2json(validationResult) */ - /* } yield SchemaValidate.encodeValidationResult(validationResult) */ - // } - // } yield vv - // resp <- Ok(result) - // } yield resp - // r.attempt.flatMap( - // _.fold( - // s => errorResponseJson(s.getMessage, InternalServerError), - // IO.pure - // ) - // ) - // } + case req @ POST -> Root / `api` / `verb` / "validate" => + req.decode[Multipart[IO]] { m => + val partsMap = PartsMap(m.parts) + for { + // Get operation information sent by the user + eitherDetails <- WikibaseOperationDetails(partsMap) + + // Get the validation schema sent by the user + eitherSchema <- Schema.mkSchema(partsMap) + + operationData: Either[String, (WikibaseOperationDetails, Schema)] = + for { + details <- eitherDetails + schema <- eitherSchema + } yield (details, schema) + + // Create response + response <- operationData.fold( + err 
=> errorResponseJson(err, BadRequest), + { + case (details, schema) => { + val operation = WikibaseSchemaValidate(details, client, schema) + operation.performOperation + .flatMap(results => Ok(results.asJson)) + .handleErrorWith(err => + errorResponseJson(err.getMessage, InternalServerError) + ) + } + } + ) + } yield response + } } } diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala deleted file mode 100644 index af305a67..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseServiceUtils.scala +++ /dev/null @@ -1,147 +0,0 @@ -package es.weso.rdfshape.server.api.routes.wikibase.service - -import cats.implicits._ -import io.circe.Json -import io.circe.parser.parse - -private[service] class WikibaseServiceUtils {} - -/** Static utilities used by the Wikibase service - */ -//noinspection HttpUrlsUsage,SpellCheckingInspection -object WikibaseServiceUtils { - - /** For a given entity, create the JSON structure accepted by the Shexer API - * - * @param entity Entity to be examined by Shexer - * @return JSON object with the structure accepted by the Shexer API, adapted for the given entity - */ - def mkShexerParams(entity: String): Either[String, Json] = for { - prefixes <- wikidataPrefixes - } yield Json.fromFields( - List( - ("prefixes", prefixes), - ( - "shape_map", - Json.fromString( - "SPARQL'SELECT DISTINCT ?virus WHERE { VALUES ?virus { wd:Q82069695 } }'@ " - ) - ), - ("endpoint", Json.fromString("https://query.wikidata.org/sparql")), - ("all_classes", Json.False), - ("query_depth", Json.fromInt(1)), - ("threshold", Json.fromInt(0)), - ( - "instantiation_prop", - Json.fromString("http://www.wikidata.org/prop/direct/P31") - ), - ("disable_comments", Json.True), - ("shape_qualifiers_mode", Json.True), - ( - "namespaces_for_qualifiers", - 
Json.arr(Json.fromString("http://www.wikidata.org/prop/")) - ) - ) - ) - - /** @return JSON containing all the prefixed used by Wikidata - */ - def wikidataPrefixes: Either[String, Json] = { - val json = - """{ - "http://wikiba.se/ontology#": "wikibase", - "http://www.bigdata.com/rdf#": "bd", - "http://www.wikidata.org/entity/": "wd", - "http://www.wikidata.org/prop/direct/": "wdt", - "http://www.wikidata.org/prop/direct-normalized/": "wdtn", - "http://www.wikidata.org/entity/statement/": "wds", - "http://www.wikidata.org/prop/": "p", - "http://www.wikidata.org/reference/": "wdref", - "http://www.wikidata.org/value/": "wdv", - "http://www.wikidata.org/prop/statement/": "ps", - "http://www.wikidata.org/prop/statement/value/": "psv", - "http://www.wikidata.org/prop/statement/value-normalized/": "psn", - "http://www.wikidata.org/prop/qualifier/": "pq", - "http://www.wikidata.org/prop/qualifier/value/": "pqv", - "http://www.wikidata.org/prop/qualifier/value-normalized/": "pqn", - "http://www.wikidata.org/prop/reference/": "pr", - "http://www.wikidata.org/prop/reference/value/": "prv", - "http://www.wikidata.org/prop/reference/value-normalized/": "prn", - "http://www.wikidata.org/prop/novalue/": "wdno" - }""" - parse(json).leftMap(e => s"Error parsing prefixes: $e") - } - - /** Convert the response from Wikibase "wbcontentlanguages" to a more convenient JSON structure - * @param json Input JSON, as received from Wikibase - * @return Either a JSON representation of the languages in the Wikibase, or an error message - */ - def convertLanguages(json: Json): Either[String, Json] = for { - languagesObj <- json.hcursor - .downField("query") - .downField("wbcontentlanguages") - .focus - .toRight(s"Error obtaining query/wbcontentlanguages at ${json.spaces2}") - keys <- languagesObj.hcursor.keys.toRight( - s"Error obtaining values from languages: ${languagesObj.spaces2}" - ) - converted = Json.fromValues( - keys.map(key => - Json.fromFields( - List( - ( - "label", - 
languagesObj.hcursor - .downField(key) - .downField("code") - .focus - .getOrElse(Json.Null) - ), - ( - "name", - languagesObj.hcursor - .downField(key) - .downField("autonym") - .focus - .getOrElse(Json.Null) - ) - ) - ) - ) - ) - } yield { - converted - } - - /** Convert the response from Wikibase "wbsearchentities" to a more convenient JSON structure - * @param json Input JSON, as received from Wikibase - * @return Either a JSON representation of the entities in the Wikibase, or an error message - */ - def convertEntities(json: Json): Either[String, Json] = for { - entities <- json.hcursor - .downField("search") - .values - .toRight("Error obtaining search value") - converted = Json.fromValues( - entities.map((value: Json) => - Json.fromFields( - List( - ( - "label", - value.hcursor.downField("label").focus.getOrElse(Json.Null) - ), - ("id", value.hcursor.downField("id").focus.getOrElse(Json.Null)), - ( - "uri", - value.hcursor.downField("concepturi").focus.getOrElse(Json.Null) - ), - ( - "descr", - value.hcursor.downField("description").focus.getOrElse(Json.Null) - ) - ) - ) - ) - ) - } yield converted -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala index bd960f26..324bdb42 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/IncomingRequestParameters.scala @@ -43,6 +43,7 @@ object IncomingRequestParameters { lazy val wdEntity = "wdEntity" lazy val wdSchema = "wdSchema" + lazy val payload = "payload" lazy val url = "url" lazy val urlCode = "urlCode" lazy val hostname = "hostname" @@ -50,8 +51,10 @@ object IncomingRequestParameters { lazy val examples = "examples" lazy val manifestUrl = "manifestUrl" lazy val language = "language" + 
lazy val languages = "languages" lazy val label = "label" lazy val limit = "limit" + lazy val format = "wbFormat" lazy val continue = "continue" lazy val withDot = "withDot" @@ -230,6 +233,13 @@ object IncomingRequestParameters { val name: String = querySource } + /** Parameter expected to contain a payload for later use querying wikibase's API + */ + object WikibasePayloadParameter + extends QueryParamDecoderMatcher[String](payload) { + val name: String = payload + } + /** Parameter expected to contain a valid identifier/name/label of a wikidata entity * in wikidata-related operations */ @@ -245,14 +255,24 @@ object IncomingRequestParameters { } /** Parameter expected to contain a valid language code, normally for - * wikidata-related operations that return data in a user-selected language + * wikidata-related operations that search for data in a user-selected language * - * @note See {@linkplain https:// en.wikipedia.org / wiki / List_of_ISO_639 - 1 _codes} + * @note See [[https:// en.wikipedia.org / wiki / List_of_ISO_639 - 1 _codes]] */ object LanguageParameter extends QueryParamDecoderMatcher[String](language) { val name: String = language } + /** Parameter expected to contain a list of language codes, normally for + * wikidata-related operations that return data in a user-selected language + * + * @note See [[https:// en.wikipedia.org / wiki / List_of_ISO_639 - 1 _codes]] + */ + object LanguagesParameter + extends QueryParamDecoderMatcher[String](languages) { + val name: String = languages + } + /** Parameter expected to contain a valid identifier/name/label of a wikibase entity * in wikibase-related operations */ @@ -289,4 +309,12 @@ object IncomingRequestParameters { val name: String = continue } + /** Parameter expected to contain the format requested to wikibase when + * searching for data + */ + object WikibaseFormatParameter + extends OptionalQueryParamDecoderMatcher[String](format) { + val name: String = format + } + } diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala index f0ec7640..41230854 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/utils/parameters/PartsMap.scala @@ -28,6 +28,20 @@ case class PartsMap private (map: Map[String, Part[IO]]) { } + /** Extract the value from a request parameter, decoding it and handling errors + * + * @param key Parameter key + * @param alt Alternative value to be returned when parameter value + * is missing + * @return Optionally, the String contents of the parameter + */ + def optPartValue(key: String, alt: Option[String] = None): IO[Option[String]] = + map.get(key) match { + case Some(part) => + part.body.through(decode).compile.foldMonoid.map(Some.apply) + case None => IO.pure(None) + } + /** Shorthand for extracting values from a request parameter with an informational error message * * @param key Parameter key @@ -42,18 +56,6 @@ case class PartsMap private (map: Map[String, Part[IO]]) { ) case Some(s) => Right(s) } - - /** Extract the value from a request parameter, decoding it and handling errors - * - * @param key Parameter key - * @return Optionally, the String contents of the parameter - */ - def optPartValue(key: String): IO[Option[String]] = - map.get(key) match { - case Some(part) => - part.body.through(decode).compile.foldMonoid.map(Some.apply) - case None => IO.pure(None) - } } object PartsMap extends LazyLogging{ diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala deleted file mode 100644 index 0b408230..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/EndpointValue.scala +++ /dev/null @@ -1,7 +0,0 @@ -package 
es.weso.rdfshape.server.api.values - -/** Data class representing any endpoint from where information is fetched or that identifies RDF data - * @param endpoint Base endpoint - * @param node Specific information node - */ -case class EndpointValue(endpoint: Option[String], node: Option[String]) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala deleted file mode 100644 index d25fa2ad..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/values/WikidataEntityValue.scala +++ /dev/null @@ -1,7 +0,0 @@ -package es.weso.rdfshape.server.api.values - -/** Data class representing a Wikidata entity - * - * @param entity Entity of which the data is contained - */ -case class WikidataEntityValue(entity: Option[String]) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HtmlToRdf.scala similarity index 97% rename from modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala rename to modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HtmlToRdf.scala index 95d002f5..a555500f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HTML2RDF.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/html2rdf/HtmlToRdf.scala @@ -28,9 +28,9 @@ import org.eclipse.rdf4j.model.{ import scala.util.Try -/** Utilities for extracting RDF models from different sources +/** Utilities for extracting RDF models from HTML markdown fetched from different sources */ -object HTML2RDF extends LazyLogging { +object HtmlToRdf extends LazyLogging { /** List of all available RDF data extractors */ @@ -164,6 +164,12 @@ object HTML2RDF extends LazyLogging { case b: BNode => cnvBNode(b) } + def cnvBNode(b: BNode): JenaResource = + model.createResource(AnonId.create(b.getID)) + + 
def cnvIRI(p: RDF4jIRI): JenaProperty = + model.createProperty(p.toString) + def cnvObj(o: Value): JenaRDFNode = o match { case i: RDF4jIRI => cnvIRI(i) case b: BNode => cnvBNode(b) @@ -174,12 +180,6 @@ object HTML2RDF extends LazyLogging { model.createTypedLiteral(l.getLabel, l.getDatatype.toString) } - def cnvBNode(b: BNode): JenaResource = - model.createResource(AnonId.create(b.getID)) - - def cnvIRI(p: RDF4jIRI): JenaProperty = - model.createProperty(p.toString) - override def startDocument( documentIRI: RDF4jIRI ): Unit = {} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala deleted file mode 100644 index b85aa72d..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/streams/Streams.scala +++ /dev/null @@ -1,187 +0,0 @@ -package es.weso.rdfshape.server.streams - -import cats.effect.IO -import com.typesafe.scalalogging.LazyLogging -import es.weso.rdf.jena.SPARQLQueries.queryTriplesWithSubject -import es.weso.rdf.nodes.IRI -import org.apache.commons.io.output.WriterOutputStream -import org.apache.jena.query.QueryExecutionFactory -import org.apache.jena.rdf.model.ModelFactory -import org.apache.jena.riot.system._ -import org.apache.jena.riot.{Lang, RDFDataMgr} -import org.http4s.Uri - -import java.io.{OutputStream, StringWriter} -import java.nio.charset.Charset -import java.nio.charset.StandardCharsets.UTF_8 - -/** Utilities for working with RDF data and its extraction from remote sources - */ -object Streams extends LazyLogging { - - /** @param uri URI to read from - * @param lang Output RDF syntax (turtle, n-triples...) 
- * @return Raw RDF data from a remote URI in plain text using the specified syntax - */ - def getRdfRaw( - uri: Uri, - lang: Lang = Lang.TURTLE - ): IO[String] = { - - getRdf( - uri, - lang, - (stringWriter, _, rdfStream) => { - IO { - RDFDataMgr.parse(rdfStream, uri.renderString) - val raw = stringWriter.toString - logger.debug(s"Raw graph: $raw") - raw - } - } - ) - - } - - /** @param uri URI to read from - * @param lang Output RDF syntax (turtle, n-triples...) - * @return Graphed RDF data from a remote URI in plain text using the specified syntax - */ - def getRdfRawWithModel(uri: Uri, lang: Lang = Lang.NTRIPLES): IO[String] = { - getRdf( - uri, - lang, - (stringWriter, _, rdfStream) => { - IO { - val model = ModelFactory.createDefaultModel - val modelGraph = model.getGraph - val streamGraph = StreamRDFLib.graph(modelGraph) - RDFDataMgr.parse(streamGraph, uri.renderString) - - logger.debug(s"Model graph: $model") - - StreamRDFOps.sendGraphToStream(modelGraph, rdfStream) - stringWriter.toString - } - } - ) - - } - - /** @param endpoint URI to read from - * @param node Node to query the endpoint about - * @param lang Output RDF syntax (turtle, n-triples...) - * @return Outgoing node information in RDF from a remote endpoint in plain text with the specified syntax - */ - def getOutgoing( - endpoint: Uri, - node: String, - lang: Lang = Lang.TURTLE - ): IO[String] = - getRdf( - endpoint, - lang, - (stringWriter, _, rdfStream) => { - IO { - val query = QueryExecutionFactory - .sparqlService( - endpoint.renderString, - queryTriplesWithSubject(IRI(node)) - ) - .execConstruct() - - val graph = query.getGraph - logger.debug(s"Query graph: $graph") - - StreamRDFOps.sendGraphToStream(graph, rdfStream) - stringWriter.toString - - } - } - ) - - /** Generic function for private use. Given an RDF-extracting function, executes it while checking for errors and closing all resources used in the process. 
- * - * @param uri URI to read from - * @param lang Output RDF syntax (turtle, n-triples...) - * @param getRdfLogic Logic in charge of extracting RDF from sources - * @param encoding Encoding with which the data extracted is stored - * @return String representation of the RDF data extracted (in the specified language and encoding) - */ - private def getRdf( - uri: Uri, - lang: Lang, - getRdfLogic: (StringWriter, OutputStream, StreamRDF) => IO[String], - encoding: Charset = UTF_8 - ): IO[String] = { - - /* Get the necessary elements (writer, streams, etc.) to read the RDF data - * and store it in plain text if needed. */ - val streamsIOElements = StreamsIOElements(lang, encoding) - val (stringWriter, outputStream, rdfStream) = - StreamsIOElements.unapply(streamsIOElements) - - /* Extract the String representation of the URI and pick up the data from - * the initial StringWriter. - * DATA => StreamRDF => OutputStream => StringWriter */ - try { - getRdfLogic(stringWriter, outputStream, rdfStream) - } catch { - // Log errors before throwing - case e: Throwable => - logger.error(s"Error parsing RDF data from $uri: ${e.getMessage}") - throw e - } finally { - // Always close the output stream - outputStream.close() - } - - } - -} - -/** Data class used as a factory for the repetitive task of instantiating - * the IO tools (StringWriters, OutputStreams, RDFStreams) used for RDF reading, parsing and storing - * - * @param stringWriter String buffer used to store RDF data in plain text - * @param outputStream OutputStream receiving RDF data and sending it to the writer - * @param streamRDF RDFStream used for reading RDF data and sending it to the OutputStream once formatted - */ -sealed case class StreamsIOElements( - stringWriter: StringWriter, - outputStream: OutputStream, - streamRDF: StreamRDF -) - -object StreamsIOElements { - - /** Factory method - * - * @param lang Syntax that the RDFStream with use to output RDF data - * @param encoding Encoding that the OutputStream 
will use to output data - * @return A new data object with IO utils - */ - def apply( - lang: Lang = Lang.TURTLE, - encoding: Charset = UTF_8 - ): StreamsIOElements = { - // Create basic StringWriter and attach it to an OutputStream - val stringWriter = new StringWriter - val outputStream: OutputStream = - new WriterOutputStream(stringWriter, encoding) - // Create an RDF StreamWriter outputting to the previous OutputStream - val rdfStream: StreamRDF = - StreamRDFWriter.getWriterStream(outputStream, lang) - - new StreamsIOElements(stringWriter, outputStream, rdfStream) - } - - /** @param it StreamsIOElements containing the IO utils to parse RDF - * @return A tuple will the IO Utils ready to be destructured in other parts of the code - */ - def unapply( - it: StreamsIOElements - ): (StringWriter, OutputStream, StreamRDF) = { - (it.stringWriter, it.outputStream, it.streamRDF) - } -} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/codec/CodecUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/codec/CodecUtils.scala new file mode 100644 index 00000000..6f654d65 --- /dev/null +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/codec/CodecUtils.scala @@ -0,0 +1,14 @@ +package es.weso.rdfshape.server.utils.codec + +import io.circe.{Encoder, Json} +import org.http4s.Uri + +/** Codecs for Circe to (de)serialize JSON data + */ +case object CodecUtils { + + /** JSON encoder for the [[Uri]] class used in http4s + */ + implicit val uriEncoder: Encoder[Uri] = (uri: Uri) => + Json.fromString(uri.renderString) +} diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala deleted file mode 100644 index 70f65e2e..00000000 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/wikibase/Wikibase.scala +++ /dev/null @@ -1,44 +0,0 @@ -package es.weso.rdfshape.server.wikibase - -import es.weso.rdf.nodes._ -import 
org.http4s._ -import org.http4s.implicits._ - -/** Abstract representation of a wikibase instance - * - * @param name Given name of the wikibase instance - * @param baseUrl Base URL where the instance is deployed (e.g. [[https://www.wikidata.org/]]) - * @param endpointUrl API endpoint of the wikibase instance, where queries are usually made - */ -abstract sealed class Wikibase( - val name: String, - val baseUrl: Uri, - val endpointUrl: IRI -) { - - /** Given a schema identifier, return it's location inside the wikibase instance - * - * @param schema String representation of the schema identifier - * @return Uri where the schema can be accessed - */ - def schemaEntityUri(schema: String): Uri -} - -/** A sub-instance of the more general Wikibase class, containing the data required to access [[https://www.wikidata.org/ Wikidata]] - * - * @see {@link es.weso.rdfshape.server.wikibase.Wikibase} - */ -case object Wikidata - extends Wikibase( - name = "wikidata", - baseUrl = uri"https://www.wikidata.org", - endpointUrl = IRI("https://query.wikidata.org/sparql") - ) { - - def schemaEntityUri(schema: String): Uri = { - val uri = baseUrl.withPath( - Uri.Path.unsafeFromString(s"/wiki/Special:EntitySchemaText/$schema") - ) - uri - } -} diff --git a/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala b/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala index e66dc968..e8084f0b 100644 --- a/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala +++ b/modules/server/src/test/scala/es/weso/rdfshape/server/html2rdf/HTML2RDFTest.scala @@ -93,7 +93,7 @@ class HTML2RDFTest extends CatsEffectSuite { ) { val r: IO[(Boolean, String, String)] = for { res1 <- IO( - HTML2RDF.extractFromString(html, extractorName) + HtmlToRdf.extractFromString(html, extractorName) ) res2 <- RDFAsJenaModel.fromChars(expected, "TURTLE") vv <- (res1, res2).tupled.use { case (rdf, expected) => From 
0695bc982139685ca6d4a2f681c833e5e9c75e50 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 28 Jan 2022 13:43:12 +0100 Subject: [PATCH 31/32] Enhanced wikibase validation --- build.sbt | 17 +- .../api/routes/data/service/DataService.scala | 1 - .../logic/operations/SchemaValidate.scala | 4 +- .../api/routes/shapemap/logic/ShapeMap.scala | 1 - .../operations/WikibaseOperationDetails.scala | 1 + .../schema/WikibaseSchemaValidate.scala | 43 +++-- .../server/api/DataServiceTest.pending | 132 ------------- .../server/api/ValidateShExTest.pending | 182 ------------------ .../server/api/WebServiceTest.pending | 67 ------- .../weso/wikibaserdf/WikibaseRDFTest.pending | 181 ----------------- .../logback-configurations/logback.groovy | 32 ++- src/main/scala/es/weso/rdfshape/Main.scala | 3 + 12 files changed, 59 insertions(+), 605 deletions(-) delete mode 100644 modules/server/src/test/scala/es/weso/rdfshape/server/api/DataServiceTest.pending delete mode 100644 modules/server/src/test/scala/es/weso/rdfshape/server/api/ValidateShExTest.pending delete mode 100644 modules/server/src/test/scala/es/weso/rdfshape/server/api/WebServiceTest.pending delete mode 100644 modules/server/src/test/scala/es/weso/wikibaserdf/WikibaseRDFTest.pending diff --git a/build.sbt b/build.sbt index 7af369cf..9dac7066 100644 --- a/build.sbt +++ b/build.sbt @@ -183,12 +183,6 @@ lazy val resolverSettings = Seq( Resolver.sonatypeRepo("snapshots") ) ) -// "sbt-github-actions" plugin settings -val JavaCIVersion = "adopt@1.11" -val ScalaCIVersion = "2.13.6" -ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) -ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) - // Shared settings for the BuildInfo Plugin // See https://github.com/sbt/sbt-buildinfo lazy val buildInfoSettings = Seq( @@ -275,6 +269,8 @@ lazy val server = project mongodb ) ) +ThisBuild / githubWorkflowJavaVersions := Seq(JavaCIVersion) +ThisBuild / githubWorkflowScalaVersions := Seq(ScalaCIVersion) /* 
------------------------------------------------------------------------- */ // Documentation project, for MDoc + Docusaurus documentation lazy val docs = project @@ -299,8 +295,8 @@ lazy val mongodbVersion = "4.4.0" lazy val any23Version = "2.4" lazy val rdf4jVersion = "3.7.4" lazy val graphvizJavaVersion = "0.18.1" -lazy val logbackVersion = "1.2.10" -lazy val loggingVersion = "3.9.4" +lazy val logbackVersion = "1.2.8" +lazy val scalaLoggingVersion = "3.9.4" lazy val groovyVersion = "3.0.8" lazy val munitVersion = "0.7.27" lazy val munitEffectVersion = "1.0.7" @@ -333,7 +329,7 @@ lazy val graphvizJava = "guru.nidi" % "graphviz-java" % graphvizJavaVer lazy val plantuml = "net.sourceforge.plantuml" % "plantuml" % plantumlVersion lazy val logbackClassic = "ch.qos.logback" % "logback-classic" % logbackVersion lazy val scalaLogging = - "com.typesafe.scala-logging" %% "scala-logging" % loggingVersion + "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion lazy val groovy = "org.codehaus.groovy" % "groovy" % groovyVersion lazy val munit = "org.scalameta" %% "munit" % munitVersion lazy val munitEffect = @@ -344,3 +340,6 @@ lazy val shexs = "es.weso" %% "shexs" % shexsVersion lazy val shaclex = "es.weso" %% "shaclex" % shaclexVersion lazy val umlShaclex = "es.weso" %% "umlshaclex" % umlShaclexVersion lazy val wesoUtils = "es.weso" %% "utilstest" % wesoUtilsVersion +// "sbt-github-actions" plugin settings +val JavaCIVersion = "adopt@1.11" +val ScalaCIVersion = "2.13.6" diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala index d93e5744..c3ac3171 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/data/service/DataService.scala @@ -192,7 +192,6 @@ class DataService(client: 
Client[IO]) case errorMessage: String => errorResponseJson(errorMessage, InternalServerError) case _ => // null exception message, return a general error message - err.printStackTrace() errorResponseJson( DataServiceError.couldNotParseData, InternalServerError diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala index af119786..df78911f 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/schema/logic/operations/SchemaValidate.scala @@ -59,8 +59,8 @@ private[api] object SchemaValidate extends LazyLogging { for { innerSchema <- schema.getSchema result = innerSchema.flatMap(s => { - trigger.getValidationTrigger.map(vt => { - s.validate(rdf, vt, builder) + trigger.getValidationTrigger.map(trigger => { + s.validate(rdf, trigger, builder) }) }) validation <- result match { diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala index c75bb204..9776468a 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/shapemap/logic/ShapeMap.scala @@ -58,7 +58,6 @@ sealed case class ShapeMap private ( case None | Some("") => Left("Cannot extract the ShapeMap from an empty instance") case Some(shapeMapStr) => - println(nodesPrefixMap.pm) ShapeMapW .fromString( shapeMapStr, diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala 
b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala index ff1f87f6..607b0ad6 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/WikibaseOperationDetails.scala @@ -23,6 +23,7 @@ import org.http4s.implicits.http4sLiteralsSyntax * (empty list returns all available languages). * Each language must be represented by its language code. * @param limit Maximum amount of results queried in search operations + * In SPARQL queries, the limit is embedded in the query text * @param continue Offset where to continue a search operation * @param format Format in which results are requested * @see [[https://www.mediawiki.org/wiki/Wikibase/API#API_documentation_and_Wikibase_modules]] diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaValidate.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaValidate.scala index af77e30b..9c94a5ec 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaValidate.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaValidate.scala @@ -1,12 +1,14 @@ package es.weso.rdfshape.server.api.routes.wikibase.logic.operations.schema import cats.effect.IO +import cats.implicits.catsSyntaxEitherId import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.NONE import es.weso.rdf.nodes.IRI import es.weso.rdfshape.server.api.format.dataFormats.{Compact, Turtle} import es.weso.rdfshape.server.api.routes.data.logic.DataSource import es.weso.rdfshape.server.api.routes.data.logic.types.DataSingle +import 
es.weso.rdfshape.server.api.routes.data.logic.types.merged.DataCompound import es.weso.rdfshape.server.api.routes.schema.logic.operations.SchemaValidate import es.weso.rdfshape.server.api.routes.schema.logic.trigger.TriggerShapeMap import es.weso.rdfshape.server.api.routes.schema.logic.types.Schema @@ -53,25 +55,37 @@ private[wikibase] case class WikibaseSchemaValidate( override lazy val targetUri: Uri = uri"" // unused + private val entitiesSeparator = '|' + override def performOperation: IO[WikibaseOperationResult] = { - val entityUri = operationData.payload + + val entityUris = operationData.payload.split(entitiesSeparator) // Raise error if target is not Wikidata if(targetWikibase != Wikidata) IO.raiseError(WikibaseServiceException(wikidataOnlyMessage)) else { val tryResult = for { - // Get the Wikidata item info from the URI submitted as payload - wdEntity <- Try { - WikidataEntity(Uri.unsafeFromString(entityUri)) + // Get the Wikidata items info from the URI submitted as payload + wdEntities <- Try { + entityUris.map(it => WikidataEntity(Uri.unsafeFromString(it))) } // Create the data to be validated, using Wikidata to get the URL // to the Turtle contents - inputData = DataSingle( - dataPre = Some(wdEntity.contentUri.renderString), - dataFormat = Turtle, - inference = NONE, - dataSource = DataSource.URL + /* Data will be a compound of several simple data fetched from each + * entity */ + + inputData = DataCompound( + wdEntities + .map(entity => + DataSingle( + dataPre = Some(entity.contentUri.renderString), + dataFormat = Turtle, + inference = NONE, + dataSource = DataSource.URL + ) + ) + .toList ) /* Get the schema model needed for validation: already passed to the @@ -80,9 +94,12 @@ private[wikibase] case class WikibaseSchemaValidate( // Perform validation eitherValidationResults = for { // Create your trigger mode: ShEx with basic Shapemap - shapeMapModel <- ShapeMapW.empty.add( - IRI(wdEntity.entityUri.renderString), - Start + // For each entity, add a 
start point + shapeMapModel <- wdEntities.foldLeft(ShapeMapW.empty.asRight[String])( + (sm, entity) => { + val entityUri = IRI(entity.entityUri.renderString) + sm.flatMap(_.add(entityUri, Start)) + } ) shapeMapFinalModel <- shapeMapModel.serialize(Compact.name) trigger = TriggerShapeMap( @@ -122,7 +139,7 @@ private[wikibase] case class WikibaseSchemaValidate( wikibase = targetWikibase, result = Json.fromFields( List( - ("entity", Json.fromString(entityUri)), + ("entity", entityUris.asJson), ("result", validationResults.asJson) ) ) diff --git a/modules/server/src/test/scala/es/weso/rdfshape/server/api/DataServiceTest.pending b/modules/server/src/test/scala/es/weso/rdfshape/server/api/DataServiceTest.pending deleted file mode 100644 index 8444d4f3..00000000 --- a/modules/server/src/test/scala/es/weso/rdfshape/server/api/DataServiceTest.pending +++ /dev/null @@ -1,132 +0,0 @@ -package es.weso.rdfshape.server.server -import cats.effect._ -import es.weso.rdf.nodes.{IRI, RDFNode} -import es.weso.shapemaps.{Status => ShapeMapStatus, _} -import io.circe.Json -import io.circe.parser._ -import org.http4s.client.blaze.BlazeClientBuilder -import org.http4s.dsl.io._ -import org.http4s.implicits._ -import org.http4s.{Request, Response, Uri, Query => HQuery} -import es.weso.utils.test._ -import org.http4s.Uri.{Path => UriPath} -import scala.concurrent.ExecutionContext.global -import munit.CatsEffectSuite - -class DataServiceTest extends CatsEffectSuite with JsonMatchers { - - - test(s"Should merge two RDF files") { - val dataStr1 = "

1 ." - val dataStr2 = "

2 ." - val compoundData = - s"""|[ - |{ "data": "${dataStr1}", "dataFormat": "Turtle" }, - |{ "data": "${dataStr2}", "dataFormat": "Turtle" } - |] - |""".stripMargin - val ioResponse = serve( - Request( - GET, - Uri( - path = UriPath.fromString("/api/data/convert"), - query = HQuery.fromPairs( - (QueryParams.compoundData, compoundData), - (QueryParams.targetDataFormat, "Turtle") - ) - ) - ) - ) - val response = ioResponse.unsafeRunSync - response.status should be(Ok) - val strResponse = response.as[String].unsafeRunSync() - val jsonResponse = parse(strResponse).getOrElse(Json.Null) - val expected = - """|

1,2 - |""".stripMargin - - jsonResponse.hcursor - .downField("msg") - .as[String] - .fold( - err => fail(s"Error decoding field msg in response: ${err.toString}"), - str => str should be("Conversion successful!") - ) - jsonResponse.hcursor - .downField("result") - .as[String] - .fold( - err => fail(s"Error obtaining field result from obtained JSON:\n${jsonResponse.spaces2}"), - strResult => info(s"Obtained result: $strResult") - - ) - - } - - test("Should obtain JSON data from RDF") { - val dataStr = - """|prefix : - |:x :p 1 . - |""".stripMargin - - val ioResponse = serve( - Request( - GET, - Uri( - path = UriPath.fromString("/api/data/convert"), - query = HQuery.fromPairs( - (QueryParams.data, dataStr), - (QueryParams.dataFormat, "Turtle"), - (QueryParams.targetDataFormat, "JSON") - ) - ) - ) - ) - - val response = ioResponse.unsafeRunSync - response.status should be(Ok) - val strResponse = response.as[String].unsafeRunSync() - val jsonResponse = parse(strResponse).getOrElse(Json.Null) - val expected = - """|[ - | { "data": { "id": "N0", "type": "iri", "label": ":x" } }, - | { "data": { "id": "N1", "type": "lit", "label": "1" } }, - | { "data": { "source": "N0", "target": "N1", "label": ":p", "href": "http://example.org/p"} } - |]""".stripMargin - jsonResponse.hcursor - .downField("msg") - .as[String] - .fold( - err => fail(s"Error decoding field msg in response: ${err.toString}"), - str => str should be("Conversion successful!") - ) - jsonResponse.hcursor - .downField("result") - .as[String] - .fold( - err => fail(s"Error obtaining field result from obtained JSON:\n${jsonResponse.spaces2}"), - strResult => - parse(strResult).fold( - err => fail(s"Error parsing string in Result: $err\nString in Result:$strResult"), - jsonResult => jsonResult should matchJsonString(expected) - ) - ) - } - - val ip = "0.0.0.0" - val port = 8080 - implicit val timer: Timer[IO] = IO.timer(global) - implicit val cs: ContextShift[IO] = IO.contextShift(global) - - def serve(req: 
Request[IO]): IO[Response[IO]] = { - val blocker = Blocker[IO] - - blocker.use { - case (blocker) => - BlazeClientBuilder[IO](global).resource.use { - case client => - DataService[IO](blocker, client).routes.orNotFound.run(req) - } - } - } -} diff --git a/modules/server/src/test/scala/es/weso/rdfshape/server/api/ValidateShExTest.pending b/modules/server/src/test/scala/es/weso/rdfshape/server/api/ValidateShExTest.pending deleted file mode 100644 index 01b4f724..00000000 --- a/modules/server/src/test/scala/es/weso/rdfshape/server/api/ValidateShExTest.pending +++ /dev/null @@ -1,182 +0,0 @@ -package es.weso.rdfshape.server.server - -import cats._ -import org.http4s._ -import org.http4s.dsl.io._ -import org.http4s.implicits._ -import cats.effect._ -import es.weso.rdf.nodes.{IRI, RDFNode} -import es.weso.shapeMaps.{Status => ShapeMapStatus, _} -import io.circe.Json -import io.circe.parser._ -import org.http4s.client.blaze.BlazeClientBuilder -import org.http4s.{Request, Response, Uri} -import org.http4s.{Query => HQuery} -import org.scalatest.funspec.AnyFunSpec -import org.scalatest.matchers.should.Matchers -import org.http4s.dsl.io._ -import scala.concurrent.ExecutionContext.global -import org.http4s.Uri.{Path => UriPath} - - -class ValidateShExTest extends AnyFunSpec with Matchers { - - val ip = "0.0.0.0" - val port = 8080 - implicit val timer: Timer[IO] = IO.timer(global) - implicit val cs: ContextShift[IO] = IO.contextShift(global) - - def serve(req: Request[IO]): IO[Response[IO]] = { - val blocker = Blocker[IO] - - blocker.use { case (blocker) => - BlazeClientBuilder[IO](global).resource.use { case client => - SchemaService[IO](blocker, client).routes.orNotFound.run(req) - } - } - } - - describe("ValidateShEx") { -/* it("Should return 200 when asking for root") { - val response = serve(Request(GET, Uri(path = "/"))) - response.unsafeRunSync.status should be(Ok) - } */ -/* - it("Should run test API method") { - val response = serve(Request( - GET, - Uri( - path = 
"/api/test", - query = HQuery.fromPairs(("name", ""))))) - response.status should be(Ok) - response.as[String].unsafeRunSync() should be("Hello ") - } - - it("Should validate a single example using ShEx and TargetDecls") { - val dataStr = - """prefix : - |prefix sh: - |:x :p 1 . - |:S sh:targetNode :x . - |""".stripMargin - - val schemaStr = - """prefix : - |:S { :p . } - |""".stripMargin - - val response = serve(Request( - GET, - Uri( - path = "/api/validate", - query = HQuery.fromPairs( - ("data", dataStr), - ("schema", schemaStr), - ("schemaFormat", "SHEXC"), - ("triggerMode", "TargetDecls"), - ("schemaEngine", "ShEx"))))) - - response.status should be(Ok) - val strResponse = response.as[String].unsafeRunSync() - val jsonResponse = parse(strResponse).getOrElse(Json.Null) - val isValid: Option[Boolean] = - jsonResponse.hcursor.get[Boolean]("valid").toOption - isValid shouldBe Some(true) - } -*/ - it("Should validate a single example using ShEx and shapeMap") { - val dataStr = - """prefix : - |:x :p 1 . 
- |""".stripMargin - - val schemaStr = - """prefix : - |:S { :p [ 1 2 ] } - |""".stripMargin - - val shapeMapStr = ":x@:S" - - val ioResponse = serve(Request( - GET, - Uri( - path = UriPath.fromString("/api/schema/validate"), - query = HQuery.fromPairs( - ("data", dataStr), - ("schema", schemaStr), - ("schemaFormat", "SHEXC"), - ("triggerMode", "ShapeMap"), - ("shape-map", shapeMapStr), - ("schemaEngine", "ShEx") - )))) - - val response = ioResponse.unsafeRunSync - response.status should be(Ok) - val strResponse = response.as[String].unsafeRunSync() - val jsonResponse = parse(strResponse).getOrElse(Json.Null) - val isValid: Option[Boolean] = - jsonResponse.hcursor.get[Boolean]("valid").toOption - isValid shouldBe Some(true) - val x = IRI("http://example.org/x") - val s: ShapeMapLabel = IRILabel(IRI("http://example.org/S")) - shapeMapStatus(jsonResponse, x, s, Conformant) - } - - it("Should fail to validate a wrong example using ShEx and shapeMap") { - val dataStr = - """prefix : - |:x :p 1 . 
- |""".stripMargin - - val schemaStr = - """prefix : - |:S { :p [2 3] } - |""".stripMargin - - val shapeMapStr = ":x@:S" - - val ioResponse = serve(Request( - GET, - Uri( - path = UriPath.fromString("/api/schema/validate"), - query = HQuery.fromPairs( - ("data", dataStr), - ("schema", schemaStr), - ("schemaFormat", "SHEXC"), - ("triggerMode", "ShapeMap"), - ("shape-map", shapeMapStr), - ("schemaEngine", "ShEx") - )))) - - val response = ioResponse.unsafeRunSync - response.status should be(Ok) - val strResponse = response.as[String].unsafeRunSync() - val jsonResponse = parse(strResponse).getOrElse(Json.Null) - val isValid: Option[Boolean] = - jsonResponse.hcursor.get[Boolean]("valid").toOption - isValid shouldBe Some(true) - val x = IRI("http://example.org/x") - val s: ShapeMapLabel = IRILabel(IRI("http://example.org/S")) - shapeMapStatus(jsonResponse, x, s, NonConformant) - } - - def isNodeShape(a: Association, x: RDFNode, s: ShapeMapLabel): Boolean = a.node match { - case RDFNodeSelector(n) => n == x && a.shape == s - case _ => false - } - - def shapeMapStatus(response: Json, node: RDFNode, label: ShapeMapLabel, status: ShapeMapStatus): Unit = { - response.hcursor.downField("shapeMap").as[ShapeMap].fold( - failure => fail(failure.message), - shapeMapReturned => { - shapeMapReturned.associations.filter(isNodeShape(_, node, label)).headOption.fold( - fail(s"No association found for node $node in shapeMap $shapeMapReturned") - )(a => - a.info.status should be(status) - ) - } - ) - } - - } -} diff --git a/modules/server/src/test/scala/es/weso/rdfshape/server/api/WebServiceTest.pending b/modules/server/src/test/scala/es/weso/rdfshape/server/api/WebServiceTest.pending deleted file mode 100644 index d677a31f..00000000 --- a/modules/server/src/test/scala/es/weso/rdfshape/server/api/WebServiceTest.pending +++ /dev/null @@ -1,67 +0,0 @@ -package es.weso.rdfshape.server.server - -import org.scalatest._ -//import org.scalatestplus.selenium.HtmlUnit -import 
org.openqa.selenium.htmlunit.HtmlUnitDriver -import cats._ -import org.http4s._ -import org.http4s.dsl.io._ -import org.http4s.implicits._ -import cats.effect._ -import org.http4s.dsl.Http4sDsl -import org.http4s.server.{Router, Server} -import org.http4s.server.blaze.{BlazeBuilder, BlazeServerBuilder} -import org.http4s.server.middleware.CORS -import org.http4s.server.staticcontent.FileService.Config -import org.log4s.getLogger -// import fs2.Stream -import scala.util.Properties.envOrNone -import cats.implicits._ -import cats.effect._ -import org.http4s.twirl._ -import es.weso._ -import org.http4s.server.staticcontent._ -import scala.concurrent.ExecutionContext - - - - -class WebServiceTest extends FunSpec - with Matchers - with EitherValues - with BeforeAndAfter - with WebBrowser { - implicit val webDriver: WebDriver = new HtmlUnitDriver - val ip = "0.0.0.0" - val port = 8080 - implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global) - implicit val cs: ContextShift[IO] = IO.contextShift(ExecutionContext.global) - val shaclexServer = new RDFShapeServer[IO](ip, port) - var server: Server[IO] = null - - before { - println(s"Before tests...starting server...") - // val builder = BlazeBuilder[IO].bindHttp(port,"localhost").mountService(shaclexServer.service).start - // server = builder.unsafeRunSync - } - - after { - println(s"After tests...closing server and browser...") - // server.shutdown.unsafeRunSync - close - quit - } - - // val host = s"http://localhost:$port" - val host = s"http://weso.rdfshape.es" - - - /* describe(s"Home page") { - - it(s"Should contain SHACLex") { - go to (host) - pageTitle should contain("SHACLex") - } - - } */ -} diff --git a/modules/server/src/test/scala/es/weso/wikibaserdf/WikibaseRDFTest.pending b/modules/server/src/test/scala/es/weso/wikibaserdf/WikibaseRDFTest.pending deleted file mode 100644 index 1e2a238d..00000000 --- a/modules/server/src/test/scala/es/weso/wikibaserdf/WikibaseRDFTest.pending +++ /dev/null @@ -1,181 +0,0 
@@ -package es.weso.wikibaserdfserver - -import org.scalatest.funspec._ -import org.scalatest.matchers.should._ -import es.weso.rdf.nodes.IRI -import cats.implicits._ -import cats.effect._ -import cats.effect.concurrent._ -import es.weso.rdf.nodes.RDFNode -import fs2.Stream -import es.weso.rdf.triples.RDFTriple -import org.apache.jena.rdf.model.{RDFNode => JenaRDFNode,Resource => JenaResource,_} - -// import es.weso.rdfshape.server.server.WikibaseRDF -// import es.weso.rdf.jena.RDFAsJenaModel - -class WikibaseRDFTest extends AnyFunSpec with Matchers { - -/* class Model(name: String, var closed: Boolean) { - def close(): Unit = this.closed = true - - def isClosed(): Boolean = this.closed - } - - object ModelFactory { - def createDefaultModel: Model = new Model("empty", false) - } */ - - - case class RDFAsJenaModel(modelRef: Ref[IO,Model]) { - - def getModel: IO[Model] = modelRef.get - - def triplesWithSubject(node: RDFNode): Stream[IO,RDFTriple] = streamFromIOs(for { - model <- getModel - ts <- if (model.isClosed) { - IO.raiseError(new RuntimeException(s"Closed model!!!")) - } else - IO{ - pprint.log(node, s"Obtaining triples with Subject from ${node}") - List[RDFTriple]() - } - } yield ts) - - def streamFromIOs[A](vs: IO[List[A]]): Stream[IO,A] = { - Stream.eval(vs).flatMap(x => Stream(x: _*)) - } - } - - def showJenaModel(rdf: RDFAsJenaModel): IO[Unit] = for { - model <- rdf.getModel - } yield { - pprint.pprintln(s"RDF id: ${System.identityHashCode(rdf)}, Model id: ${System.identityHashCode(model)}") - } - - object RDFAsJenaModel { - - def empty: IO[Resource[IO,RDFAsJenaModel]] = { - def acquire: IO[RDFAsJenaModel] = { - pprint.pprintln(s"### Acquire RDF") - val model = ModelFactory.createDefaultModel - pprint.log(System.identityHashCode(model),"Model Id in acquire") - val jenaModel = RDFAsJenaModel.fromModel(model) - pprint.log(System.identityHashCode(jenaModel),"RDF Id in acquire") - jenaModel - } - IO(Resource.make(acquire)(closeJenaModel)) - } - - def 
fromModel(model: Model): IO[RDFAsJenaModel] = for { - ref <- Ref.of[IO, Model](model) - } yield { - val rdf = RDFAsJenaModel(ref) - pprint.pprintln(s"FromModel ID: ${System.identityHashCode(rdf)})") - rdf - } - - private def closeJenaModel(m: RDFAsJenaModel): IO[Unit] = for { - _ <- IO(pprint.log(m, s"Closing Model")) - model <- m.getModel - } yield model.close() - - } - - case class CachedState(rdf: RDFAsJenaModel) - - object CachedState { - def initial: IO[Resource[IO,CachedState]] = for { - res <- RDFAsJenaModel.empty - } yield res.evalMap(rdf => for { - modelRef <- rdf.getModel - _ <- showJenaModel(rdf) - _ <- IO { pprint.log(modelRef,"CachedState.initial:")} - _ <- IO { pprint.log(rdf,s"CachedState.initial: ${rdf} ${modelRef} Closed?:${modelRef.isClosed}")} - _ <- showJenaModel(rdf) - } yield CachedState(rdf)) - } - - - case class WikibaseRDF(refCached: Ref[IO,CachedState]) { - def triplesWithSubject(node: RDFNode): Stream[IO,RDFTriple] = for { - cachedState <- Stream.eval(refCached.get) - _ <- Stream.eval(showJenaModel(cachedState.rdf)) - ts <- cachedState.rdf.triplesWithSubject(node) - } yield ts - } - - object WikibaseRDF { - - val wikidata : IO[Resource[IO,WikibaseRDF]] = for { - res <- CachedState.initial - } yield res.evalMap(initial => for { - ref <- Ref[IO].of(initial) - _ <- IO(pprint.log(ref, "WikibaseRDF.wikidata")) - } yield WikibaseRDF(ref)) - } - - - describe(s"Use wikibaseRdf") { - val item = IRI("http://www.wikidata.org/entity/Q29377880") -/* it(s"Should use wikibase rdf once with the same item cached") { - val r: IO[(Int,Int)] = WikibaseRDF.wikidata.use(wd => for { - ts1 <- wd.triplesWithSubject(item).compile.toList - ts2 <- wd.triplesWithSubject(item).compile.toList - } yield ((ts1.length, ts2.length))) - r.attempt.unsafeRunSync.fold( - s => s"Error: ${s.getMessage}", - pair => { - val (n1,n2) = pair - n1 should be (n2) - } - ) - } */ - - it(s"Should use wikibase rdf twice") { - val r: IO[((Int,Int),(Int,Int))] = for { - res <- 
WikibaseRDF.wikidata - pair1 <- res.use(wd => for { - _ <- { pprint.log(wd,s"WD asking for item"); ().pure[IO]} - ts1 <- wd.triplesWithSubject(item).compile.toList - _ <- { pprint.log(wd, s"Asking for item, 2nd time (cached?)"); ().pure[IO]} - ts2 <- wd.triplesWithSubject(item).compile.toList - } yield ((ts1.length, ts2.length))) - _ <- IO(pprint.pprintln("=======================")) - res2 <- WikibaseRDF.wikidata - pair2 <- res2.use(wd => for { - _ <- { pprint.log(wd, s"WD 2nd time"); ().pure[IO]} - ts1 <- wd.triplesWithSubject(item).compile.toList - _ <- { pprint.log(wd,s"WD 2nd time 2nd round (cached?)"); ().pure[IO]} - ts2 <- wd.triplesWithSubject(item).compile.toList - } yield ((ts1.length, ts2.length))) - } yield (pair1,pair2) - r.attempt.unsafeRunSync.fold( - s => fail(s"Error: ${s.getMessage}"), - ppair => { - val (n1,n2) = ppair - info(s"Pairs: $ppair") - n1 should be (n2) - } - ) - } - -/* ignore(s"Should obtain 2 empty RDFAsJenaModel's...") { - - val r: IO[Unit] = RDFAsJenaModel.empty.use { case rdf1 => for { - model1 <- rdf1.getModel - _ <- IO (pprint.log(s"RDF1: ${rdf1}. IsClosed?: ${model1.isClosed}")) - _ <- RDFAsJenaModel.empty.use { rdf2 => for { - model2 <- rdf2.getModel - _ <- IO(pprint.log(s"RDF1 (inside for): ${rdf1}. IsClosed?: ${model1.isClosed}")) - _ <- IO(pprint.log(s"RDF2 (inside for): ${rdf2}. IsClosed?: ${model2.isClosed}")) - } yield () } - } yield () } - r.attempt.unsafeRunSync.fold( - s => fail(s"Error: $s"), - _ => info(s"Finnished") - ) - } */ - - } -} diff --git a/src/main/resources/logback-configurations/logback.groovy b/src/main/resources/logback-configurations/logback.groovy index 8c1b24bd..227a0596 100644 --- a/src/main/resources/logback-configurations/logback.groovy +++ b/src/main/resources/logback-configurations/logback.groovy @@ -1,24 +1,21 @@ /** * Logback configuration file. 
- * Further documentation: https://logback.qos.ch/manual/groovy.html - */ + * Further documentation: https://logback.qos.ch/manual/groovy.html*/ +import ch.qos.logback.classic.AsyncAppender import ch.qos.logback.classic.filter.ThresholdFilter /** - * Prevent logback debug messages from being printed on console - */ + * Prevent logback debug messages from being printed on console*/ statusListener(NopStatusListener) /** - * Folder, relative to the program's execution path, where program logs will be stored - */ + * Folder, relative to the program's execution path, where program logs will be stored*/ def LOGS_FOLDER = "logs" def LOGS_FILENAME = "rdfshape-api.log" /** - * Name (key) of the system property determining the application verbosity - */ + * Name (key) of the system property determining the application verbosity*/ def systemPropertyVerbosity = "rdfshape.api.verbosity.level" /* Define log appenders for Console, Files, etc. */ @@ -27,8 +24,7 @@ def systemPropertyVerbosity = "rdfshape.api.verbosity.level" * Rolling file appender. Create several files inside LOGS_FOLDER. Archives and compresses old logs. * Choices are: * - Store some time of logs before rollback (http://logback.qos.ch/manual/appenders.html#TimeBasedRollingPolicy) - * -> Store N compressed log files of size M before rollback (http://logback.qos.ch/manual/appenders.html#SizeBasedTriggeringPolicy) - */ + * -> Store N compressed log files of size M before rollback (http://logback.qos.ch/manual/appenders.html#SizeBasedTriggeringPolicy)*/ appender("ROLLING", RollingFileAppender) { file = "$LOGS_FOLDER/$LOGS_FILENAME" encoder(PatternLayoutEncoder) { @@ -56,8 +52,7 @@ appender("ROLLING", RollingFileAppender) { * Console appender. Show log messages on console while the app is running. * Log messages are emitted from the code with the aid of the scala-logging library. * A filter is used to determine which messages are shown on console or not, adapting to the user's selected verbosity level. 
- * http://logback.qos.ch/manual/filters.html - */ + * http://logback.qos.ch/manual/filters.html*/ appender("CONSOLE", ConsoleAppender) { encoder(PatternLayoutEncoder) { pattern = "%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n" @@ -86,18 +81,21 @@ appender("CONSOLE", ConsoleAppender) { } } +/* Async appending to files to reduce overhead */ +appender("ASYNC", AsyncAppender) { + appenderRef("ROLLING") +} + /* Define loggers and associated appenders */ /** * Root logger: * - Show console messages - * - Append DEBUG and higher messages to the log files, whether they are shown on console or not - */ -root(DEBUG, ["CONSOLE", "ROLLING"]) + * - Append DEBUG and higher messages to the log files, whether they are shown on console or not*/ +root(DEBUG, ["CONSOLE", "ROLLING", "ASYNC"]) /*Additional settings */ /** - * Scan for configuration changes in this file periodically - */ + * Scan for configuration changes in this file periodically*/ scan("30 seconds") \ No newline at end of file diff --git a/src/main/scala/es/weso/rdfshape/Main.scala b/src/main/scala/es/weso/rdfshape/Main.scala index 449557cf..2d1f2c2d 100644 --- a/src/main/scala/es/weso/rdfshape/Main.scala +++ b/src/main/scala/es/weso/rdfshape/Main.scala @@ -1,10 +1,13 @@ package es.weso.rdfshape +import ch.qos.logback.classic.{Logger => Logger2} +import ch.qos.logback.core.joran.util.ConfigurationWatchListUtil import com.typesafe.scalalogging._ import es.weso.rdfshape.cli.ArgumentsData.unapply import es.weso.rdfshape.cli.{ArgumentsData, CliManager} import es.weso.rdfshape.logging.LoggingManager import es.weso.rdfshape.server.Server +import org.slf4j.{Logger, LoggerFactory} object Main extends App with LazyLogging { From a700fbc6072b9d2713990de35d63a4ae0b8a4e18 Mon Sep 17 00:00:00 2001 From: ulitol97 Date: Fri, 11 Feb 2022 18:28:23 +0100 Subject: [PATCH 32/32] End refactoring --- .../api/routes/endpoint/logic/Endpoint.scala | 24 +++++------ .../endpoint/service/EndpointService.scala | 40 ++---------------- 
.../schema/WikibaseSchemaExtract.scala | 18 +++----- .../wikibase/service/WikibaseService.scala | 2 +- .../utils/networking/NetworkingUtils.scala | 11 ++++- website/static/favicon.ico | Bin 2758 -> 14214 bytes 6 files changed, 34 insertions(+), 61 deletions(-) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala index c289e415..a3f24353 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/logic/Endpoint.scala @@ -1,7 +1,7 @@ package es.weso.rdfshape.server.api.routes.endpoint.logic -import cats.data.EitherT import cats.effect.IO +import cats.implicits.catsSyntaxEitherId import com.typesafe.scalalogging.LazyLogging import es.weso.rdf.RDFReader import es.weso.rdf.jena.{Endpoint => EndpointJena} @@ -65,17 +65,17 @@ private[api] object Endpoint extends LazyLogging { */ def getEndpointUrl( partsMap: PartsMap - ): EitherT[IO, String, URL] = for { - maybeStr <- EitherT.liftF[IO, String, Option[String]]( - partsMap.optPartValue("endpoint") - ) - ep <- maybeStr match { - case None => - EitherT.leftT[IO, URL](s"No value provided for parameter endpoint") - case Some(str) => - Try(new URL(str)) match { - case Success(url) => EitherT.rightT[IO, String](url) - case Failure(ex) => EitherT.leftT[IO, URL](ex.getMessage) + ): IO[Either[String, URL]] = for { + maybeStr <- partsMap.optPartValue("endpoint").map(_.toRight("")) + + ep = maybeStr match { + case Left(_) => + val msg = s"No value provided for parameter endpoint" + msg.asLeft[URL] + case Right(endpointStr) => + Try(new URL(endpointStr)) match { + case Success(url) => url.asRight[String] + case Failure(ex) => ex.getMessage.asLeft[URL] } } } yield ep diff --git 
a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala index 4a1879e1..6a987420 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/endpoint/service/EndpointService.scala @@ -7,14 +7,12 @@ import es.weso.rdfshape.server.api.definitions.ApiDefinitions.api import es.weso.rdfshape.server.api.routes.ApiService import es.weso.rdfshape.server.api.routes.endpoint.logic.Endpoint.{ getEndpointAsRDFReader, - getEndpointInfo, getEndpointUrl } -import es.weso.rdfshape.server.api.routes.endpoint.logic.EndpointStatus._ +import es.weso.rdfshape.server.api.routes.endpoint.logic.Outgoing import es.weso.rdfshape.server.api.routes.endpoint.logic.Outgoing.getOutgoing import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery.mkSparqlQuery -import es.weso.rdfshape.server.api.routes.endpoint.logic.{Endpoint, Outgoing} import es.weso.rdfshape.server.api.utils.parameters.IncomingRequestParameters.{ EndpointParameter, LimitParameter, @@ -56,12 +54,14 @@ class EndpointService(client: Client[IO]) * - results [Object]: Query results * - bindings: [Array]: Query results, each item being an object mapping each variable to its value */ + /** + */ case req @ POST -> Root / `api` / `verb` / "query" => req.decode[Multipart[IO]] { m => val partsMap = PartsMap(m.parts) val r: EitherT[IO, String, Json] = for { - endpointUrl <- getEndpointUrl(partsMap) + endpointUrl <- EitherT(getEndpointUrl(partsMap)) endpoint <- getEndpointAsRDFReader(endpointUrl) either <- EitherT .liftF[IO, String, Either[ @@ -73,7 +73,6 @@ class EndpointService(client: Client[IO]) eitherQuery <- EitherT.fromEither[IO](either) json <- { - 
eitherQuery.rawQuery.fold( err => EitherT.left(IO.pure(err)), raw => { @@ -97,37 +96,6 @@ class EndpointService(client: Client[IO]) } yield resp } - /** Attempt to contact an endpoint and return metadata about it. - * Receives a JSON object with the input endpoint: - * - endpoint [String]: Target endpoint - * Returns a JSON object with the endpoint response: - * - head [Object]: Query metadata - * - vars: [Array]: Query variables - * - results [Object]: Query results - * - bindings: [Array]: Query results, each item being an object mapping each variable to its value - */ - case req @ POST -> Root / `api` / `verb` / "info" => - req.decode[Multipart[IO]] { m => - val partsMap = PartsMap(m.parts) - for { - endpointUrl <- getEndpointUrl(partsMap).value - response <- endpointUrl match { - case Left(err) => errorResponseJson(err, BadRequest) - case Right(endpointUrl) => - val endpointInfo = getEndpointInfo(endpointUrl) - endpointInfo match { - case Endpoint(errMsg, OFFLINE) => - errorResponseJson( - errMsg, - InternalServerError - ) - case _ => Ok(endpointInfo.asJson) - } - } - } yield response - - } - /** Attempt to contact a wikibase endpoint and return the data (triplets) about a node in it. 
* Receives a JSON object with the input endpoint, node and limits: * - endpoint [String]: Target endpoint diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaExtract.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaExtract.scala index ce02c156..9a17ef1b 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaExtract.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/logic/operations/schema/WikibaseSchemaExtract.scala @@ -71,17 +71,13 @@ private[wikibase] case class WikibaseSchemaExtract( .fromString(strRdf, Turtle.name) .flatMap( _.use(rdf => - for { - rdfSerialized <- rdf.serialize(Turtle.name) - nodeSelector = RDFNodeSelector(IRI(entityUri)) - inferred <- SchemaInfer.runInferSchema( - rdf, - nodeSelector, - Schemas.shEx.name, - IRI(s"http://example.org/Shape_${wdEntity.localName}"), - InferOptions.defaultOptions.copy(maxFollowOn = 3) - ) - } yield inferred + SchemaInfer.runInferSchema( + rdf, + RDFNodeSelector(IRI(entityUri)), + Schemas.shEx.name, + IRI(s"http://example.org/Shape_${wdEntity.localName}"), + InferOptions.defaultOptions.copy(maxFollowOn = 3) + ) ) ) ) diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala index af64bb91..c7765e2c 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/api/routes/wikibase/service/WikibaseService.scala @@ -274,7 +274,7 @@ class WikibaseService(client: Client[IO]) } yield response } - // TODO: Needs exhaustive testing and client changes + // TODO: Needs exhaustive testing. Timeouts. 
/** Attempts to extract an schema (ShEx) from a given entity present in wikidata * using SheXer. See [[https://github.com/DaniFdezAlvarez/shexer]]. * Receives an entity URI as payload. diff --git a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala index ce914399..73d5a6b9 100644 --- a/modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala +++ b/modules/server/src/main/scala/es/weso/rdfshape/server/utils/networking/NetworkingUtils.scala @@ -16,6 +16,15 @@ object NetworkingUtils extends LazyLogging { def getUrlContents(urlString: String): Either[String, String] = { Try { val url = new URL(urlString) + getUrlContents(url) + } match { + case Failure(exception) => Left(exception.getMessage) + case Success(value) => value + } + } + + def getUrlContents(url: URL): Either[String, String] = { + Try { val src = Source.fromURL(url) val str = src.mkString src.close() @@ -24,7 +33,7 @@ object NetworkingUtils extends LazyLogging { case Success(urlContent) => Right(urlContent) case Failure(exception) => val msg = - s"Could not obtain data from url $urlString." + s"Could not obtain data from url $url." 
logger.warn(s"$msg - ${exception.getMessage}") Left(msg) } diff --git a/website/static/favicon.ico b/website/static/favicon.ico index 4f09059e39d99d16093e1f66be45886236bc7d87..ee59ead367bc0b8e60bafbc2e3f53e213da9a39a 100644 GIT binary patch literal 14214 zcmeHNcT5&Z6klv-Z!z|QEfS-#Mif+HZ_yZ|##s5w7%LGx1mWZ?XNd*l>BWMY*mB0$ z6;CWE$Em2Xo(+}ntcd6#21P~Rz2CsD%d-2i-|oWQB|I|B?tHT|?>F=2&3kV~lDzoO z-%sNI-cpdSB-NHA$%m6arw~pkHzFdEN|JM9@bTkE>Hhuu6dxZ?M~@z*J$v?ubmYhp zI&-MxF4K79D_zZ~ej6rd3kvWT#Xwyrg7uO(emZX>EOYGbo=&gdi?k?y?OIS zq$f|F(49MX2=j|?Cr+G5&6_t@#-vlHPPAmn5=u=?{ZHU_xm+}J=1i(osS=egU7C9K z>`5CoY#_|7RRXrHTenibe*LISnKD$qe0iEYc`{)gFcrJ3zkU0*Oh`zG;G=To$~1lY zbb9pYkyUtx-(k$qPqk{*sBGD?G5LZD=y5?G2Y(ZRIXe(3JMBB-wLmvKYgM{ z7cS5rTei@dHEZbF{{8g#^XJO%Rv&n5DLg!!DpssW_3G6VI<6@3e*OAYij0gDx~4;i z4s`S8O@;NWj11a0XAXr`uTH$r(cHZBJ?FD|{jTiVmHs?_T&ZvMAvrmj{QUgHn#S7s z{Q0xvGX3t|J8Agv;X?0(g@w^4o3RdxK7VCWeW)>&p>TfPYN9*=urV#QB`z$mx6xBV#d;R)#Wlu5cE0OmV ztUoosW}kr-`bt~h&KxgXxKQ9dckY~NywKy2|CTLV60cVysC0+rS}luOzK!02ah1P6 z@fs0rYwIN+J7(CYyC0Zuf?b)%x2d_^xN)P9UA;A`?(1WAHqEPBw}32adDp(rVXU!> z7pt417~V7GdU&==0N4O0`yfHB`VotBJePD*Z@V*{1dNjd4QuC_!44k2CJKxpW zyMRsoja4}Y&kr6v5Pk@BE0ZBse_lsjx^zJu)0D$D)PcEkX%6o{;X0VHJ~0+QwQg;= zuR4hpjAQ?4&HXXyVuh_KF;AFVE0lWg9{oCP8ZBqN7FoMCg|lz6plMV3h1Wv@`+4vS zj0pRUdS30wF#6cfJ_oQ|ym%3>1@Nw0w@%o*&mHhfVDrLmm31vTbVq8Oc65{62ivt` z&s?xz0g4J>9X4#3(AjeR;>fkK3(R)bs+#Bi{rd}_NDnJwLsOj2J^KPU1ce z9Xf<63f2=RPKfdwH*OexYN=dp`xO^Pnb8+7UIo&1-Mbe*DOh0>qWt8^liE)#z3a`VrG{*L1iuuR;^Giu{f70~vuC2*$S?P# zOYQnB&bd_c2|Wv}z^ulKJqU3O#9-}`cHgwyp4IPhZh}}Nd^c_*UqY;K;J|@an^wM4 z%daPUCVcH=-Vfqdt)l%r=7*!A=p6gfsSh8Twlx(d_+k9`@#N!!a9e(yct_k9F-Sf` z#P62LQ>?I^6ZmZI#}+Lph-D6O9r#7?=@HMG#Wv2*Z0|krAod759DX>?ftd2mDc%zj z5`=tX|FJ`A87^$CeqbAYJ^Nq4q!)jR;&P;UoPzmGDVV(trV%uNNYZZfjvG4>mW4yDjMJTfk$d_LF1c}e2t&2DVW6_S{ua%Q1M@!<)o=Jmk~}n_fBa&Z}9IGGD$l?4Hl)sV3LS^J5IL zV*kb2A28b?Jzq`cUG~G*1_XEA&35b+4yH(@FiE9$_Hm6#vIZJZ|PX}j~fL1$j!yWyN8iuYw;Fq?SKlX~^) zCE~32IA&H96wNVj$fJ6`%sgYg#^uJk6}E8C1n2EIui&#`(>Uevz#WZ7jT%v_R;}{g 
z6Hq#^`H-}8r^j-vr-w5$GpTdu&Qz;bE&0xd$5??GX9hKD)S&k5+uPoU^oC{=pVfP^ zzXr36p65F#Nd>Hyg8-L(B9%Du<17`*}zsCI}C+QRKg*dOq85s1Hkqkp_gZb{y zS&r+H;d*QrxStckq%vfQQZ9QM=i9DkSXVc3h%(MT8r zVGj=&Kq)-tGT~lIvu4f29ORv`If9eh7wuq9J9g|SYz^o-_$5vfV&}M9aB}r(I>@#R z{G0@~2h-TT!FYh+C#+bpLY(!(hso2Mj^gBQM0>C=ac>KD3G_ix#Nq@foqy0BSew`j z0|NtX-H(v*n8^{>z?m-k9Wi1A!LKMPLOd6~DE3a=aWwK>%+37{g71KPlZzHD68bTP??7Rlxg+QT z>>0o_c<^Ac9x;Bf4RFUcJ3G5b)+F;}xn1zQnsp1-74C*0qLI9(PMS1H z_?WoAg7_u$oL(HPIC(DV%etJGj9tmtmB>Je449Asu0k$O@c|;wmb^qBFG&F?=bf(_ z=Q*60<~*BoFU~VKmpFGJ4{%XD@+K}gW&D9S3`G7FuhMree9jFhIhx^yT)DaA63t29 za+mmB3M?e|j?b-UIxnQXKrJ8gdwzbs6dW=>Imd7b{87kxJmQh___&bI&mBLEn>?QK V_@bg1Z|+fyKk=}0y<9S#{{#6r5KI67 literal 2758 zcmV;%3OV(OP)a7lf&gVT|n3z~zUjF*^YmG)@bjX`GZz?M*larH!f`aIDIsmx1 zxLm(}eSCbJ=TMfphD<>kEqumT{@_4hyLaCxDCh(*@!&v7BzZ>0zVL7l0FcY&K|w*y z&CNogaO>8sc!}S=d$)1pMoUXeu~@ut;X<=+2mH91OY|Iag@4+K`Ztk&%)6_wUaI7XW_xsRqCjM@M9EP@9!ihc%c? z=DmCOs8s6Sy?a$E)$GJquU@UFsL0IBbar+&n>8}B2tnd&YI*<=d?RR&9(Cy&%a$$6 z&CP9XZ9RYfyg@(1sOanKb9QzP4i3I`>lRVU;^NMrApKDqfN>llF)0rZ*5k(~^exfR z(P?REPoF+rxpJk>Xt>5d{O|*UAeSy(B8rP3=ceNV00Myjn1wD{WEel^&!1Ifx+V42&He-=gTbty?*`p_Sg=Tmut>!4L=pckbM2 zZEZD9eB;Iq3Waj=*wE=)*$J_x%ari)OY)GM@8)p6x z85y}?!Gdeot{EX#tJRsAnS1x{CA9k%7kAdwbn9>f1E;Oifdd=9{L%?v%rq{7jQsdx z5Vo}$h)$=6g@xtj=8lez8WPvm*7o-H9y)XgKP~_i6#R@`czP~3zy$yVL2_~q1_p8f zMgX+N9H?kCd2({V(W9Q9n&Qx*LqkJDB_$<>#3GT1$z&RR%$U&g=kKwLFTOAy`V0oW zs3`LK^<7*p8Q>#;9AFec0Wg9fzii*muB!TTT%4bYDO_D$ZEbBuA`#Y{%JZtLt9d*g zjYcCxj7KCA>6>r7!oxip8>NpQO9uwzG#bU$*4o$C*}-8kg3PjEBM9Q-<5N{tHQCl3 zujbF6FAxZbp35R43IP(ZKcZvtDfkx_-NMtm_LhbZEfLK$VuYi z;b8!nKYzX%?!@9F`b`8m_t_WQ+S(cbhK7bfM?5hx0RUKDZiY}aUboWl>tcUK?s@a( z0RUDKnIvX1Cl97(I~9wMh(w|Kme0tIfngyqjTnm!vlSS~MbY@w)Bt@W#(nv6-CR6l z@i2{;!{HzZ(%IQL7r&TJoN%AX9TW2)GP1~QpBR|Ey*=oN84QMEg>gqj zxalnE=}l%sx3#q~nM^x7JJ5ehuV25up`l@ppax1y+tJbSt*)7#-c(#{&cCs~zJBf6 zwb;{firC-Zzqz@&zrUZTJFQlW6$p*~%H`UiXuPhOm{?`Bn+eI~^2*A}fPl#`nE-LD_tLp_eJ7M9%vny64?$~iHJ^hhNrJmVY7uGd3-JP9-gosN^ 
zOO;BczrR1$oR;T#d3mW+s=B&a?rW7wbM|aeN{U#eq5|ljoU{PrYu2!C-u#oh`?T_K zXXl`k(-rJ`<}uo|f`Wo%GMVlko0fwdIdY`FzP_cU#l(1xMjICP&!nUpmC9;5F6Pbh z#fvdMKfSWD9Y2!^NhFeo4KFt;^bd6SgIKLrdw6&Z4-XrV7}hH&6iQZBmP{rS2m~{|$Yg`KU#OA9 zVi}2)ppP37;fDX7OG-+5^5jWIMux${8JT|Fx^;<(iK(fn+1c3!Ke1SJ-1P^n+f+uB zni`OnwgV@wP$-O#kB^Is!^-#iGG5z6Q8X+pEI&VAC=~km_~`7tz5j7=_`6DFBu$^8 zaR4P2KM@gb_*ns8{_V)HP8Z9O!W?*0dGb)v;+S;z0H)}3muDExvrLXT7qO3zhLjr-o($bR6 zW($QvJVOY)GaVWlij9q>)9DNbBPuGYtE-DBpT2r}dSYW^t*x!8RO<2L$91bPs|deI zmrAA4(a}~`R%9}H%a$!^X=yz@J@|*eefu^uGtGX(*h*z&(5dtvtYlXqV z!K|#Tyu7^H+FBGv?d(#bU8UA^`w5H@AKJ z_8mTac*&9_M5mt9_Z0xp+uK`OT3S|CR#jEi-QBI#8ZEg304!U!%*)Hm*VmWN=VLjh zd73M+zLZL(OeX8=>r*Hc*e4*CmX=mlR%|w#!{L}+zA@0}B>sK$Uw|+9B~A=QVE_OC M07*qoM6N<$ffmjD0&