diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 000000000..5a7e61055
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,8 @@
+# Scala Steward: Reformat with scalafmt 3.8.6
+35c27463b6f38a27b119cc1d82f2e0e49e335789
+
+# Scala Steward: Reformat with scalafmt 3.9.0
+47449d54fdda17becd0fce8efd14c894563773c0
+
+# Scala Steward: Reformat with scalafmt 3.9.7
+d5280cb023facfd5af0f0a4eb456a848b63d0ee8
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 000000000..120c6893b
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,6 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "weekly"
\ No newline at end of file
diff --git a/.github/renovate.json b/.github/renovate.json
deleted file mode 100644
index 86e50ef6c..000000000
--- a/.github/renovate.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "automerge": true,
- "rebaseWhen": "conflicted",
- "labels": ["type: dependencies"],
- "packageRules": [
- {
- "matchManagers": [
- "sbt"
- ],
- "enabled": false
- }
- ]
-}
diff --git a/.github/workflows/auto-approve.yml b/.github/workflows/auto-approve.yml
index d183bced4..1d7854f14 100644
--- a/.github/workflows/auto-approve.yml
+++ b/.github/workflows/auto-approve.yml
@@ -5,9 +5,9 @@ on:
jobs:
auto-approve:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
steps:
- - uses: hmarr/auto-approve-action@v3.2.0
+ - uses: hmarr/auto-approve-action@v4.0.0
if: github.actor == 'scala-steward' || github.actor == 'renovate[bot]'
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a5b0c4b06..0bf03ed7f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,8 +1,8 @@
name: CI
env:
- JDK_JAVA_OPTIONS: -XX:+PrintCommandLineFlags # JDK_JAVA_OPTIONS is _the_ env. variable to use for modern Java
- JVM_OPTS: -XX:+PrintCommandLineFlags # for Java 8 only (sadly, it is not modern enough for JDK_JAVA_OPTIONS)
+ JDK_JAVA_OPTIONS: -XX:+PrintCommandLineFlags -Xms2G -Xmx8G -Xss4M -XX:+UseG1GC -XX:ReservedCodeCacheSize=512M -XX:NonProfiledCodeHeapSize=256M # JDK_JAVA_OPTIONS is _the_ env. variable to use for modern Java
+ SBT_OPTS: -XX:+PrintCommandLineFlags -Xms2G -Xmx8G -Xss4M -XX:+UseG1GC -XX:ReservedCodeCacheSize=512M -XX:NonProfiledCodeHeapSize=256M # Needed for sbt
on:
pull_request:
@@ -14,118 +14,133 @@ on:
jobs:
lint:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
timeout-minutes: 30
steps:
- name: Checkout current branch
- uses: actions/checkout@v3.3.0
+ uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
- - name: Setup Java
- uses: actions/setup-java@v3.12.0
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: 11
- check-latest: true
+ jvm: temurin:11
+ apps: sbt
- name: Cache scala dependencies
uses: coursier/cache-action@v6
- name: Lint code
- run: sbt check
+ run: sbt "++2.12.x; check; ++2.13.x; check; ++3.3.x; check"
benchmarks:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
- java: ['8', '11', '17']
- scala: ['2.13.11']
+ java: ['11', '21']
+ scala: ['2.13.x', '3.x']
steps:
- name: Checkout current branch
- uses: actions/checkout@v3.3.0
+ uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
- - name: Setup Java
- uses: actions/setup-java@v3.12.0
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: ${{ matrix.java }}
- check-latest: true
+ jvm: temurin:${{ matrix.java }}
+ apps: sbt
- name: Cache scala dependencies
uses: coursier/cache-action@v6
- name: Compile benchmarks
- run: sbt ++${{ matrix.scala }}! jmh:compile
+ run: sbt ++${{ matrix.scala }} jmh:compile
mdoc:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
timeout-minutes: 60
steps:
- name: Checkout current branch
- uses: actions/checkout@v3.3.0
- - name: Setup Java
- uses: actions/setup-java@v3.12.0
+ uses: actions/checkout@v5.0.0
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: 8
- check-latest: true
+ jvm: temurin:11
+ apps: sbt
- name: Cache scala dependencies
uses: coursier/cache-action@v6
- name: Check Document Generation
run: sbt compileDocs
test:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
- java: ['8', '11', '17']
- scala: ['2.12.18', '2.13.11', '3.3.0']
+ java: ['11', '21']
+ scala: ['2.12.x', '2.13.x', '3.x']
platform: ['JVM', 'JS', 'Native']
steps:
- name: Checkout current branch
- uses: actions/checkout@v3.3.0
+ uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
- - name: Setup Java
- uses: actions/setup-java@v3.12.0
+ - name: Install Boehm GC
+ if: ${{ startsWith(matrix.platform, 'Native') }}
+ run: sudo apt-get update && sudo apt-get install -y libgc-dev
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: ${{ matrix.java }}
- check-latest: true
+ jvm: temurin:${{ matrix.java }}
+ apps: sbt
- name: Cache scala dependencies
uses: coursier/cache-action@v6
- name: Install libuv
if: matrix.platform == 'Native'
run: sudo apt-get update && sudo apt-get install -y libuv1-dev
- name: Run Macros tests
- if: ${{ !startsWith(matrix.scala, '3.3.') }}
- run: sbt ++${{ matrix.scala }}! testScala2${{ matrix.platform }}
+ if: ${{ !startsWith(matrix.scala, '3.') }}
+ run: sbt ++${{ matrix.scala }} testScala2${{ matrix.platform }}
- name: Run tests
- run: sbt ++${{ matrix.scala }}! test${{ matrix.platform }}
+ run: sbt ++${{ matrix.scala }} test${{ matrix.platform }}
+
+ mima_check:
+ runs-on: ubuntu-22.04
+ timeout-minutes: 30
+ steps:
+ - name: Checkout current branch
+ uses: actions/checkout@v5.0.0
+ with:
+ fetch-depth: 300
+ - name: Fetch tags
+ run: git fetch --depth=300 origin +refs/tags/*:refs/tags/*
+ - name: Setup Action
+ uses: coursier/setup-action@v1
+ with:
+ jvm: temurin:11
+ apps: sbt
+ - run: sbt +mimaReportBinaryIssues
ci:
- runs-on: ubuntu-20.04
- needs: [lint, mdoc, benchmarks, test]
+ runs-on: ubuntu-22.04
+ needs: [lint, mdoc, benchmarks, test, mima_check]
steps:
- name: Aggregate of lint, and all tests
run: echo "ci passed"
publish:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
timeout-minutes: 60
needs: [ci]
- if: github.event_name != 'pull_request'
+ if: ${{ github.event_name != 'pull_request' }}
steps:
- name: Checkout current branch
- uses: actions/checkout@v3.3.0
+ uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
- - name: Setup Java
- uses: actions/setup-java@v3.12.0
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: 8
- check-latest: true
+ jvm: temurin:11
+ apps: sbt
- name: Release
run: sbt ci-release
env:
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml
index f0b96769c..80e062055 100644
--- a/.github/workflows/release-drafter.yml
+++ b/.github/workflows/release-drafter.yml
@@ -6,8 +6,8 @@ on:
jobs:
update_release_draft:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
steps:
- - uses: release-drafter/release-drafter@v5
+ - uses: release-drafter/release-drafter@v6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/scala-steward.yml b/.github/workflows/scala-steward.yml
new file mode 100644
index 000000000..822b2a9cf
--- /dev/null
+++ b/.github/workflows/scala-steward.yml
@@ -0,0 +1,27 @@
+name: Scala Steward
+
+# This workflow will launch every day at 00:00
+on:
+ schedule:
+ - cron: '0 0 * * *'
+ workflow_dispatch: {}
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ scala-steward:
+ timeout-minutes: 45
+ runs-on: ubuntu-latest
+ name: Scala Steward
+ steps:
+ - name: Setup sbt
+ uses: sbt/setup-sbt@v1
+ - name: Scala Steward
+ uses: scala-steward-org/scala-steward-action@v2
+ with:
+ github-app-id: ${{ secrets.SCALA_STEWARD_GITHUB_APP_ID }}
+ github-app-installation-id: ${{ secrets.SCALA_STEWARD_GITHUB_APP_INSTALLATION_ID }}
+ github-app-key: ${{ secrets.SCALA_STEWARD_GITHUB_APP_PRIVATE_KEY }}
+ github-app-auth-only: true
diff --git a/.github/workflows/site.yml b/.github/workflows/site.yml
index 7cc15f37a..a0e7cc2aa 100644
--- a/.github/workflows/site.yml
+++ b/.github/workflows/site.yml
@@ -1,4 +1,4 @@
-# This file was autogenerated using `zio-sbt-website` via `sbt generateGithubWorkflow`
+# This file was autogenerated using `zio-sbt-website` via `sbt generateGithubWorkflow`
# task and should be included in the git repository. Please do not edit it manually.
name: Website
@@ -14,44 +14,38 @@ name: Website
jobs:
build:
name: Build and Test
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
if: ${{ github.event_name == 'pull_request' }}
steps:
- name: Git Checkout
- uses: actions/checkout@v3.3.0
+ uses: actions/checkout@v5.0.0
with:
fetch-depth: '0'
- - name: Setup Scala
- uses: actions/setup-java@v3.9.0
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: 17
- check-latest: true
- - name: Check if the README file is up to date
- run: sbt docs/checkReadme
- - name: Check if the site workflow is up to date
- run: sbt docs/checkGithubWorkflow
+ jvm: temurin:17
+ apps: sbt
- name: Check artifacts build process
run: sbt +publishLocal
- name: Check website build process
run: sbt docs/clean; sbt docs/buildWebsite
publish-docs:
name: Publish Docs
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
if: ${{ ((github.event_name == 'release') && (github.event.action == 'published')) || (github.event_name == 'workflow_dispatch') }}
steps:
- name: Git Checkout
- uses: actions/checkout@v3.3.0
+ uses: actions/checkout@v5.0.0
with:
fetch-depth: '0'
- - name: Setup Scala
- uses: actions/setup-java@v3.9.0
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: 17
- check-latest: true
+ jvm: temurin:17
+ apps: sbt
- name: Setup NodeJs
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v6
with:
node-version: 16.x
registry-url: https://registry.npmjs.org
@@ -61,22 +55,19 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
generate-readme:
name: Generate README
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
if: ${{ (github.event_name == 'push') || ((github.event_name == 'release') && (github.event.action == 'published')) }}
steps:
- name: Git Checkout
- uses: actions/checkout@v3.3.0
+ uses: actions/checkout@v5.0.0
with:
ref: ${{ github.head_ref }}
fetch-depth: '0'
- - name: Setup Scala
- uses: actions/setup-java@v3.9.0
+ - name: Setup Action
+ uses: coursier/setup-action@v1
with:
- distribution: temurin
- java-version: 17
- check-latest: true
- - name: Generate Readme
- run: sbt docs/generateReadme
+ jvm: temurin:17
+ apps: sbt
- name: Commit Changes
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
@@ -84,7 +75,7 @@ jobs:
git add README.md
git commit -m "Update README.md" || echo "No changes to commit"
- name: Create Pull Request
- uses: peter-evans/create-pull-request@v4.2.3
+ uses: peter-evans/create-pull-request@v7.0.8
with:
body: |-
Autogenerated changes after running the `sbt docs/generateReadme` command of the [zio-sbt-website](https://zio.dev/zio-sbt) plugin.
@@ -95,4 +86,5 @@ jobs:
branch: zio-sbt-website/update-readme
commit-message: Update README.md
delete-branch: true
+ base: series/2.x
title: Update README.md
diff --git a/.jvmopts b/.jvmopts
index 26e6a558b..411a33b13 100644
--- a/.jvmopts
+++ b/.jvmopts
@@ -1,6 +1,6 @@
--Xmx4g
+-Xmx6g
-Xss2m
--XX:MaxMetaspaceSize=1g
+-XX:+UseG1GC
+-XX:InitialCodeCacheSize=512m
-XX:ReservedCodeCacheSize=512m
--XX:+UseParallelGC
-Dfile.encoding=UTF8
diff --git a/.scalafmt.conf b/.scalafmt.conf
index 46233e07a..3724ac424 100644
--- a/.scalafmt.conf
+++ b/.scalafmt.conf
@@ -1,10 +1,11 @@
-version = "2.7.5"
+version = "3.9.7"
+runner.dialect = scala213
maxColumn = 120
align.preset = most
align.multiline = false
continuationIndent.defnSite = 2
assumeStandardLibraryStripMargin = true
-docstrings = JavaDoc
+docstrings.style = Asterisk
lineEndings = preserve
includeCurlyBraceInSelectChains = false
danglingParentheses.preset = true
@@ -26,3 +27,17 @@ rewriteTokens = {
"→": "->"
"←": "<-"
}
+
+project.excludePaths = [
+ "glob:**/target/**"
+ "glob:**/resources/**"
+]
+
+fileOverride {
+ "glob:**/scala-3/**" {
+ runner.dialect = scala3
+ }
+ "glob:**/project/**" {
+ runner.dialect = scala3
+ }
+}
diff --git a/README.md b/README.md
index 070191c35..8677b58a7 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
[ZIO Json](https://github.com/zio/zio-json) is a fast and secure JSON library with tight ZIO integration.
-[](https://github.com/zio/zio/wiki/Project-Stages)  [](https://oss.sonatype.org/content/repositories/releases/dev/zio/zio-json_2.13/) [](https://oss.sonatype.org/content/repositories/snapshots/dev/zio/zio-json_2.13/) [](https://javadoc.io/doc/dev.zio/zio-json-docs_2.13) [](https://github.com/zio/zio-json)
+[](https://github.com/zio/zio/wiki/Project-Stages) [](https://oss.sonatype.org/content/repositories/releases/dev/zio/zio-json_2.13/) [](https://oss.sonatype.org/content/repositories/snapshots/dev/zio/zio-json_2.13/) [](https://javadoc.io/doc/dev.zio/zio-json-docs_2.13) [](https://github.com/zio/zio-json)
## Introduction
@@ -15,7 +15,7 @@ The goal of this project is to create the best all-round JSON library for Scala:
- **Performance** to handle more requests per second than the incumbents, i.e. reduced operational costs.
- **Security** to mitigate against adversarial JSON payloads that threaten the capacity of the server.
- **Fast Compilation** no shapeless, no type astronautics.
-- **Future-Proof**, prepared for Scala 3 and next-generation Java.
+- **Future-Proof**, prepared for Scala 3 and running on JDK 11+ JVMs.
- **Simple** small codebase, concise documentation that covers everything.
- **Helpful errors** are readable by humans and machines.
- **ZIO Integration** so nothing more is required.
@@ -25,7 +25,7 @@ The goal of this project is to create the best all-round JSON library for Scala:
In order to use this library, we need to add the following line in our `build.sbt` file:
```scala
-libraryDependencies += "dev.zio" %% "zio-json" % "0.6.1"
+libraryDependencies += "dev.zio" %% "zio-json" % "0.7.42"
```
## Example
@@ -47,18 +47,24 @@ Say we want to be able to read some JSON like
into a Scala `case class`
```scala
-case class Banana(curvature: Double)
+final case class Banana(curvature: Double)
```
-To do this, we create an *instance* of the `JsonDecoder` typeclass for `Banana` using the `zio-json` code generator. It is best practice to put it on the companion of `Banana`, like so
+To do this, we derive an *instance* of the `JsonDecoder` typeclass for `Banana`.
```scala
-object Banana {
- implicit val decoder: JsonDecoder[Banana] = DeriveJsonDecoder.gen[Banana]
-}
+final case class Banana(curvature: Double) derives JsonDecoder
```
-_Note: If you’re using Scala 3 and your case class is defining default parameters, `-Yretain-trees` needs to be added to `scalacOptions`._
+> [!NOTE]
+>
+> In Scala 2, we need to use the `zio-json` semi-automatic derivation. It is best practice to put it on the companion of `Banana`, like so
+>
+> ```scala
+> object Banana {
+> implicit val decoder: JsonDecoder[Banana] = DeriveJsonDecoder.gen[Banana]
+> }
+> ```
Now we can parse JSON into our object
@@ -67,13 +73,10 @@ scala> """{"curvature":0.5}""".fromJson[Banana]
val res: Either[String, Banana] = Right(Banana(0.5))
```
-Likewise, to produce JSON from our data we define a `JsonEncoder`
+Likewise, to produce JSON from our data we derive a `JsonEncoder`
```scala
-object Banana {
- ...
- implicit val encoder: JsonEncoder[Banana] = DeriveJsonEncoder.gen[Banana]
-}
+final case class Banana(curvature: Double) derives JsonEncoder
scala> Banana(0.5).toJson
val res: String = {"curvature":0.5}
@@ -85,30 +88,71 @@ val res: String =
}
```
+> [!NOTE]
+>
+> In Scala 2:
+> ```scala
+> object Banana {
+> ...
+> implicit val encoder: JsonEncoder[Banana] = DeriveJsonEncoder.gen[Banana]
+> }
+> ```
+
And bad JSON will produce an error in `jq` syntax with an additional piece of contextual information (in parentheses)
```
-scala> """{"curvature": womp}""".fromJson[Banana]
-val res: Either[String, Banana] = Left(.curvature(expected a number, got w))
+scala> """{"curvature": true}""".fromJson[Banana]
+val res: Either[String, Banana] = Left(.curvature(expected a Double))
```
Say we extend our data model to include more data types
```scala
-sealed trait Fruit
-case class Banana(curvature: Double) extends Fruit
-case class Apple (poison: Boolean) extends Fruit
+enum Fruit {
+ case Banana(curvature: Double)
+ case Apple(poison: Boolean)
+}
```
-we can generate the encoder and decoder for the entire `sealed` family
+we can generate the encoder and decoder for the entire `sealed` family using `JsonCodec`
```scala
-object Fruit {
- implicit val decoder: JsonDecoder[Fruit] = DeriveJsonDecoder.gen[Fruit]
- implicit val encoder: JsonEncoder[Fruit] = DeriveJsonEncoder.gen[Fruit]
+enum Fruit derives JsonCodec {
+ case Banana(curvature: Double)
+ case Apple(poison: Boolean)
}
```
+> [!NOTE]
+>
+> In Scala 2:
+>
+> ```scala mdoc:compile-only
+> import zio.json._
+>
+> sealed trait Fruit
+> final case class Banana(curvature: Double) extends Fruit
+> final case class Apple(poison: Boolean) extends Fruit
+>
+> object Fruit {
+> implicit val decoder: JsonDecoder[Fruit] =
+> DeriveJsonDecoder.gen[Fruit]
+>
+> implicit val encoder: JsonEncoder[Fruit] =
+> DeriveJsonEncoder.gen[Fruit]
+> }
+>
+> val json1 = """{ "Banana":{ "curvature":0.5 }}"""
+> val json2 = """{ "Apple": { "poison": false }}"""
+> val malformedJson = """{ "Banana":{ "curvature": true }}"""
+>
+> json1.fromJson[Fruit]
+> json2.fromJson[Fruit]
+> malformedJson.fromJson[Fruit]
+>
+> List(Apple(false), Banana(0.4)).toJsonPretty
+> ```
+
allowing us to load the fruit based on a single field type tag in the JSON
```
@@ -121,35 +165,9 @@ val res: Either[String, Fruit] = Right(Apple(false))
Almost all of the standard library data types are supported as fields on the case class, and it is easy to add support if one is missing.
-```scala
-import zio.json._
-
-sealed trait Fruit extends Product with Serializable
-case class Banana(curvature: Double) extends Fruit
-case class Apple(poison: Boolean) extends Fruit
-
-object Fruit {
- implicit val decoder: JsonDecoder[Fruit] =
- DeriveJsonDecoder.gen[Fruit]
-
- implicit val encoder: JsonEncoder[Fruit] =
- DeriveJsonEncoder.gen[Fruit]
-}
-
-val json1 = """{ "Banana":{ "curvature":0.5 }}"""
-val json2 = """{ "Apple": { "poison": false }}"""
-val malformedJson = """{ "Banana":{ "curvature": true }}"""
-
-json1.fromJson[Fruit]
-json2.fromJson[Fruit]
-malformedJson.fromJson[Fruit]
-
-List(Apple(false), Banana(0.4)).toJsonPretty
-```
-
# How
-Extreme **performance** is achieved by decoding JSON directly from the input source into business objects (docs/inspired by [plokhotnyuk](https://github.com/plokhotnyuk/jsoniter-scala)). Although not a requirement, the latest advances in [Java Loom](https://wiki.openjdk.java.net/display/loom/Main) can be used to support arbitrarily large payloads with near-zero overhead.
+High **performance** is achieved by decoding JSON directly from the input source into business objects. See benchmark results of throughput and allocation rate for synthetic and real-world message samples in comparison with other JSON parsers [here](https://plokhotnyuk.github.io/jsoniter-scala/).
Best in class **security** is achieved with an aggressive *early exit* strategy that avoids costly stack traces, even when parsing malformed numbers. Malicious (and badly formed) payloads are rejected before finishing reading.
@@ -165,11 +183,11 @@ Learn more on the [ZIO JSON homepage](https://zio.dev/zio-json/)!
## Contributing
-For the general guidelines, see ZIO [contributor's guide](https://zio.dev/about/contributing).
+For the general guidelines, see ZIO [contributor's guide](https://zio.dev/contributor-guidelines).
## Code of Conduct
-See the [Code of Conduct](https://zio.dev/about/code-of-conduct)
+See the [Code of Conduct](https://zio.dev/code-of-conduct)
## Support
diff --git a/build.sbt b/build.sbt
index c01b8261a..70adf24c1 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,22 +1,31 @@
-import BuildHelper._
+import BuildHelper.*
+import com.typesafe.tools.mima.core.Problem
+import com.typesafe.tools.mima.core.ProblemFilters.exclude
+import com.typesafe.tools.mima.plugin.MimaKeys.mimaPreviousArtifacts
import explicitdeps.ExplicitDepsPlugin.autoImport.moduleFilterRemoveValue
import sbtcrossproject.CrossPlugin.autoImport.crossProject
-Global / onChangedBuildSource := IgnoreSourceChanges
+Global / onChangedBuildSource := ReloadOnSourceChanges
inThisBuild(
List(
+ scalaVersion := Scala213,
organization := "dev.zio",
- homepage := Some(url("https://zio.dev/zio-json/")),
- licenses := List("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0")),
- developers := List(
+ homepage := Some(url("https://zio.dev/zio-json/")),
+ licenses := List("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0")),
+ developers := List(
Developer(
"jdegoes",
"John De Goes",
"john@degoes.net",
url("http://degoes.net")
)
- )
+ ),
+ publishTo := {
+ val centralSnapshots = "https://central.sonatype.com/repository/maven-snapshots/"
+ if (isSnapshot.value) Some("central-snapshots" at centralSnapshots)
+ else localStaging.value
+ }
)
)
@@ -27,41 +36,43 @@ addCommandAlias("prepare", "fmt")
addCommandAlias(
"testJVM",
- "zioJsonJVM/test; zioJsonYaml/test; zioJsonInteropHttp4s/test; zioJsonInteropScalaz7xJVM/test; zioJsonGolden/test"
+ "zioJsonJVM/test; zioJsonYaml/test; zioJsonInteropHttp4s/test; zioJsonGolden/test; zioJsonInteropRefinedJVM/test"
)
addCommandAlias(
- "testScala2JVM",
- "zioJsonMacrosJVM/test; zioJsonInteropRefinedJVM/test"
+ "testJS",
+ "zioJsonJS/test; zioJsonInteropRefinedJS/test"
)
addCommandAlias(
- "testScala2JS",
- "zioJsonMacrosJS/test; zioJsonInteropRefinedJS/test"
+ "testNative",
+ "zioJsonNative/test; zioJsonInteropRefinedNative/test"
)
addCommandAlias(
- "testScala2Native",
- "zioJsonMacrosNative/test; zioJsonInteropRefinedNative/test"
+ "testScala2JVM",
+ "zioJsonMacrosJVM/test; zioJsonInteropScalaz7xJVM/test"
)
addCommandAlias(
- "testJS",
- "zioJsonJS/test; zioJsonInteropScalaz7xJS/test"
+ "testScala2JS",
+ "zioJsonMacrosJS/test; zioJsonInteropScalaz7xJS/test"
)
addCommandAlias(
- "testNative",
- "zioJsonNative/test; zioJsonInteropScalaz7xNative/test"
+ "testScala2Native",
+ "zioJsonMacrosNative/test; zioJsonInteropScalaz7xNative/test"
)
-val zioVersion = "2.0.16"
+val zioVersion = "2.1.22"
lazy val zioJsonRoot = project
.in(file("."))
.settings(
- publish / skip := true,
- unusedCompileDependenciesFilter -= moduleFilter("org.scala-js", "scalajs-library")
+ publish / skip := true,
+ mimaPreviousArtifacts := Set(),
+ unusedCompileDependenciesFilter -= moduleFilter("org.scala-js", "scalajs-library"),
+ crossScalaVersions := Nil // https://www.scala-sbt.org/1.x/docs/Cross-Build.html#Cross+building+a+project+statefully,
)
.aggregate(
docs,
@@ -82,7 +93,7 @@ lazy val zioJsonRoot = project
zioJsonGolden
)
-val circeVersion = "0.14.3"
+val circeVersion = "0.14.15"
lazy val zioJson = crossProject(JSPlatform, JVMPlatform, NativePlatform)
.in(file("zio-json"))
@@ -97,58 +108,67 @@ lazy val zioJson = crossProject(JSPlatform, JVMPlatform, NativePlatform)
scalacOptions -= "-opt:l:inline",
scalacOptions -= "-opt-inline-from:zio.internal.**",
Test / scalacOptions ++= {
- if (scalaVersion.value == ScalaDotty)
- Vector("-Yretain-trees")
+ if (scalaVersion.value == Scala3)
+ Vector("-Yretain-trees", "-Xmax-inlines:128")
else
Vector.empty
},
libraryDependencies ++= Seq(
- "dev.zio" %%% "zio" % zioVersion,
- "dev.zio" %%% "zio-streams" % zioVersion,
- "org.scala-lang.modules" %%% "scala-collection-compat" % "2.9.0",
- "dev.zio" %%% "zio-test" % zioVersion % "test",
- "dev.zio" %%% "zio-test-sbt" % zioVersion % "test",
- "io.circe" %%% "circe-core" % circeVersion % "test",
- "io.circe" %%% "circe-generic" % circeVersion % "test",
- "io.circe" %%% "circe-parser" % circeVersion % "test"
+ "dev.zio" %%% "zio" % zioVersion,
+ "dev.zio" %%% "zio-streams" % zioVersion,
+ "org.scala-lang.modules" %%% "scala-collection-compat" % "2.14.0" % Test,
+ "dev.zio" %%% "zio-test" % zioVersion % Test,
+ "dev.zio" %%% "zio-test-sbt" % zioVersion % Test,
+ "com.github.plokhotnyuk.jsoniter-scala" %%% "jsoniter-scala-core" % "2.38.3" % Test,
+ "com.github.plokhotnyuk.jsoniter-scala" %%% "jsoniter-scala-macros" % "2.38.3" % Test,
+ "io.circe" %%% "circe-core" % circeVersion % Test,
+ "io.circe" %%% "circe-generic" % circeVersion % Test,
+ "io.circe" %%% "circe-parser" % circeVersion % Test,
+ "org.typelevel" %%% "jawn-ast" % "1.6.0" % Test
),
// scala version specific dependencies
libraryDependencies ++= {
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((3, _)) =>
- Vector(
- "com.softwaremill.magnolia1_3" %%% "magnolia" % "1.3.0"
+ Seq(
+ "com.softwaremill.magnolia1_3" %%% "magnolia" % "1.3.18"
)
-
case _ =>
- Vector(
- "org.scala-lang" % "scala-reflect" % scalaVersion.value % Provided,
- "com.softwaremill.magnolia1_2" %%% "magnolia" % "1.1.3",
- "io.circe" %%% "circe-generic-extras" % circeVersion % "test",
- "com.github.plokhotnyuk.jsoniter-scala" %%% "jsoniter-scala-core" % "2.23.3" % "test",
- "com.github.plokhotnyuk.jsoniter-scala" %%% "jsoniter-scala-macros" % "2.23.3" % "test"
+ Seq(
+ "org.scala-lang" % "scala-reflect" % scalaVersion.value % Provided,
+ "com.softwaremill.magnolia1_2" %%% "magnolia" % "1.1.10"
)
}
},
Compile / sourceGenerators += Def.task {
- val dir = (Compile / sourceManaged).value
- val file = dir / "zio" / "json" / "GeneratedTupleDecoders.scala"
+ val dir = (Compile / sourceManaged).value
+ val file = dir / "zio" / "json" / "GeneratedTupleDecoders.scala"
val decoders = (1 to 22).map { i =>
val tparams = (1 to i).map(p => s"A$p").mkString(", ")
val implicits = (1 to i).map(p => s"A$p: JsonDecoder[A$p]").mkString(", ")
- val work = (1 to i)
- .map(p => s"val a$p = A$p.unsafeDecode(trace :+ traces($p), in)")
+ val work = (1 to i)
+ .map(p => s"val a$p = A$p.unsafeDecode(traces(${p - 1}) :: trace, in)")
.mkString("\n Lexer.char(trace, in, ',')\n ")
+ val work2 = (1 to i)
+ .map(p => s"val a$p = A$p.unsafeFromJsonAST(traces(${p - 1}) :: trace, arr($p - 1))")
+ .mkString("\n ")
val returns = (1 to i).map(p => s"a$p").mkString(", ")
-
s"""implicit def tuple$i[$tparams](implicit $implicits): JsonDecoder[Tuple$i[$tparams]] =
| new JsonDecoder[Tuple$i[$tparams]] {
- | val traces: Array[JsonError] = (0 to $i).map(JsonError.ArrayAccess(_)).toArray
+ | private[this] val traces: Array[JsonError] = (0 to ${i - 1}).map(JsonError.ArrayAccess(_)).toArray
| def unsafeDecode(trace: List[JsonError], in: RetractReader): Tuple$i[$tparams] = {
| Lexer.char(trace, in, '[')
| $work
| Lexer.char(trace, in, ']')
- | Tuple$i($returns)
+ | new Tuple$i($returns)
+ | }
+ | override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Tuple$i[$tparams] = {
+ | json match {
+ | case Json.Arr(arr) if arr.length == $i =>
+ | $work2
+ | new Tuple$i($returns)
+ | case _ => Lexer.error("Expected array of size $i", trace)
+ | }
| }
| }""".stripMargin
}
@@ -157,6 +177,7 @@ lazy val zioJson = crossProject(JSPlatform, JVMPlatform, NativePlatform)
s"""package zio.json
|
|import zio.json.internal._
+ |import zio.json.ast._
|
|private[json] trait GeneratedTupleDecoders { this: JsonDecoder.type =>
| ${decoders.mkString("\n\n ")}
@@ -165,15 +186,14 @@ lazy val zioJson = crossProject(JSPlatform, JVMPlatform, NativePlatform)
Seq(file)
}.taskValue,
Compile / sourceGenerators += Def.task {
- val dir = (Compile / sourceManaged).value
- val file = dir / "zio" / "json" / "GeneratedTupleEncoders.scala"
+ val dir = (Compile / sourceManaged).value
+ val file = dir / "zio" / "json" / "GeneratedTupleEncoders.scala"
val encoders = (1 to 22).map { i =>
val tparams = (1 to i).map(p => s"A$p").mkString(", ")
val implicits = (1 to i).map(p => s"A$p: JsonEncoder[A$p]").mkString(", ")
- val work = (1 to i)
+ val work = (1 to i)
.map(p => s"A$p.unsafeEncode(t._$p, indent, out)")
.mkString("\n if (indent.isEmpty) out.write(',') else out.write(\", \")\n ")
-
s"""implicit def tuple$i[$tparams](implicit $implicits): JsonEncoder[Tuple$i[$tparams]] =
| new JsonEncoder[Tuple$i[$tparams]] {
| def unsafeEncode(t: Tuple$i[$tparams], indent: Option[Int], out: internal.Write): Unit = {
@@ -194,12 +214,11 @@ lazy val zioJson = crossProject(JSPlatform, JVMPlatform, NativePlatform)
Seq(file)
}.taskValue,
Compile / sourceGenerators += Def.task {
- val dir = (Compile / sourceManaged).value
- val file = dir / "zio" / "json" / "GeneratedTupleCodecs.scala"
+ val dir = (Compile / sourceManaged).value
+ val file = dir / "zio" / "json" / "GeneratedTupleCodecs.scala"
val codecs = (1 to 22).map { i =>
val tparamDecls = (1 to i).map(p => s"A$p: JsonEncoder: JsonDecoder").mkString(", ")
val tparams = (1 to i).map(p => s"A$p").mkString(", ")
-
s"""implicit def tuple$i[$tparamDecls]: JsonCodec[Tuple$i[$tparams]] =
| JsonCodec(JsonEncoder.tuple$i, JsonDecoder.tuple$i)""".stripMargin
}
@@ -213,36 +232,36 @@ lazy val zioJson = crossProject(JSPlatform, JVMPlatform, NativePlatform)
)
Seq(file)
}.taskValue,
- inConfig(Jmh)(org.scalafmt.sbt.ScalafmtPlugin.scalafmtConfigSettings)
+ inConfig(Jmh)(org.scalafmt.sbt.ScalafmtPlugin.scalafmtConfigSettings),
+ testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework"),
+ mimaBinaryIssueFilters ++= Seq(
+ exclude[Problem]("zio.json.CaseObjectDecoder.*") // FIXME: false negative reported by mima
+ )
)
- .settings(testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework"))
.jsSettings(
+ mimaBinaryIssueFilters ++= Seq(
+ exclude[Problem]("zio.JsonPackagePlatformSpecific.*"),
+ exclude[Problem]("zio.json.JsonDecoderPlatformSpecific.*"),
+ exclude[Problem]("zio.json.JsonEncoderPlatformSpecific.*"),
+ exclude[Problem]("zio.json.package.*")
+ ),
libraryDependencies ++= Seq(
- "io.github.cquiroz" %%% "scala-java-time" % "2.5.0",
- "io.github.cquiroz" %%% "scala-java-time-tzdb" % "2.5.0"
+ ("org.scala-js" %%% "scalajs-weakreferences" % "1.0.0").cross(CrossVersion.for3Use2_13),
+ "io.github.cquiroz" %%% "scala-java-time" % scalaJavaTimeVersion,
+ "io.github.cquiroz" %%% "scala-java-time-tzdb" % scalaJavaTimeVersion % Test
)
)
- .jvmSettings(
- libraryDependencies ++= {
- CrossVersion.partialVersion(scalaVersion.value) match {
- case Some((3, _)) =>
- Vector(
- "org.typelevel" %% "jawn-ast" % "1.5.1" % "test"
- )
-
- case _ =>
- Seq(
- "ai.x" %% "play-json-extensions" % "0.42.0" % "test",
- "com.typesafe.play" %%% "play-json" % "2.9.4" % "test",
- "org.typelevel" %% "jawn-ast" % "1.5.1" % "test"
- )
- }
- }
- )
- .nativeSettings(Test / fork := false)
+ .nativeSettings(nativeSettings)
.nativeSettings(
+ mimaBinaryIssueFilters ++= Seq(
+ exclude[Problem]("zio.JsonPackagePlatformSpecific.*"),
+ exclude[Problem]("zio.json.JsonDecoderPlatformSpecific.*"),
+ exclude[Problem]("zio.json.JsonEncoderPlatformSpecific.*"),
+ exclude[Problem]("zio.json.package.*")
+ ),
libraryDependencies ++= Seq(
- "io.github.cquiroz" %%% "scala-java-time" % "2.5.0"
+ "io.github.cquiroz" %%% "scala-java-time" % scalaJavaTimeVersion,
+ "io.github.cquiroz" %%% "scala-java-time-tzdb" % scalaJavaTimeVersion % Test
)
)
.enablePlugins(BuildInfoPlugin)
@@ -278,10 +297,11 @@ lazy val zioJsonYaml = project
.settings(buildInfoSettings("zio.json.yaml"))
.settings(
libraryDependencies ++= Seq(
- "org.yaml" % "snakeyaml" % "2.2",
- "dev.zio" %% "zio" % zioVersion,
- "dev.zio" %% "zio-test" % zioVersion % "test",
- "dev.zio" %% "zio-test-sbt" % zioVersion % "test"
+ "org.yaml" % "snakeyaml" % "2.5",
+ "org.scala-lang.modules" %% "scala-collection-compat" % "2.14.0",
+ "dev.zio" %% "zio" % zioVersion,
+ "dev.zio" %% "zio-test" % zioVersion % Test,
+ "dev.zio" %% "zio-test-sbt" % zioVersion % Test
),
testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework")
)
@@ -295,16 +315,16 @@ lazy val zioJsonMacros = crossProject(JSPlatform, JVMPlatform, NativePlatform)
.settings(crossProjectSettings)
.settings(macroExpansionSettings)
.settings(
- crossScalaVersions -= ScalaDotty,
+ crossScalaVersions -= Scala3,
scalacOptions -= "-Xfatal-warnings", // not quite ready.
libraryDependencies ++= Seq(
"org.scala-lang" % "scala-reflect" % scalaVersion.value % Provided,
- "dev.zio" %%% "zio-test" % zioVersion % "test",
- "dev.zio" %%% "zio-test-sbt" % zioVersion % "test"
+ "dev.zio" %%% "zio-test" % zioVersion % Test,
+ "dev.zio" %%% "zio-test-sbt" % zioVersion % Test
),
testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework")
)
- .nativeSettings(Test / fork := false)
+ .nativeSettings(nativeSettings)
lazy val zioJsonMacrosJVM = zioJsonMacros.jvm.dependsOn(zioJsonJVM)
@@ -317,14 +337,13 @@ lazy val zioJsonInteropHttp4s = project
.settings(stdSettings("zio-json-interop-http4s"))
.settings(buildInfoSettings("zio.json.interop.http4s"))
.settings(
- crossScalaVersions -= ScalaDotty,
libraryDependencies ++= Seq(
- "org.http4s" %% "http4s-dsl" % "0.23.20",
+ "org.http4s" %% "http4s-dsl" % "0.23.33",
"dev.zio" %% "zio" % zioVersion,
- "org.typelevel" %% "cats-effect" % "3.4.9",
- "dev.zio" %% "zio-interop-cats" % "23.0.03" % "test",
- "dev.zio" %% "zio-test" % zioVersion % "test",
- "dev.zio" %% "zio-test-sbt" % zioVersion % "test"
+ "org.typelevel" %% "cats-effect" % "3.6.3",
+ "dev.zio" %% "zio-interop-cats" % "23.1.0.5" % Test,
+ "dev.zio" %% "zio-test" % zioVersion % Test,
+ "dev.zio" %% "zio-test-sbt" % zioVersion % Test
),
testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework")
)
@@ -338,9 +357,9 @@ lazy val zioJsonInteropRefined = crossProject(JSPlatform, JVMPlatform, NativePla
.settings(buildInfoSettings("zio.json.interop.refined"))
.settings(
libraryDependencies ++= Seq(
- "eu.timepit" %%% "refined" % "0.10.2",
- "dev.zio" %%% "zio-test" % zioVersion % "test",
- "dev.zio" %%% "zio-test-sbt" % zioVersion % "test"
+ "eu.timepit" %%% "refined" % "0.11.3",
+ "dev.zio" %%% "zio-test" % zioVersion % Test,
+ "dev.zio" %%% "zio-test-sbt" % zioVersion % Test
),
testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework")
)
@@ -352,11 +371,11 @@ lazy val zioJsonInteropScalaz7x = crossProject(JSPlatform, JVMPlatform, NativePl
.settings(stdSettings("zio-json-interop-scalaz7x"))
.settings(buildInfoSettings("zio.json.interop.scalaz7x"))
.settings(
- crossScalaVersions -= ScalaDotty,
+ crossScalaVersions -= Scala3,
libraryDependencies ++= Seq(
- "org.scalaz" %%% "scalaz-core" % "7.3.7",
- "dev.zio" %%% "zio-test" % zioVersion % "test",
- "dev.zio" %%% "zio-test-sbt" % zioVersion % "test"
+ "org.scalaz" %%% "scalaz-core" % "7.3.8",
+ "dev.zio" %%% "zio-test" % zioVersion % Test,
+ "dev.zio" %%% "zio-test-sbt" % zioVersion % Test
),
testFrameworks += new TestFramework("zio.test.sbt.ZTestFramework")
)
@@ -374,12 +393,12 @@ lazy val docs = project
zioJsonInteropScalaz7x.jvm
)
.settings(
- crossScalaVersions -= ScalaDotty,
+ crossScalaVersions -= Scala3,
moduleName := "zio-json-docs",
scalacOptions += "-Ymacro-annotations",
- projectName := "ZIO JSON",
- mainModuleName := (zioJsonJVM / moduleName).value,
- projectStage := ProjectStage.ProductionReady,
+ projectName := "ZIO JSON",
+ mainModuleName := (zioJsonJVM / moduleName).value,
+ projectStage := ProjectStage.ProductionReady,
ScalaUnidoc / unidoc / unidocProjectFilter := inProjects(
zioJsonJVM,
zioJsonYaml,
@@ -389,7 +408,7 @@ lazy val docs = project
zioJsonInteropScalaz7x.jvm,
zioJsonGolden
),
- docsPublishBranch := "series/2.x",
+ mimaPreviousArtifacts := Set(),
readmeAcknowledgement :=
"""|- Uses [JsonTestSuite](https://github.com/nst/JSONTestSuite) to test parsing. (c) 2016 Nicolas Seriot)
|
diff --git a/docs/configuration.md b/docs/configuration.md
index 3c7da6f6f..9e95a60b9 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -55,8 +55,8 @@ sealed trait Fruit
) extends Fruit
object Fruit {
- implicit val encoder: JsonEncoder[Fruit] =
- DeriveJsonEncoder.gen[Fruit]
+ implicit val codec: JsonCodec[Fruit] =
+ DeriveJsonCodec.gen[Fruit]
}
val banana: Fruit = Banana(0.5)
@@ -80,6 +80,60 @@ banana.toJson
apple.toJson
```
+Another way of changing the type hint is the `@jsonHintNames` annotation on a sealed class. It applies a transformation
+to all type hint values in the hierarchy. The same transformations are available as for the `@jsonMemberNames` annotation.
+
+Here's an example:
+
+```scala mdoc
+import zio.json._
+
+@jsonHintNames(SnakeCase)
+sealed trait FruitKind
+
+case class GoodFruit(good: Boolean) extends FruitKind
+
+case class BadFruit(bad: Boolean) extends FruitKind
+
+object FruitKind {
+ implicit val codec: JsonCodec[FruitKind] =
+ DeriveJsonCodec.gen[FruitKind]
+}
+
+val goodFruit: FruitKind = GoodFruit(true)
+val badFruit: FruitKind = BadFruit(true)
+
+goodFruit.toJson
+badFruit.toJson
+```
+
+Note that with this code, you can't directly decode the subclasses of `FruitKind`. You would need to create a dedicated decoder for each subclass.
+
+```scala mdoc
+object GoodFruit {
+ implicit val codec: JsonCodec[GoodFruit] =
+ DeriveJsonCodec.gen[GoodFruit]
+}
+```
+
+Since `GoodFruit` is only a case class, it will not require any kind of discriminator to be decoded.
+
+```scala mdoc
+"""{"good":true}""".fromJson[GoodFruit]
+```
+
+If for some reason you want to decode only a specific subtype of `FruitKind` that carries a discriminator, don't derive the codec for the subtype; instead, transform the `FruitKind` codec.
+
+```scala mdoc
+object BadFruit {
+ implicit val decoder: JsonDecoder[BadFruit] =
+ FruitKind.codec.decoder.mapOrFail {
+ case GoodFruit(_) => Left("Expected BadFruit, got GoodFruit")
+ case BadFruit(bad) => Right(BadFruit(bad))
+ }
+}
+```
+
## jsonDiscriminator
@@ -143,6 +197,43 @@ The following two expressions result in an equal value:
The `@jsonAliases` annotation supports multiple aliases. The annotation has no effect on encoding.
+## Nulls, explicitNulls
+
+By default, `null` values are omitted from the JSON output. This behavior can be changed by using the `@jsonExplicitNull` annotation on a case class or field, or by setting `JsonCodecConfiguration.explicitNulls` to `true`.
+Missing nulls on decoding are always allowed.
+
+```scala mdoc
+@jsonExplicitNull
+case class Mango(ripeness: Option[Int])
+
+object Mango {
+ implicit val codec: JsonCodec[Mango] = DeriveJsonCodec.gen[Mango]
+}
+```
+The following expression results in a JSON document with a `null` value:
+```scala mdoc
+Mango(None).toJson
+"""{}""".fromJson[Mango]
+```
+
+## Empty Collections, explicitEmptyCollections
+
+By default, empty collections (of all supported collection types and case classes) are included in the JSON output, and decoding requires them to be present. This behavior can be changed by using the `@jsonExplicitEmptyCollections(encoding = false, decoding = false)` annotation on a case class or field, or by setting `JsonCodecConfiguration.explicitEmptyCollections` to `ExplicitEmptyCollections(encoding = false, decoding = false)`. Empty collections are then omitted from the JSON output, and missing collections are created as empty when decoding. Encoding and decoding can also be configured independently, e.g. `@jsonExplicitEmptyCollections(encoding = true, decoding = false)` or `@jsonExplicitEmptyCollections(encoding = false, decoding = true)`.
+
+```scala mdoc
+@jsonExplicitEmptyCollections(encoding = false, decoding = false)
+case class Pineapple(leaves: List[String])
+
+object Pineapple {
+ implicit val codec: JsonCodec[Pineapple] = DeriveJsonCodec.gen[Pineapple]
+}
+```
+The following expressions result in a JSON document without the empty collection and decode a missing collection as empty:
+```scala mdoc
+Pineapple(Nil).toJson
+"""{}""".fromJson[Pineapple]
+```
+
## @jsonDerive
**Requires zio-json-macros**
diff --git a/docs/decoding.md b/docs/decoding.md
index 8c9a1581c..596aab4db 100644
--- a/docs/decoding.md
+++ b/docs/decoding.md
@@ -36,7 +36,7 @@ Now we can parse JSON into our object
### Automatic Derivation and case class default field values
-If a case class field is defined with a default value and the field is not present or `null`, the default value will be used.
+If a case class field is defined with a default value and the field is not present or `null`, the default value will be used (or evaluated when it is a method).
Say we have a Scala `case class`
@@ -51,6 +51,8 @@ implicit val decoder: JsonDecoder[Entity] =
"""{ "id": 42, "related": null }""".fromJson[Entity]
```
+_Note: If you’re using Scala 3 and your case class defines default parameters, `-Yretain-trees` needs to be added to `scalacOptions`._
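+
+A minimal `build.sbt` sketch for adding that flag only when compiling with Scala 3 (mirroring the conditional style used in this repository's own build; adapt it to your project):
+
+```scala
+// add -Yretain-trees only for Scala 3, so default parameters are visible to derivation
+scalacOptions ++= {
+  CrossVersion.partialVersion(scalaVersion.value) match {
+    case Some((3, _)) => Seq("-Yretain-trees")
+    case _            => Seq.empty
+  }
+}
+```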
+
## ADTs
Say we extend our data model to include more data types
@@ -81,8 +83,41 @@ object Fruit {
"""{ "Apple": { "poison": false }}""".fromJson[Fruit]
```
+### String-based union types (Enum)
+The codecs support string-based union types out of the box. This is useful when the overhead of a full enum is not desired.
+
+```scala
+val appleOrBanana: "Apple" | "Banana" = "Apple"
+```
+Decoding succeeds because 'Apple' is a valid value
+```scala
+appleOrBanana.toJson
+"Apple".fromJson["Apple" | "Banana"]
+```
+Decoding fails because 'Pear' is not a valid value
+```scala
+"Peer".fromJson["Apple" | "Banana"]
+```
+
Almost all of the standard library data types are supported as fields on the case class, and it is easy to add support if one is missing.
+### Sealed families and enums for Scala 3
+Sealed families whose members are all objects, or Scala 3 enums whose cases are all parameterless, are interpreted as enumerations and encode 1:1 with their value names.
+```scala
+enum Foo derives JsonDecoder:
+ case Bar
+ case Baz
+ case Qux
+```
+or
+```scala
+sealed trait Foo derives JsonDecoder
+object Foo:
+ case object Bar extends Foo
+ case object Baz extends Foo
+ case object Qux extends Foo
+```
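+
+For example, a value decodes from a bare JSON string holding the case name (a minimal sketch using the `Foo` definitions above; the exact error message for an unknown name may differ):
+
+```scala
+"\"Bar\"".fromJson[Foo]  // Right(Bar)
+"\"Nope\"".fromJson[Foo] // Left(...)
+```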
+
## Manual instances
Sometimes it is easier to reuse an existing `JsonDecoder` rather than generate a new one. This can be accomplished using convenience methods on the `JsonDecoder` typeclass to *derive* new decoders
@@ -172,3 +207,293 @@ implicit val decodeName: JsonDecoder[String Refined NonEmpty] =
```
Now the code compiles.
+
+# Parsing custom JSON
+
+In this section we show several approaches for decoding JSON that looks like:
+
+```json
+{
+ "01. symbol": "IBM",
+ "02. open": "182.4300",
+ "03. high": "182.8000"
+}
+```
+
+Which we want to decode into the following case class:
+
+```scala mdoc
+final case class Quote(
+ symbol: String,
+ open: String,
+ high: String
+)
+```
+
+All approaches have the same result:
+
+```scala mdoc:fail
+"""{"01. symbol":"IBM","02. open": "182.4300","03. high": "182.8000"}""".fromJson[Quote]
+// >> Right(Quote(IBM,182.4300,182.8000))
+```
+
+## Approach 1: use annotation hints
+
+In this approach we enrich the case class with annotations to tell the derived decoder which field names to use.
+Obviously, this approach only works if we can/want to change the case class.
+
+```scala mdoc:reset
+import zio.json._
+
+final case class Quote(
+ @jsonField("01. symbol") symbol: String,
+ @jsonField("02. open") open: String,
+ @jsonField("03. high") high: String
+)
+
+object Quote {
+ implicit val decoder: JsonDecoder[Quote] = DeriveJsonDecoder.gen[Quote]
+}
+```
+
+## Approach 2: use an intermediate case class
+
+Instead of hints, we can also put the actual field names in an intermediate case class. In our example the field names
+are not valid Scala identifiers. We fix this by putting the names in backticks:
+
+```scala mdoc:reset
+import zio.json._
+
+final case class Quote(symbol: String, open: String, high: String)
+
+object Quote {
+ private final case class JsonQuote(
+ `01. symbol`: String,
+ `02. open`: String,
+ `03. high`: String
+ )
+
+ implicit val decoder: JsonDecoder[Quote] =
+ DeriveJsonDecoder
+ .gen[JsonQuote]
+ .map { case JsonQuote(s, o, h) => Quote(s, o, h) }
+}
+```
+
+## Approach 3: decode to JSON
+
+In this approach we first decode to the generic `Json` data structure. This approach is very flexible because it can
+extract data from any valid JSON.
+
+Note that this implementation is a bit sloppy. It uses `toString` on a JSON node. The node is not necessarily a
+string; it can be of any JSON type! So this might happily process JSON that doesn't match your expectations.
+
+```scala mdoc:reset
+import zio.json._
+import zio.json.ast.Json
+
+final case class Quote(symbol: String, open: String, high: String)
+
+object Quote {
+ implicit val decoder: JsonDecoder[Quote] = JsonDecoder[Json]
+ .mapOrFail {
+ case Json.Obj(fields) =>
+ def findField(name: String): Either[String, String] =
+ fields
+ .find(_._1 == name)
+ .map(_._2.toString()) // ⚠️ .toString on any JSON type
+ .toRight(left = s"Field '$name' is missing")
+
+ for {
+ symbol <- findField("01. symbol")
+ open <- findField("02. open")
+ high <- findField("03. high")
+ } yield Quote(symbol, open, high)
+ case _ =>
+ Left("Not a JSON record")
+ }
+}
+```
+
+## Approach 4: decode to JSON, use cursors
+
+Here we also first decode to `Json`, but now we use cursors to find the data we need. This time we do check that the fields
+are actually strings.
+
+```scala mdoc:reset
+import zio.json._
+import zio.json.ast.{Json, JsonCursor}
+
+final case class Quote(symbol: String, open: String, high: String)
+
+object Quote {
+ private val symbolC = JsonCursor.field("01. symbol") >>> JsonCursor.isString
+ private val openC = JsonCursor.field("02. open") >>> JsonCursor.isString
+ private val highC = JsonCursor.field("03. high") >>> JsonCursor.isString
+
+ implicit val decoder: JsonDecoder[Quote] = JsonDecoder[Json]
+ .mapOrFail { c =>
+ for {
+ symbol <- c.get(symbolC)
+ open <- c.get(openC)
+ high <- c.get(highC)
+ } yield Quote(symbol.value, open.value, high.value)
+ }
+}
+```
+
+# More custom decoder examples
+
+Let's consider an `Animal` case class with a `categories` field that should be a list of strings. However, some
+producers accidentally represent the categories as a comma-separated string instead of a proper list. We want to parse
+both cases.
+
+Here's a custom decoder for our `Animal` case class:
+
+```scala mdoc:reset
+import zio.Chunk
+import zio.json._
+import zio.json.ast._
+
+case class Animal(name: String, categories: List[String])
+
+object Animal {
+ private val nameC = JsonCursor.field("name") >>> JsonCursor.isString
+ private val categoryArrayC = JsonCursor.field("categories") >>> JsonCursor.isArray
+ private val categoryStringC = JsonCursor.field("categories") >>> JsonCursor.isString
+
+ implicit val decoder: JsonDecoder[Animal] = JsonDecoder[Json]
+ .mapOrFail { c =>
+ for {
+ name <- c.get(nameC).map(_.value)
+ categories <- arrayCategory(c).map(_.toList)
+ .orElse(c.get(categoryStringC).map(_.value.split(',').map(_.trim).toList))
+ } yield Animal(name, categories)
+ }
+
+ private def arrayCategory(c: Json): Either[String, Chunk[String]] =
+ c.get(categoryArrayC)
+ .flatMap { arr =>
+ // Get the string elements, and sequence the obtained eithers to a single either
+ sequence(arr.elements.map(_.get(JsonCursor.isString).map(_.value)))
+ }
+
+ private def sequence[A, B](chunk: Chunk[Either[A, B]]): Either[A, Chunk[B]] =
+ chunk.partition(_.isLeft) match {
+ case (Nil, rights) => Right(rights.collect { case Right(r) => r })
+ case (lefts, _) => Left(lefts.collect { case Left(l) => l }.head)
+ }
+}
+```
+
+And now, the Json decoder for Animal can handle both formats:
+```scala mdoc
+"""{"name": "Dog", "categories": "Warm-blooded, Mammal"}""".fromJson[Animal]
+// >> Right(Animal(Dog,List(Warm-blooded, Mammal)))
+"""{"name": "Snake", "categories": [ "Cold-blooded", "Reptile"]}""".fromJson[Animal]
+// >> Right(Animal(Snake,List(Cold-blooded, Reptile)))
+```
+
+# JSON AST and Cursors
+
+In most cases it is not necessary to work with the JSON AST directly;
+instead, it is more convenient to decode directly to domain objects.
+However, sometimes it is handy to work with a lower level representation of JSON.
+This may for example be the case when you need to work with deeply nested JSON structures
+that would result in deeply nested case classes,
+or when you expect a lot of variation in the JSON structure, which would result in nasty decoders.
+
+
+## JSON AST
+
+To get the AST representation of a JSON string, use the `fromJson[Json]` method.
+
+```scala mdoc
+import zio.json._
+import zio.json.ast._
+
+val jsonString: String = """{"name": "John Doe"}"""
+val jsonAst: Either[String, Json] = jsonString.fromJson[Json]
+```
+
+The `Json` type is a recursive data structure that can be navigated in a fairly straightforward way.
+
+```scala mdoc:reset
+
+import zio.Chunk
+import zio.json._
+import zio.json.ast.Json
+import zio.json.ast.Json._
+
+val jsonString: String = """{"name": "John Doe"}"""
+val jsonAst: Json = jsonString.fromJson[Json].toOption.get
+jsonAst match {
+ case Obj(fields: Chunk[(String, Json)]) => ()
+ case Arr(elements: Chunk[Json]) => ()
+ case Bool(value: Boolean) => ()
+ case Str(value: String) => ()
+ case Num(value: java.math.BigDecimal) => ()
+ case Json.Null => ()
+}
+```
+
+To get the `name` field, you could do the following:
+
+```scala mdoc
+import zio.json._
+import zio.json.ast.Json
+
+val json: Option[Json] = """{"name": "John Doe"}""".fromJson[Json].toOption
+val name: Option[String] = json.flatMap { json =>
+ json match {
+ case Json.Obj(fields) => fields.collectFirst { case ("name", Json.Str(name)) => name }
+ case _ => None
+ }
+}
+```
+
+## Cursors
+
+In practice, it is normally more convenient to use cursors to navigate the JSON AST.
+
+```scala mdoc:reset
+import zio.json._
+import zio.json.ast.Json
+import zio.json.ast.JsonCursor
+import zio.json.ast.Json.Str
+
+val json: Either[String, Json] = """{"name": "John Doe"}""".fromJson[Json]
+val cursor: JsonCursor[Json, Str] = JsonCursor.field("name").isString
+val name: Either[String, String] = json.flatMap(_.get(cursor).map(_.value))
+```
+
+Cursors can be composed to navigate more complex JSON structures.
+
+```scala mdoc
+import zio.json._
+import zio.json.ast.Json
+import zio.json.ast.JsonCursor
+
+val json1: Either[String, Json] = """{"posts": [{"id": 0, "title": "foo"}]}""".fromJson[Json]
+val json2: Either[String, Json] = """{"userPosts": [{"id": 1, "title": "bar"}]}""".fromJson[Json]
+
+val commonCursor =
+ JsonCursor.isArray >>>
+ JsonCursor.element(0) >>>
+ JsonCursor.isObject >>>
+ JsonCursor.field("title") >>>
+ JsonCursor.isString
+
+val cursor1 = JsonCursor.field("posts")
+val cursor2 = JsonCursor.field("userPosts")
+
+def getTitle(json: Either[String, Json]) =
+ for {
+ ast <- json
+ posts <- ast.get(cursor1).orElse(ast.get(cursor2))
+ title <- posts.get(commonCursor).map(_.value)
+ } yield title
+
+val title1 = getTitle(json1)
+val title2 = getTitle(json2)
+```
diff --git a/docs/encoding.md b/docs/encoding.md
index b43123d80..6c9f92d4a 100644
--- a/docs/encoding.md
+++ b/docs/encoding.md
@@ -55,6 +55,23 @@ apple.toJson
Almost all of the standard library data types are supported as fields on the case class, and it is easy to add support if one is missing.
+### Sealed families and enums for Scala 3
+Sealed families whose members are all objects, or Scala 3 enums whose cases are all parameterless, are interpreted as enumerations and encode 1:1 with their value names.
+```scala
+enum Foo derives JsonEncoder:
+ case Bar
+ case Baz
+ case Qux
+```
+or
+```scala
+sealed trait Foo derives JsonEncoder
+object Foo:
+ case object Bar extends Foo
+ case object Baz extends Foo
+ case object Qux extends Foo
+```
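+
+For example, a case encodes to a bare JSON string holding its name (a minimal sketch using the `Foo` definitions above):
+
+```scala
+val foo: Foo = Foo.Bar
+foo.toJson // "Bar"
+```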
+
## Manual instances
Sometimes it is easier to reuse an existing `JsonEncoder` rather than generate a new one. This can be accomplished using convenience methods on the `JsonEncoder` typeclass to *derive* new decoders:
diff --git a/docs/index.md b/docs/index.mdx
similarity index 66%
rename from docs/index.md
rename to docs/index.mdx
index 205c70050..45d54619d 100644
--- a/docs/index.md
+++ b/docs/index.mdx
@@ -4,6 +4,9 @@ title: "Getting Started with ZIO Json"
sidebar_label: "Getting Started"
---
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
[ZIO Json](https://github.com/zio/zio-json) is a fast and secure JSON library with tight ZIO integration.
@PROJECT_BADGES@
@@ -28,16 +31,37 @@ In order to use this library, we need to add the following line in our `build.sb
libraryDependencies += "dev.zio" %% "zio-json" % "@VERSION@"
```
+For cross-platform projects with Scala.js and Scala Native, you need to replace the `%%` operator with `%%%`,
+and, when using the `java.time.ZoneId` and `java.time.ZonedDateTime` types, additionally add
+a dependency on the latest version of the Timezone DB:
+
+```scala
+libraryDependencies += "io.github.cquiroz" %%% "scala-java-time-tzdb" % "latest.integration"
+```
+
## Example
Let's try a simple example of encoding and decoding JSON using ZIO JSON.
All the following code snippets assume that the following imports have been declared
+
+
+
```scala
import zio.json._
```
+
+
+
+```scala
+import zio.json.*
+```
+
+
+
+
Say we want to be able to read some JSON like
```json
@@ -47,9 +71,12 @@ Say we want to be able to read some JSON like
into a Scala `case class`
```scala
-case class Banana(curvature: Double)
+final case class Banana(curvature: Double)
```
+
+
+
To do this, we create an *instance* of the `JsonDecoder` typeclass for `Banana` using the `zio-json` code generator. It is best practice to put it on the companion of `Banana`, like so
```scala
@@ -58,7 +85,19 @@ object Banana {
}
```
-_Note: If you’re using Scala 3 and your case class is defining default parameters, `-Yretain-trees` needs to be added to `scalacOptions`._
+
+
+
+To do this, we derive an *instance* of the `JsonDecoder` typeclass for `Banana`.
+
+```scala
+final case class Banana(curvature: Double) derives JsonDecoder
+```
+
+Note: If your case class defines default parameters, `-Yretain-trees` needs to be added to `scalacOptions`.
+
+
+
Now we can parse JSON into our object
@@ -67,14 +106,29 @@ scala> """{"curvature":0.5}""".fromJson[Banana]
val res: Either[String, Banana] = Right(Banana(0.5))
```
-Likewise, to produce JSON from our data we define a `JsonEncoder`
+Likewise, to produce JSON from our data we derive a `JsonEncoder`
+
+
+
```scala
object Banana {
...
implicit val encoder: JsonEncoder[Banana] = DeriveJsonEncoder.gen[Banana]
}
+```
+
+
+
+```scala
+final case class Banana(curvature: Double) derives JsonEncoder
+```
+
+
+
+
+```
scala> Banana(0.5).toJson
val res: String = {"curvature":0.5}
@@ -89,19 +143,38 @@ And bad JSON will produce an error in `jq` syntax with an additional piece of co
```
scala> """{"curvature": womp}""".fromJson[Banana]
-val res: Either[String, Banana] = Left(.curvature(expected a number, got w))
+val res: Either[String, Banana] = Left(.curvature(expected a Double))
```
Say we extend our data model to include more data types
+
+
+
```scala
sealed trait Fruit
-case class Banana(curvature: Double) extends Fruit
-case class Apple (poison: Boolean) extends Fruit
+final case class Banana(curvature: Double) extends Fruit
+final case class Apple (poison: Boolean) extends Fruit
+```
+
+
+
+
+```scala
+enum Fruit {
+ case Banana(curvature: Double)
+ case Apple (poison: Boolean)
+}
```
+
+
+
we can generate the encoder and decoder for the entire `sealed` family
+
+
+
```scala
object Fruit {
implicit val decoder: JsonDecoder[Fruit] = DeriveJsonDecoder.gen[Fruit]
@@ -109,6 +182,19 @@ object Fruit {
}
```
+
+
+
+```scala
+enum Fruit derives JsonCodec {
+ case Banana(curvature: Double)
+ case Apple (poison: Boolean)
+}
+```
+
+
+
+
allowing us to load the fruit based on a single field type tag in the JSON
```
@@ -121,6 +207,9 @@ val res: Either[String, Fruit] = Right(Apple(false))
Almost all of the standard library data types are supported as fields on the case class, and it is easy to add support if one is missing.
+
+
+
```scala mdoc:compile-only
import zio.json._
@@ -147,6 +236,33 @@ malformedJson.fromJson[Fruit]
List(Apple(false), Banana(0.4)).toJsonPretty
```
+
+
+
+```scala
+import zio.json.*
+
+enum Fruit derives JsonCodec {
+ case Banana(curvature: Double)
+ case Apple(poison: Boolean)
+}
+
+export Fruit.*
+
+val json1 = """{ "Banana":{ "curvature":0.5 }}"""
+val json2 = """{ "Apple": { "poison": false }}"""
+val malformedJson = """{ "Banana":{ "curvature": true }}"""
+
+json1.fromJson[Fruit]
+json2.fromJson[Fruit]
+malformedJson.fromJson[Fruit]
+
+List(Apple(false), Banana(0.4)).toJsonPretty
+```
+
+
+
+
# How
Extreme **performance** is achieved by decoding JSON directly from the input source into business objects (inspired by [plokhotnyuk](https://github.com/plokhotnyuk/jsoniter-scala)). Although not a requirement, the latest advances in [Java Loom](https://wiki.openjdk.java.net/display/loom/Main) can be used to support arbitrarily large payloads with near-zero overhead.
diff --git a/docs/security.md b/docs/security.md
index 194b5cfda..051b6b342 100644
--- a/docs/security.md
+++ b/docs/security.md
@@ -94,4 +94,4 @@ circe 4529 ( 7456) 2037 (1533)
This attack is very effective in schemas with lots of numbers, causing ops/sec to be halved with a 33% increase in memory usage.
-`zio-json` is resistant to a wide range of number based attacks because it uses a from-scratch number parser that will exit early when the number of bits of any number exceeds 128 bits, which can be customized by the system property `zio.json.number.bits`.
+`zio-json` is resistant to a wide range of number based attacks because it uses a from-scratch number parser that will exit early when the number of bits of any number exceeds 256 bits.
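+
+As a rough illustration of the behaviour described above (the exact error message is not asserted here), a pathologically long number should be rejected rather than parsed:
+
+```scala
+import zio.json._
+
+// ~1000 decimal digits is far more than 256 bits of significand, so decoding is
+// expected to return a Left instead of burning unbounded CPU and memory.
+val hugeNumber = "9" * 1000
+val result: Either[String, java.math.BigDecimal] = hugeNumber.fromJson[java.math.BigDecimal]
+// expected: Left(...)
+```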
diff --git a/examples/interop-http4s/build.sbt b/examples/interop-http4s/build.sbt
index 3287077a3..2cc159389 100644
--- a/examples/interop-http4s/build.sbt
+++ b/examples/interop-http4s/build.sbt
@@ -3,8 +3,8 @@ val ZioJsonVersion = "0.1.3+8-6eb41b5a-SNAPSHOT"
lazy val zioJsonHttp4sExample = (project in file("."))
.settings(
- name := "zio-json-http4s-example",
- version := "1.0",
+ name := "zio-json-http4s-example",
+ version := "1.0",
scalaVersion := "2.13.5",
scalacOptions ++= Seq("-Xlint:_"),
// Only required when using a zio-json snapshot
diff --git a/examples/zio-json-golden/build.sbt b/examples/zio-json-golden/build.sbt
new file mode 100644
index 000000000..7a057ec43
--- /dev/null
+++ b/examples/zio-json-golden/build.sbt
@@ -0,0 +1,2 @@
+scalaVersion := "2.13.17"
+libraryDependencies += "dev.zio" %% "zio-json-golden" % "0.7.8"
diff --git a/examples/zio-json-golden/project/build.properties b/examples/zio-json-golden/project/build.properties
new file mode 100644
index 000000000..01a16ed14
--- /dev/null
+++ b/examples/zio-json-golden/project/build.properties
@@ -0,0 +1 @@
+sbt.version=1.11.7
diff --git a/examples/zio-json-golden/src/test/scala/EncodeDecodeSpec.scala b/examples/zio-json-golden/src/test/scala/EncodeDecodeSpec.scala
new file mode 100644
index 000000000..06bf26683
--- /dev/null
+++ b/examples/zio-json-golden/src/test/scala/EncodeDecodeSpec.scala
@@ -0,0 +1,15 @@
+import zio.json._
+import zio.json.golden._
+import zio.test._
+import zio.test.magnolia.DeriveGen
+
+object EncodeDecodeSpec extends ZIOSpecDefault {
+ case class Banana(curvature: Double)
+ object Banana {
+ implicit val codec: JsonCodec[Banana] = DeriveJsonCodec.gen[Banana]
+ }
+
+ def spec = suite("EncodeDecodeSpec")(
+ goldenTest(DeriveGen[Banana])
+ )
+}
diff --git a/project/BuildHelper.scala b/project/BuildHelper.scala
index 566e0df30..4e5d40d3b 100644
--- a/project/BuildHelper.scala
+++ b/project/BuildHelper.scala
@@ -1,28 +1,23 @@
-import explicitdeps.ExplicitDepsPlugin.autoImport._
-import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._
-import sbt.Keys._
-import sbt._
-import sbtbuildinfo.BuildInfoKeys._
-import sbtbuildinfo._
-import sbtcrossproject.CrossPlugin.autoImport._
+import com.typesafe.tools.mima.core.Problem
+import com.typesafe.tools.mima.core.ProblemFilters.exclude
+import com.typesafe.tools.mima.plugin.MimaKeys.{ mimaBinaryIssueFilters, mimaFailOnProblem, mimaPreviousArtifacts }
+import com.typesafe.tools.mima.plugin.MimaPlugin.autoImport.mimaCheckDirection
+import explicitdeps.ExplicitDepsPlugin.autoImport.*
+import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport.*
+import sbt.*
+import sbt.Keys.*
+import sbtbuildinfo.*
+import sbtbuildinfo.BuildInfoKeys.*
+import sbtcrossproject.CrossPlugin.autoImport.*
+import sbtdynver.DynVerPlugin.autoImport.previousStableVersion
+import scalafix.sbt.ScalafixPlugin.autoImport.scalafixSemanticdb
-object BuildHelper {
- private val versions: Map[String, String] = {
- import org.snakeyaml.engine.v2.api.{ Load, LoadSettings }
- import java.util.{ List => JList, Map => JMap }
- import scala.jdk.CollectionConverters._
-
- val doc = new Load(LoadSettings.builder().build())
- .loadFromReader(scala.io.Source.fromFile(".github/workflows/ci.yml").bufferedReader())
- val yaml = doc.asInstanceOf[JMap[String, JMap[String, JMap[String, JMap[String, JMap[String, JList[String]]]]]]]
- val list = yaml.get("jobs").get("test").get("strategy").get("matrix").get("scala").asScala
- list.map(v => (v.split('.').take(2).mkString("."), v)).toMap
- }
- val Scala212: String = versions("2.12")
- val Scala213: String = versions("2.13")
- val ScalaDotty: String = "3.3.0"
+import scala.scalanative.sbtplugin.ScalaNativePlugin.autoImport.*
- val SilencerVersion = "1.7.13"
+object BuildHelper {
+ val Scala212: String = "2.12.20"
+ val Scala213: String = "2.13.17"
+ val Scala3: String = "3.3.7"
private val stdOptions = Seq(
"-deprecation",
@@ -32,7 +27,8 @@ object BuildHelper {
"-unchecked"
) ++ {
if (sys.env.contains("CI")) {
- Seq("-Xfatal-warnings")
+ // Seq("-Xfatal-warnings") // enable this when we are ready to enforce this
+ Nil
} else {
Nil // to enable Scalafix locally
}
@@ -58,27 +54,27 @@ object BuildHelper {
def buildInfoSettings(packageName: String) =
Seq(
- buildInfoKeys := Seq[BuildInfoKey](organization, moduleName, name, version, scalaVersion, sbtVersion, isSnapshot),
+ buildInfoKeys := Seq[BuildInfoKey](organization, moduleName, name, version, scalaVersion, sbtVersion, isSnapshot),
buildInfoPackage := packageName
)
val dottySettings = Seq(
- crossScalaVersions += ScalaDotty,
+ crossScalaVersions += Scala3,
scalacOptions ++= {
- if (scalaVersion.value == ScalaDotty)
+ if (scalaVersion.value == Scala3)
Seq("-noindent")
else
Seq()
},
scalacOptions --= {
- if (scalaVersion.value == ScalaDotty)
+ if (scalaVersion.value == Scala3)
Seq("-Xfatal-warnings")
else
Seq()
},
Compile / doc / sources := {
val old = (Compile / doc / sources).value
- if (scalaVersion.value == ScalaDotty) {
+ if (scalaVersion.value == Scala3) {
Nil
} else {
old
@@ -86,7 +82,7 @@ object BuildHelper {
},
Test / parallelExecution := {
val old = (Test / parallelExecution).value
- if (scalaVersion.value == ScalaDotty) {
+ if (scalaVersion.value == Scala3) {
false
} else {
old
@@ -94,10 +90,6 @@ object BuildHelper {
}
)
- val scalaReflectSettings = Seq(
- libraryDependencies ++= Seq("dev.zio" %%% "izumi-reflect" % "1.0.0-M10")
- )
-
// Keep this consistent with the version in .core-tests/shared/src/test/scala/REPLSpec.scala
val replSettings = makeReplSettings {
"""|import zio._
@@ -139,14 +131,15 @@ object BuildHelper {
def extraOptions(scalaVersion: String, optimize: Boolean) =
CrossVersion.partialVersion(scalaVersion) match {
- case Some((3, 1)) =>
+ case Some((3, _)) =>
Seq(
"-language:implicitConversions",
"-Xignore-scala2-macros"
)
case Some((2, 13)) =>
Seq(
- "-Ywarn-unused:params,-implicits"
+ "-Ywarn-unused:params,-implicits",
+ "-Wconf:msg=Boolean literals should be passed:s"
) ++ std2xOptions ++ optimizerOptions(optimize)
case Some((2, 12)) =>
Seq(
@@ -210,29 +203,34 @@ object BuildHelper {
)
def stdSettings(prjName: String) = Seq(
- name := s"$prjName",
- crossScalaVersions := Seq(Scala212, Scala213, ScalaDotty),
- ThisBuild / scalaVersion := Scala213,
+ name := s"$prjName",
+ crossScalaVersions := Seq(Scala212, Scala213, Scala3),
scalacOptions ++= stdOptions ++ extraOptions(scalaVersion.value, optimize = !isSnapshot.value),
libraryDependencies ++= {
- if (scalaVersion.value == ScalaDotty)
- Seq(
- "com.github.ghik" % s"silencer-lib_$Scala213" % SilencerVersion % Provided
- )
+ if (scalaVersion.value == Scala3) Seq.empty
else
Seq(
- "com.github.ghik" % "silencer-lib" % SilencerVersion % Provided cross CrossVersion.full,
- compilerPlugin("com.github.ghik" % "silencer-plugin" % SilencerVersion cross CrossVersion.full),
- compilerPlugin("org.typelevel" %% "kind-projector" % "0.13.2" cross CrossVersion.full)
+ compilerPlugin("org.typelevel" %% "kind-projector" % "0.13.4" cross CrossVersion.full)
)
},
- semanticdbEnabled := scalaVersion.value != ScalaDotty, // enable SemanticDB
- semanticdbOptions += "-P:semanticdb:synthetics:on",
- semanticdbVersion := "4.8.7",
+ versionScheme := Some("early-semver"),
+ semanticdbEnabled := scalaVersion.value == Scala213, // enable SemanticDB
+ semanticdbOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
+ case Some((2, _)) => Seq("-P:semanticdb:synthetics:on")
+ case _ => Seq.empty
+ }),
+ semanticdbVersion := scalafixSemanticdb.revision, // use Scalafix compatible version,
Test / parallelExecution := true,
incOptions ~= (_.withLogRecompileOnMacro(false)),
autoAPIMappings := true,
- unusedCompileDependenciesFilter -= moduleFilter("org.scala-js", "scalajs-library")
+ unusedCompileDependenciesFilter -= moduleFilter("org.scala-js", "scalajs-library"),
+ mimaPreviousArtifacts := previousStableVersion.value.map(organization.value %% name.value % _).toSet,
+ mimaCheckDirection := "backward", // TODO: find how we can use "both" for patch versions of 1.x releases
+ mimaBinaryIssueFilters ++= Seq(
+ exclude[Problem]("zio.json.internal.*"),
+ exclude[Problem]("zio.json.yaml.internal.*")
+ ),
+ mimaFailOnProblem := true
)
def macroExpansionSettings = Seq(
@@ -254,7 +252,7 @@ object BuildHelper {
def macroDefinitionSettings = Seq(
scalacOptions += "-language:experimental.macros",
libraryDependencies ++= {
- if (scalaVersion.value == ScalaDotty) Seq()
+ if (scalaVersion.value == Scala3) Seq()
else
Seq(
"org.scala-lang" % "scala-reflect" % scalaVersion.value % "provided",
@@ -263,20 +261,24 @@ object BuildHelper {
}
)
- def jsSettings = Seq(
- libraryDependencies += "io.github.cquiroz" %%% "scala-java-time" % "2.2.2",
- libraryDependencies += "io.github.cquiroz" %%% "scala-java-time-tzdb" % "2.2.2"
- )
+ val scalaJavaTimeVersion = "2.6.0"
def nativeSettings = Seq(
- Test / skip := true,
- doc / skip := true,
- Compile / doc / sources := Seq.empty
+ nativeConfig ~= { cfg =>
+ import scala.scalanative.build.Mode
+
+ val os = System.getProperty("os.name").toLowerCase
+ // For some unknown reason, we can't run the test suites in debug mode on MacOS
+ if (os.contains("mac")) cfg.withMode(Mode.releaseFast)
+ else cfg
+ },
+ scalacOptions += "-P:scalanative:genStaticForwardersForNonTopLevelObjects",
+ Test / fork := false
)
val scalaReflectTestSettings: List[Setting[_]] = List(
libraryDependencies ++= {
- if (scalaVersion.value == ScalaDotty)
+ if (scalaVersion.value == Scala3)
Seq("org.scala-lang" % "scala-reflect" % Scala213 % Test)
else
Seq("org.scala-lang" % "scala-reflect" % scalaVersion.value % Test)
@@ -284,12 +286,10 @@ object BuildHelper {
)
def welcomeMessage = onLoadMessage := {
- import scala.Console
-
- def header(text: String): String = s"${Console.RED}$text${Console.RESET}"
+ def header(text: String): String = s"${scala.Console.RED}$text${scala.Console.RESET}"
- def item(text: String): String = s"${Console.GREEN}> ${Console.CYAN}$text${Console.RESET}"
- def subItem(text: String): String = s" ${Console.YELLOW}> ${Console.CYAN}$text${Console.RESET}"
+ def item(text: String): String = s"${scala.Console.GREEN}> ${scala.Console.CYAN}$text${scala.Console.RESET}"
+ def subItem(text: String): String = s" ${scala.Console.YELLOW}> ${scala.Console.CYAN}$text${scala.Console.RESET}"
s"""|${header(" ________ ___")}
|${header("|__ /_ _/ _ \\")}
diff --git a/project/NeoJmhPlugin.scala b/project/NeoJmhPlugin.scala
index 05a51e66e..45f4571d1 100644
--- a/project/NeoJmhPlugin.scala
+++ b/project/NeoJmhPlugin.scala
@@ -1,9 +1,8 @@
package fommil
-import sbt._
-import sbt.Keys._
+import sbt.*
+import sbt.Keys.*
-import scala.collection.immutable.Set
import scala.util.Try
object NeoJmhKeys {
@@ -27,11 +26,10 @@ object NeoJmhKeys {
}
/**
- * https://github.com/ktoso/sbt-jmh/ rewritten as an idiomatic sbt
- * Configuration (not requiring a separate Project).
+ * https://github.com/ktoso/sbt-jmh/ rewritten as an idiomatic sbt Configuration (not requiring a separate Project).
*/
object NeoJmhPlugin extends AutoPlugin {
- import NeoJmhKeys._
+ import NeoJmhKeys.*
val autoImport = NeoJmhKeys
val JmhInternal = (config("jmh-internal") extend Test).hide
@@ -45,16 +43,16 @@ object NeoJmhPlugin extends AutoPlugin {
override def projectConfigurations = Seq(Jmh, JmhInternal)
override def buildSettings = Seq(
- jmhVersion := "1.36",
+ jmhVersion := "1.37",
jmhExtrasVersion := "0.3.7"
)
override def projectSettings =
inConfig(Jmh)(
Defaults.testSettings ++ Seq(
- run := (run in JmhInternal).evaluated,
+ run := (JmhInternal / run).evaluated,
neoJmhGenerator := "reflection",
- neoJmhYourkit := Nil,
+ neoJmhYourkit := Nil,
javaOptions ++= Seq(
"-XX:+PerfDisableSharedMem",
"-XX:+AlwaysPreTouch",
@@ -71,11 +69,11 @@ object NeoJmhPlugin extends AutoPlugin {
)
) ++ inConfig(JmhInternal)(
Defaults.testSettings ++ Seq(
- javaOptions := (javaOptions in Jmh).value,
- envVars := (envVars in Jmh).value,
- mainClass in run := Some("org.openjdk.jmh.Main"),
- fork in run := true,
- dependencyClasspath ++= (fullClasspath in Jmh).value,
+ javaOptions := (Jmh / javaOptions).value,
+ envVars := (Jmh / envVars).value,
+ run / mainClass := Some("org.openjdk.jmh.Main"),
+ run / fork := true,
+ dependencyClasspath ++= (Jmh / fullClasspath).value,
sourceGenerators += generateJmhSourcesAndResources.map { case (sources, _) =>
sources
},
@@ -106,26 +104,26 @@ object NeoJmhPlugin extends AutoPlugin {
def backCompatProjectSettings: Seq[Setting[_]] = Seq(
// WORKAROUND https://github.com/sbt/sbt/issues/3935
- dependencyClasspathAsJars in NeoJmhPlugin.JmhInternal ++= (fullClasspathAsJars in NeoJmhKeys.Jmh).value
+ NeoJmhPlugin.JmhInternal / dependencyClasspathAsJars ++= (NeoJmhKeys.Jmh / fullClasspathAsJars).value
)
def generateBenchmarkSourcesAndResources: Def.Initialize[Task[(Seq[File], Seq[File])]] = Def.task {
val s = streams.value
val cacheDir = crossTarget.value / "jmh-cache"
- val bytecodeDir = (classDirectory in Jmh).value
+ val bytecodeDir = (Jmh / classDirectory).value
val sourceDir = sourceManaged.value
val resourceDir = resourceManaged.value
- val generator = (neoJmhGenerator in Jmh).value
+ val generator = (Jmh / neoJmhGenerator).value
val classpath = dependencyClasspath.value
- val javaHomeV = (javaHome in Jmh).value
- val outputStrategyV = (outputStrategy in Jmh).value
- val workingDirectory = Option((baseDirectory in Jmh).value)
- val connectInputV = (connectInput in Jmh).value
- val envVarsV = (envVars in Jmh).value
- val javaFlags = (javaOptions in Jmh).value.toVector
+ val javaHomeV = (Jmh / javaHome).value
+ val outputStrategyV = (Jmh / outputStrategy).value
+ val workingDirectory = Option((Jmh / baseDirectory).value)
+ val connectInputV = (Jmh / connectInput).value
+ val envVarsV = (Jmh / envVars).value
+ val javaFlags = (Jmh / javaOptions).value.toVector
val inputs: Set[File] = (bytecodeDir ** "*").filter(_.isFile).get.toSet
- val cachedGeneration = FileFunction.cached(cacheDir, FilesInfo.hash) { _ =>
+ val cachedGeneration = FileFunction.cached(cacheDir, FilesInfo.hash) { _ =>
IO.delete(sourceDir)
IO.createDirectory(sourceDir)
IO.delete(resourceDir)
diff --git a/project/build.properties b/project/build.properties
index 304098715..a360ccac1 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.9.4
+sbt.version = 1.11.7
diff --git a/project/plugins.sbt b/project/plugins.sbt
index a12b7bd90..d359af9f5 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,15 +1,17 @@
-addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0")
-addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.11")
+addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.4")
+addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.13.1")
+addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.11.2")
addSbtPlugin("com.github.cb372" % "sbt-explicit-dependencies" % "0.3.1")
-addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1")
-addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.10.0")
-addSbtPlugin("org.portable-scala" % "sbt-scala-native-crossproject" % "1.3.1")
-addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.1")
-addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.13.2")
-addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.14")
-addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0")
+addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.4")
+addSbtPlugin("com.github.sbt" % "sbt-header" % "5.11.0")
+addSbtPlugin("org.portable-scala" % "sbt-scala-native-crossproject" % "1.3.2")
+addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2")
+addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.20.1")
+addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.5.9")
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.4")
addSbtPlugin("pl.project13.scala" % "sbt-jcstress" % "0.2.0")
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7")
-addSbtPlugin("dev.zio" % "zio-sbt-website" % "0.3.10")
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.4.0")
+addSbtPlugin("dev.zio" % "zio-sbt-website" % "0.4.0-alpha.35")
+addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.14.4")
-libraryDependencies += "org.snakeyaml" % "snakeyaml-engine" % "2.7"
+libraryDependencies += "org.snakeyaml" % "snakeyaml-engine" % "2.10"
diff --git a/zio-json-golden/src/main/scala/zio/json/golden/filehelpers.scala b/zio-json-golden/src/main/scala/zio/json/golden/filehelpers.scala
index 21a57da4b..db677d410 100644
--- a/zio-json-golden/src/main/scala/zio/json/golden/filehelpers.scala
+++ b/zio-json-golden/src/main/scala/zio/json/golden/filehelpers.scala
@@ -1,12 +1,10 @@
package zio.json.golden
import java.io.{ File, IOException }
-import java.nio.file.{ Path }
+import java.nio.file.Path
import zio.{ test => _, _ }
import zio.json._
-import zio.stacktracer.TracingImplicits.disableAutoTrace
-
import java.nio.file.Files
object filehelpers {
@@ -16,9 +14,11 @@ object filehelpers {
else ZIO.attempt(file.getParentFile).flatMap(getRootDir)
def createGoldenDirectory(pathToDir: String)(implicit trace: Trace): Task[Path] = {
- val _ = disableAutoTrace // TODO: Find a way to suppress the unused import warning
- val rootFile = new File(getClass.getResource("/").toURI)
-
+ val rootFile =
+ try new File(getClass.getResource("/").toURI)
+ catch { // fixes "java.lang.IllegalArgumentException: URI is not hierarchical" in unit tests with Scala 2.12.20
+ case _: IllegalArgumentException => new File(getClass.getResource(".").toURI)
+ }
for {
baseFile <- getRootDir(rootFile)
goldenDir = new File(baseFile.getParentFile, pathToDir)
diff --git a/zio-json-golden/src/main/scala/zio/json/golden/package.scala b/zio-json-golden/src/main/scala/zio/json/golden/package.scala
index 8a8cbc4c3..63da153ca 100644
--- a/zio-json-golden/src/main/scala/zio/json/golden/package.scala
+++ b/zio-json-golden/src/main/scala/zio/json/golden/package.scala
@@ -1,7 +1,5 @@
package zio.json
-import scala.annotation.nowarn
-
import zio.Tag
import zio.{ test => _, _ }
import zio.json.golden.filehelpers._
@@ -27,7 +25,7 @@ package object golden {
else DiffResult.Different(x, y)
}
- @nowarn implicit private lazy val diff: Diff[GoldenSample] = (x: GoldenSample, y: GoldenSample) =>
+ implicit private lazy val diff: Diff[GoldenSample] = (x: GoldenSample, y: GoldenSample) =>
Diff[Json].diff(x.samples, y.samples)
def goldenTest[A: Tag: JsonEncoder](
@@ -44,7 +42,7 @@ package object golden {
resourceDir <- createGoldenDirectory(s"src/test/resources/golden/$relativePath")
fileName = Paths.get(s"$name.json")
filePath = resourceDir.resolve(fileName)
- assertion <- ZIO.ifZIO(ZIO.attemptBlocking(Files.exists(filePath)))(
+ assertion <- ZIO.ifZIO(ZIO.attemptBlocking(Files.exists(filePath)))(
validateTest(resourceDir, name, gen, sampleSize),
createNewTest(resourceDir, name, gen, sampleSize)
)
@@ -64,7 +62,7 @@ package object golden {
for {
currentSample <- readSampleFromFile(filePath)
sample <- generateSample(gen, sampleSize)
- assertion <- if (sample == currentSample) {
+ assertion <- if (sample == currentSample) {
ZIO.succeed(assertTrue(sample == currentSample))
} else {
val diffFileName = Paths.get(s"${name}_changed.json")
@@ -89,7 +87,7 @@ package object golden {
for {
sample <- generateSample(gen, sampleSize)
- _ <-
+ _ <-
ZIO
.ifZIO(ZIO.attemptBlocking(Files.exists(filePath)))(ZIO.unit, ZIO.attemptBlocking(Files.createFile(filePath)))
_ <- writeSampleToFile(filePath, sample)
@@ -97,21 +95,20 @@ package object golden {
} yield TestResult(assertion)
}
+ /**
+ * Implementation inspired by zio-test [[zio.test#check]]
+ */
private def generateSample[A: JsonEncoder](
gen: Gen[Sized, A],
sampleSize: Int
)(implicit trace: Trace): ZIO[Sized, Exception, GoldenSample] =
- Gen
- .listOfN(sampleSize)(gen)
- .sample
+ gen.sample.forever
.map(_.value)
- .map { elements =>
- val jsonElements = elements.map(_.toJsonAST).collect { case Right(a) => a }
- val jsonArray = new Json.Arr(Chunk.fromIterable(jsonElements))
- GoldenSample(jsonArray)
- }
- .runHead
- .someOrFailException
+ .map(_.toJsonAST)
+ .collectRight
+ .take(sampleSize.toLong)
+ .runCollect
+ .map(jsonElements => GoldenSample(new Json.Arr(jsonElements)))
private def getName[A](implicit tag: Tag[A]): String =
tag.tag.shortName
diff --git a/zio-json-golden/src/test/resources/golden/filteredgentype/FilteredGenType.json b/zio-json-golden/src/test/resources/golden/filteredgentype/FilteredGenType.json
new file mode 100644
index 000000000..f5eed5eb3
--- /dev/null
+++ b/zio-json-golden/src/test/resources/golden/filteredgentype/FilteredGenType.json
@@ -0,0 +1,304 @@
+{
+ "samples" : [
+ {
+ "a" : -2.316566882566080907224842077045326E+76
+ },
+ {
+ "a" : -4.569412983810212758054894223077703E+76
+ },
+ {
+ "a" : 5.228989702464284595177704658722007E+76
+ },
+ {
+ "a" : 2.548479930525632094981978203969745E+76
+ },
+ {
+ "a" : 1.374041878513211278247992702527512E+76
+ },
+ {
+ "a" : 1.366736473169616603716555534546644E+76
+ },
+ {
+ "a" : 3.911179114719388819861885552560843E+76
+ },
+ {
+ "a" : 1.944890681930929342312977988211045E+76
+ },
+ {
+ "a" : 4.465885331138848906986116797747655E+76
+ },
+ {
+ "a" : 3.006637389783933263445463075262541E+76
+ },
+ {
+ "a" : -5.751649455858367893815313696913694E+76
+ },
+ {
+ "a" : -1.784887552001718903374836951202540E+76
+ },
+ {
+ "a" : -4.910947695145483225976445529943760E+76
+ },
+ {
+ "a" : -1.457349100691995363983649450959469E+76
+ },
+ {
+ "a" : -2.173117328253411917053167250124104E+76
+ },
+ {
+ "a" : -3.816709640530119614187277451466508E+76
+ },
+ {
+ "a" : -3.324016269398039518561448095600154E+76
+ },
+ {
+ "a" : 2.133340242534658987737410780967371E+76
+ },
+ {
+ "a" : 2.899391869007164686595655383666939E+75
+ },
+ {
+ "a" : 1.289089776333850543053015914013214E+75
+ },
+ {
+ "a" : -3.245057840870445841628197448179636E+75
+ },
+ {
+ "a" : 4.164835378606767954973150783579233E+76
+ },
+ {
+ "a" : -3.770560172944845271964280390072137E+76
+ },
+ {
+ "a" : 5.112438700963875953568847807288529E+76
+ },
+ {
+ "a" : -4.522549082517651900179843809145666E+76
+ },
+ {
+ "a" : 4.412399638137604448664402635608744E+76
+ },
+ {
+ "a" : 1.514016982543592890366775343872403E+76
+ },
+ {
+ "a" : -8.440138198169034012113061607781643E+75
+ },
+ {
+ "a" : 8.692134402815438942477068350566858E+75
+ },
+ {
+ "a" : 1.784909366617606219860919821779445E+75
+ },
+ {
+ "a" : -3.207891411756439559673859312030194E+76
+ },
+ {
+ "a" : 4.617070445357631467547656360854253E+76
+ },
+ {
+ "a" : 3.901654475403585079437303428377294E+75
+ },
+ {
+ "a" : 1.809738335539952279801907689220288E+75
+ },
+ {
+ "a" : 1.867508850719084820991569833616372E+76
+ },
+ {
+ "a" : -4.487014217306435076482810842738905E+76
+ },
+ {
+ "a" : -3.483434481661576170222377673573347E+76
+ },
+ {
+ "a" : -3.867693189336099364999112808463955E+76
+ },
+ {
+ "a" : 3.097928175532344917894163172264977E+76
+ },
+ {
+ "a" : -5.057812661184695904653860723677742E+76
+ },
+ {
+ "a" : 4.968641799647727075789350301886275E+75
+ },
+ {
+ "a" : -1.496654796873222027485144992012494E+76
+ },
+ {
+ "a" : -2.496076752945529018769889843804460E+76
+ },
+ {
+ "a" : -3.067118696151703815163256919101265E+76
+ },
+ {
+ "a" : 3.412873919590638540319289335182708E+76
+ },
+ {
+ "a" : 1.480433219806823809604660703733397E+76
+ },
+ {
+ "a" : -6.339658483521043521286676489621022E+75
+ },
+ {
+ "a" : -4.114346648557266778860963307677252E+76
+ },
+ {
+ "a" : 8.097697147044163995162055017419827E+75
+ },
+ {
+ "a" : 2.214651974619448279825666523589333E+76
+ },
+ {
+ "a" : -3.574516354020052112075424495657422E+76
+ },
+ {
+ "a" : 1.104360909238067990458557751532528E+76
+ },
+ {
+ "a" : 1.399684847297342561050267725918531E+76
+ },
+ {
+ "a" : 2.502156861243295972203472905374441E+76
+ },
+ {
+ "a" : -1.604836447654289039403981953467744E+76
+ },
+ {
+ "a" : 1.658811782314510920715101305263320E+76
+ },
+ {
+ "a" : 5.139354255763948897207867902940651E+76
+ },
+ {
+ "a" : 3.575018689737287736972267117825426E+76
+ },
+ {
+ "a" : 5.210629430128216788313314937720483E+76
+ },
+ {
+ "a" : 2.554485802500375585530452513390781E+76
+ },
+ {
+ "a" : 5.272520867084473865402574088199686E+76
+ },
+ {
+ "a" : 2.947116531100159457188114248938062E+76
+ },
+ {
+ "a" : 5.606673728406112706411336596318572E+76
+ },
+ {
+ "a" : -2.738195022969698014068611960296754E+76
+ },
+ {
+ "a" : -4.988632907332175914225601107044307E+76
+ },
+ {
+ "a" : 4.333006311262914052378138242825969E+76
+ },
+ {
+ "a" : 1.811966148571775577237795257899742E+76
+ },
+ {
+ "a" : 3.532034121631590266281611826625215E+76
+ },
+ {
+ "a" : -3.030563670404741512818043914917342E+76
+ },
+ {
+ "a" : 3.296229785246419707095000587083274E+76
+ },
+ {
+ "a" : -2.181511118190992226401258245868472E+75
+ },
+ {
+ "a" : -5.248524012170844440886915873124918E+76
+ },
+ {
+ "a" : -2.710770101040726300947437430936575E+76
+ },
+ {
+ "a" : -1.943466455385574430218910331374480E+76
+ },
+ {
+ "a" : -1.374906724157606925294570143297134E+76
+ },
+ {
+ "a" : 4.043622418717921248146223721034532E+76
+ },
+ {
+ "a" : 2.647781320119073554660652286568971E+76
+ },
+ {
+ "a" : 4.785326654831060667212354593166055E+76
+ },
+ {
+ "a" : -2.393552079769908150471681607119720E+76
+ },
+ {
+ "a" : -5.137463583987553425511474469196396E+74
+ },
+ {
+ "a" : 1.067970096951755185859241750338926E+76
+ },
+ {
+ "a" : 4.462068429365855045664774983601338E+76
+ },
+ {
+ "a" : 4.632622188172519290610657636467222E+76
+ },
+ {
+ "a" : 2.461239593255391819307695081683695E+76
+ },
+ {
+ "a" : 1.729396380088878041781707741829649E+76
+ },
+ {
+ "a" : 4.211133948578506902750612745348381E+76
+ },
+ {
+ "a" : 4.588026434593706190135184314006573E+76
+ },
+ {
+ "a" : 1.463113899048768702370675156356965E+76
+ },
+ {
+ "a" : -6.211105372002631227089153408882691E+75
+ },
+ {
+ "a" : -1.067219487693268067286110477325699E+76
+ },
+ {
+ "a" : -3.186432400285580393171287587355880E+76
+ },
+ {
+ "a" : -1.698453401490616715439518694710733E+76
+ },
+ {
+ "a" : -4.746614822847365827886773917395646E+76
+ },
+ {
+ "a" : 5.405374454168044475426989745354982E+76
+ },
+ {
+ "a" : 3.493241979655892720402456230475866E+76
+ },
+ {
+ "a" : -4.923252250897307532574828650812151E+76
+ },
+ {
+ "a" : -4.011060043301704192745191895787696E+76
+ },
+ {
+ "a" : -5.689109292584424095652541096534457E+76
+ },
+ {
+ "a" : 1.636983807742317199740108642526352E+76
+ },
+ {
+ "a" : -2.083099272612476220093157442600918E+75
+ }
+ ]
+}
\ No newline at end of file
diff --git a/zio-json-golden/src/test/scala/zio/json/golden/GoldenSpec.scala b/zio-json-golden/src/test/scala/zio/json/golden/GoldenSpec.scala
index 42547fc16..4f609e433 100644
--- a/zio-json-golden/src/test/scala/zio/json/golden/GoldenSpec.scala
+++ b/zio-json-golden/src/test/scala/zio/json/golden/GoldenSpec.scala
@@ -1,19 +1,19 @@
package zio.json.golden
+import zio._
import zio.json._
-import zio.json.golden._
+import zio.test.TestAspect.exceptScala212
import zio.test._
import zio.test.magnolia.DeriveGen
-import zio._
object GoldenSpec extends ZIOSpecDefault {
sealed trait SumType
object SumType {
- case object Case1 extends SumType
- case object Case2 extends SumType
- case object Case3 extends SumType
+ case object Case1 extends SumType
+ case object Case2 extends SumType
+ case class Case3() extends SumType
implicit val jsonCodec: JsonCodec[SumType] = DeriveJsonCodec.gen
}
@@ -29,6 +29,25 @@ object GoldenSpec extends ZIOSpecDefault {
implicit val jsonCodec: JsonCodec[RecordType] = DeriveJsonCodec.gen
}
+ final case class FilteredGenType(a: java.math.BigDecimal)
+ object FilteredGenType {
+ implicit val jsonCodec: JsonCodec[FilteredGenType] = DeriveJsonCodec.gen
+
+ val anyFilteredGenType: Gen[Any, FilteredGenType] = {
+
+ /**
+ * Copied from zio-json/shared/src/test/scala/zio/json/Gens.scala
+ */
+ val genBigDecimal: Gen[Any, java.math.BigDecimal] =
+ Gen
+ .bigDecimal((BigDecimal(2).pow(256) - 1) * -1, BigDecimal(2).pow(256) - 1)
+ .map(_.bigDecimal)
+ .filter(_.toBigInteger.bitLength < 256)
+
+ genBigDecimal.map(FilteredGenType.apply)
+ }
+ }
+
def spec: Spec[TestEnvironment with Scope, Any] = suite("GoldenSpec")(
goldenTest(DeriveGen[Int]),
goldenTest(DeriveGen[SumType]),
@@ -41,7 +60,11 @@ object GoldenSpec extends ZIOSpecDefault {
}, {
implicit val config: GoldenConfiguration = GoldenConfiguration.default.copy(relativePath = "recordtype")
goldenTest(DeriveGen[RecordType])
- }
+ }, {
+ implicit val config: GoldenConfiguration =
+ GoldenConfiguration.default.copy(relativePath = "filteredgentype", sampleSize = 100)
+ goldenTest(FilteredGenType.anyFilteredGenType)
+    } @@ exceptScala212 // Quick & dirty fix: Scala 2.12 generates BigDecimal values differently, making the test fail for no good reason.
)
}
diff --git a/zio-json-interop-refined/shared/src/main/scala/zio/json/interop/refined/package.scala b/zio-json-interop-refined/shared/src/main/scala/zio/json/interop/refined/package.scala
index ba558edc8..65d874923 100644
--- a/zio-json-interop-refined/shared/src/main/scala/zio/json/interop/refined/package.scala
+++ b/zio-json-interop-refined/shared/src/main/scala/zio/json/interop/refined/package.scala
@@ -1,7 +1,7 @@
package zio.json.interop
import eu.timepit.refined.api.{ Refined, Validate }
-import eu.timepit.refined.{ refineV }
+import eu.timepit.refined.refineV
import zio.json._
package object refined {
diff --git a/zio-json-interop-refined/shared/src/test/scala/zio/json/interop/refined/RefinedSpec.scala b/zio-json-interop-refined/shared/src/test/scala/zio/json/interop/refined/RefinedSpec.scala
index 50dc8700c..63d4fa052 100644
--- a/zio-json-interop-refined/shared/src/test/scala/zio/json/interop/refined/RefinedSpec.scala
+++ b/zio-json-interop-refined/shared/src/test/scala/zio/json/interop/refined/RefinedSpec.scala
@@ -1,8 +1,8 @@
package zio.json.interop.refined
import eu.timepit.refined.api.Refined
-import eu.timepit.refined.auto._
import eu.timepit.refined.collection.NonEmpty
+import eu.timepit.refined.types.string.NonEmptyString
import zio.json._
import zio.test.Assertion._
import zio.test._
@@ -11,9 +11,11 @@ object RefinedSpec extends ZIOSpecDefault {
val spec: Spec[Environment, Any] =
suite("Refined")(
test("Refined") {
+ val person = Person(NonEmptyString.unsafeFrom("fommil"))
+ val validJson = """{"name":"fommil"}"""
assert("""{"name":""}""".fromJson[Person])(isLeft(equalTo(".name(Predicate isEmpty() did not fail.)"))) &&
- assert("""{"name":"fommil"}""".fromJson[Person])(isRight(equalTo(Person("fommil")))) &&
- assert(Person("fommil").toJson)(equalTo("""{"name":"fommil"}"""))
+ assert(validJson.fromJson[Person])(isRight(equalTo(person))) &&
+ assert(person.toJson)(equalTo(validJson))
}
)
diff --git a/zio-json-macros/shared/src/main/scala/zio/json/jsonDerive.scala b/zio-json-macros/shared/src/main/scala/zio/json/jsonDerive.scala
index 8c35c9994..7d16f549f 100644
--- a/zio-json-macros/shared/src/main/scala/zio/json/jsonDerive.scala
+++ b/zio-json-macros/shared/src/main/scala/zio/json/jsonDerive.scala
@@ -74,12 +74,11 @@ private[json] final class DeriveCodecMacros(val c: blackbox.Context) {
private[this] val EncoderClass = typeOf[JsonEncoder[_]].typeSymbol.asType
private[this] val CodecClass = typeOf[JsonCodec[_]].typeSymbol.asType
- private[this] val macroName: Tree = {
+ private[this] val macroName: Tree =
c.prefix.tree match {
case Apply(Select(New(name), _), _) => name
case _ => c.abort(c.enclosingPosition, "Unexpected macro application")
}
- }
private[this] val (codecStyle: JsonCodecStyle, codecType: JsonCodecType) = {
val style: JsonCodecStyle = macroName match {
@@ -114,11 +113,11 @@ private[json] final class DeriveCodecMacros(val c: blackbox.Context) {
}
private[this] def codec(clsDef: ClassDef): Tree = {
- val tpname = clsDef.name
- val tparams = clsDef.tparams
- val decoderName = TermName("decode" + tpname.decodedName)
- val encoderName = TermName("encode" + tpname.decodedName)
- val codecName = TermName("codecFor" + tpname.decodedName)
+ val tpname = clsDef.name
+ val tparams = clsDef.tparams
+ val decoderName = TermName("decode" + tpname.decodedName)
+ val encoderName = TermName("encode" + tpname.decodedName)
+ val codecName = TermName("codecFor" + tpname.decodedName)
val (decoder, encoder, codec) = if (tparams.isEmpty) {
val Type = tpname
(
@@ -127,12 +126,15 @@ private[json] final class DeriveCodecMacros(val c: blackbox.Context) {
q"""implicit val $codecName: $CodecClass[$Type] = _root_.zio.json.DeriveJsonCodec.gen[$Type]"""
)
} else {
- val tparamNames = tparams.map(_.name)
+ val tparamNames = tparams.map(_.name)
def mkImplicitParams(prefix: String, typeSymbol: TypeSymbol) =
- tparamNames.zipWithIndex.map { case (tparamName, i) =>
- val paramName = TermName(s"$prefix$i")
- val paramType = tq"$typeSymbol[$tparamName]"
- q"$paramName: $paramType"
+ tparamNames.map {
+          var i = 0 // positional index, advanced inside the closure below (stands in for the previous zipWithIndex)
+ tparamName =>
+ val paramName = TermName(s"$prefix$i")
+ i += 1
+ val paramType = tq"$typeSymbol[$tparamName]"
+ q"$paramName: $paramType"
}
val decodeParams = mkImplicitParams("decode", DecoderClass)
val encodeParams = mkImplicitParams("encode", EncoderClass)
diff --git a/zio-json-macros/shared/src/test/scala/zio/json/DeriveSpec.scala b/zio-json-macros/shared/src/test/scala/zio/json/DeriveSpec.scala
index d62fd4d60..b09994a45 100644
--- a/zio-json-macros/shared/src/test/scala/zio/json/DeriveSpec.scala
+++ b/zio-json-macros/shared/src/test/scala/zio/json/DeriveSpec.scala
@@ -1,6 +1,5 @@
-package testzio.json
+package zio.json
-import zio.json._
import zio.test.Assertion._
import zio.test._
@@ -31,6 +30,13 @@ object DeriveSpec extends ZIOSpecDefault {
assert("""{"Child2":{}}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
assert("""{"type":"Child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)")))
},
+ test("sum encoding with hint names") {
+ import examplesumhintnames._
+
+ assert("""{"child1":{}}""".fromJson[Parent])(isRight(equalTo(Child1()))) &&
+ assert("""{"child2":{}}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
+ assert("""{"type":"child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)")))
+ },
test("sum alternative encoding") {
import examplealtsum._
@@ -38,8 +44,16 @@ object DeriveSpec extends ZIOSpecDefault {
assert("""{"hint":"Abel"}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
assert("""{"hint":"Samson"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
assert("""{"Cain":{}}""".fromJson[Parent])(isLeft(equalTo("(missing hint 'hint')")))
+ },
+ test("sum alternative encoding with hint names") {
+ import examplealtsumhintnames._
+
+ assert("""{"hint":"child1"}""".fromJson[Parent])(isRight(equalTo(Child1()))) &&
+ assert("""{"hint":"Abel"}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
+ assert("""{"hint":"Child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
+ assert("""{"child1":{}}""".fromJson[Parent])(isLeft(equalTo("(missing hint 'hint')")))
}
- )
+ ) @@ TestAspect.flaky // flaky only for Scala Native
)
object exampleproducts {
@@ -59,6 +73,15 @@ object DeriveSpec extends ZIOSpecDefault {
case class Child2() extends Parent
}
+ object examplesumhintnames {
+ @jsonDerive
+ @jsonHintNames(SnakeCase)
+ sealed abstract class Parent
+
+ case class Child1() extends Parent
+ case class Child2() extends Parent
+ }
+
object exampleempty {
@jsonDerive
case class Empty(a: Option[String])
@@ -78,6 +101,19 @@ object DeriveSpec extends ZIOSpecDefault {
case class Child2() extends Parent
}
+ object examplealtsumhintnames {
+
+ @jsonDerive
+ @jsonDiscriminator("hint")
+ @jsonHintNames(SnakeCase)
+ sealed abstract class Parent
+
+ case class Child1() extends Parent
+
+ @jsonHint("Abel")
+ case class Child2() extends Parent
+ }
+
object logEvent {
@jsonDerive(JsonDeriveConfig.Decoder)
case class Event(at: Long, message: String, a: Seq[String] = Nil)
diff --git a/zio-json-yaml/src/main/scala/zio/json/yaml/YamlOptions.scala b/zio-json-yaml/src/main/scala/zio/json/yaml/YamlOptions.scala
index df4940b46..957aa6a7b 100644
--- a/zio-json-yaml/src/main/scala/zio/json/yaml/YamlOptions.scala
+++ b/zio-json-yaml/src/main/scala/zio/json/yaml/YamlOptions.scala
@@ -20,11 +20,10 @@ case class YamlOptions(
)
object YamlOptions {
- private val defaultLineBreak: LineBreak = {
+ private val defaultLineBreak: LineBreak =
Set(LineBreak.MAC, LineBreak.WIN, LineBreak.UNIX)
.find(_.getString == System.lineSeparator())
.getOrElse(LineBreak.UNIX)
- }
val default: YamlOptions = YamlOptions(
() => new DumperOptions(),
diff --git a/zio-json-yaml/src/main/scala/zio/json/yaml/package.scala b/zio-json-yaml/src/main/scala/zio/json/yaml/package.scala
index a82339fd2..683696518 100644
--- a/zio-json-yaml/src/main/scala/zio/json/yaml/package.scala
+++ b/zio-json-yaml/src/main/scala/zio/json/yaml/package.scala
@@ -16,7 +16,7 @@ import java.io.{ StringReader, StringWriter }
import java.nio.charset.StandardCharsets
import java.util.Base64
import scala.jdk.CollectionConverters._
-import scala.util.Try
+import scala.util.control.NonFatal
import scala.util.matching.Regex
package object yaml {
@@ -67,12 +67,12 @@ package object yaml {
implicit final class DecoderYamlOps(private val raw: String) extends AnyVal {
def fromYaml[A](implicit decoder: JsonDecoder[A]): Either[String, A] =
- Try {
+ try {
val yaml = new Yaml().compose(new StringReader(raw))
- yamlToJson(yaml)
- }.toEither.left
- .map(_.getMessage)
- .flatMap(decoder.fromJsonAST(_))
+ decoder.fromJsonAST(yamlToJson(yaml))
+ } catch {
+ case NonFatal(e) => Left(e.getMessage)
+ }
}
private val multiline: Regex = "[\n\u0085\u2028\u2029]".r
@@ -127,7 +127,7 @@ package object yaml {
case Json.Num(value) =>
val stripped = value.stripTrailingZeros()
if (stripped.scale() <= 0) {
- new ScalarNode(Tag.INT, stripped.intValue().toString, null, null, options.scalarStyle(json))
+            // longValue (not intValue) so that integral values outside the Int range, e.g. 2910000000, are not truncated
+            new ScalarNode(Tag.INT, stripped.longValue.toString, null, null, options.scalarStyle(json))
} else {
new ScalarNode(Tag.FLOAT, stripped.toString, null, null, options.scalarStyle(json))
}
diff --git a/zio-json-yaml/src/test/scala/zio/json/yaml/YamlEncoderSpec.scala b/zio-json-yaml/src/test/scala/zio/json/yaml/YamlEncoderSpec.scala
index 8a053aee1..743175005 100644
--- a/zio-json-yaml/src/test/scala/zio/json/yaml/YamlEncoderSpec.scala
+++ b/zio-json-yaml/src/test/scala/zio/json/yaml/YamlEncoderSpec.scala
@@ -30,6 +30,11 @@ object YamlEncoderSpec extends ZIOSpecDefault {
isRight(equalTo("hello\n"))
)
},
+ test("large number") {
+ assert(Json.Num(2910000000L).toYaml(YamlOptions.default.copy(lineBreak = LineBreak.UNIX)))(
+ isRight(equalTo("2910000000\n"))
+ )
+ },
test("special characters in string") {
assert(Json.Arr(Json.Str("- [] &hello \\!")).toYaml(YamlOptions.default.copy(lineBreak = LineBreak.UNIX)))(
isRight(equalTo(" - '- [] &hello \\!'\n"))
@@ -70,7 +75,7 @@ object YamlEncoderSpec extends ZIOSpecDefault {
test("sequence root") {
assert(
Json
- .Arr(Json.Bool(true), Json.Bool(false), Json.Bool(true))
+ .Arr(Json.Bool.True, Json.Bool.False, Json.Bool.True)
.toYaml(YamlOptions.default.copy(lineBreak = LineBreak.UNIX))
)(
isRight(equalTo(""" - true
diff --git a/zio-json/js/src/main/scala/zio/json/internal/FastStringWrite.scala b/zio-json/js/src/main/scala/zio/json/internal/FastStringWrite.scala
new file mode 100644
index 000000000..811ca9405
--- /dev/null
+++ b/zio-json/js/src/main/scala/zio/json/internal/FastStringWrite.scala
@@ -0,0 +1,82 @@
+package zio.json.internal
+
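+// Scala.js-oriented Write implementation that appends to a plain String (string
+// concatenation is cheap on JS engines). The `initial` size hint appears to be kept
+// only for parity with the JVM implementation: it is validated but otherwise unused.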
+final class FastStringWrite(initial: Int) extends Write {
+ require(initial >= 8)
+ private[this] var chars: String = ""
+
+ @inline def reset(): Unit = chars = ""
+
+ @inline def write(s: String): Unit = chars += s
+
+ @inline def write(c: Char): Unit = chars += c
+
+ @inline override def write(cs: Array[Char], from: Int, to: Int): Unit = {
+ var i = from
+ while (i < to) {
+ chars += cs(i)
+ i += 1
+ }
+ }
+
+ @inline override def write(c1: Char, c2: Char): Unit = {
+ chars += c1
+ chars += c2
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char): Unit = {
+ chars += c1
+ chars += c2
+ chars += c3
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char, c4: Char): Unit = {
+ chars += c1
+ chars += c2
+ chars += c3
+ chars += c4
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char, c4: Char, c5: Char): Unit = {
+ chars += c1
+ chars += c2
+ chars += c3
+ chars += c4
+ chars += c5
+ }
+
+ @inline override def write(s: Short): Unit = {
+ chars += (s & 0xff).toChar
+ chars += (s >> 8).toChar
+ }
+
+ @inline override def write(s1: Short, s2: Short): Unit = {
+ chars += (s1 & 0xff).toChar
+ chars += (s1 >> 8).toChar
+ chars += (s2 & 0xff).toChar
+ chars += (s2 >> 8).toChar
+ }
+
+ @inline override def write(s1: Short, s2: Short, s3: Short): Unit = {
+ chars += (s1 & 0xff).toChar
+ chars += (s1 >> 8).toChar
+ chars += (s2 & 0xff).toChar
+ chars += (s2 >> 8).toChar
+ chars += (s3 & 0xff).toChar
+ chars += (s3 >> 8).toChar
+ }
+
+ @inline override def write(s1: Short, s2: Short, s3: Short, s4: Short): Unit = {
+ chars += (s1 & 0xff).toChar
+ chars += (s1 >> 8).toChar
+ chars += (s2 & 0xff).toChar
+ chars += (s2 >> 8).toChar
+ chars += (s3 & 0xff).toChar
+ chars += (s3 >> 8).toChar
+ chars += (s4 & 0xff).toChar
+ chars += (s4 >> 8).toChar
+ }
+
+ @inline def buffer: CharSequence = chars
+
+ @inline override def toString: String = chars
+}
diff --git a/zio-json/js/src/main/scala/zio/json/internal/SafeNumbers.scala b/zio-json/js/src/main/scala/zio/json/internal/SafeNumbers.scala
new file mode 100644
index 000000000..04af508a8
--- /dev/null
+++ b/zio-json/js/src/main/scala/zio/json/internal/SafeNumbers.scala
@@ -0,0 +1,1117 @@
+/*
+ * Copyright 2019-2022 John A. De Goes and the ZIO Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package zio.json.internal
+
+import java.util.UUID
+
+/**
+ * Total, fast, number parsing.
+ *
+ * The Java and Scala standard libraries throw exceptions when we attempt to parse an invalid number. Unfortunately,
+ * exceptions are very expensive, and untrusted data can be maliciously constructed to DOS a server.
+ *
+ * This suite of functions mitigates against such attacks by building up the numbers one character at a time, which has
+ * been shown through extensive benchmarking to be orders of magnitude faster than exception-throwing stdlib parsers,
+ * for valid and invalid inputs. This approach, proposed by alexknvl, was also benchmarked against regexp-based
+ * pre-validation.
+ *
+ * Note that although the behaviour is identical to the Java stdlib when given the canonical form of a primitive (i.e.
+ * the .toString) of a number, there may be differences in behaviour for non-canonical forms, e.g. the Java stdlib may
+ * reject "1.0" when parsed as a `BigInteger` but we may parse it as a `1`, although "1.1" would be rejected. Parsing
+ * of `BigDecimal` preserves the trailing zeros on the right but not on the left, e.g. "000.00001000" will be
+ * "1.000e-5", which is useful in cases where the trailing zeros denote measurement accuracy.
+ *
+ * `BigInteger`, `BigDecimal`, `Float` and `Double` have a configurable bit limit on the size of the significand, to
+ * avoid OOM style attacks, which is 256 bits by default.
+ *
+ * Results are contained in a specialisation of Option that avoids boxing.
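+ *
+ * Illustrative usage (examples added for documentation, not taken from the original sources):
+ * {{{
+ *   SafeNumbers.int("1234")       // IntSome(1234)
+ *   SafeNumbers.int("not a num")  // IntNone
+ *   SafeNumbers.double("1.5e10")  // DoubleSome(1.5E10)
+ * }}}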
+ */
+object SafeNumbers {
+ import UnsafeNumbers.UnsafeNumber
+
+ def byte(num: String): ByteOption =
+ try ByteSome(UnsafeNumbers.byte(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => ByteNone }
+
+ def short(num: String): ShortOption =
+ try ShortSome(UnsafeNumbers.short(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => ShortNone }
+
+ def int(num: String): IntOption =
+ try IntSome(UnsafeNumbers.int(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => IntNone }
+
+ def long(num: String): LongOption =
+ try LongSome(UnsafeNumbers.long(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => LongNone }
+
+ def bigInteger(num: String, max_bits: Int = 256): Option[java.math.BigInteger] =
+ try Some(UnsafeNumbers.bigInteger(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => None }
+
+ def bigInt(num: String, max_bits: Int = 256): Option[BigInt] =
+ try Some(UnsafeNumbers.bigInt(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => None }
+
+ def float(num: String, max_bits: Int = 256): FloatOption =
+ try FloatSome(UnsafeNumbers.float(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => FloatNone }
+
+ def double(num: String, max_bits: Int = 256): DoubleOption =
+ try DoubleSome(UnsafeNumbers.double(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => DoubleNone }
+
+ def bigDecimal(num: String, max_bits: Int = 256): Option[java.math.BigDecimal] =
+ try Some(UnsafeNumbers.bigDecimal(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => None }
+
+ def toString(x: java.math.BigDecimal): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: java.math.BigInteger): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: Double): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: Float): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: UUID): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: java.math.BigDecimal, out: Write): Unit = {
+ var exp = writeBigDecimal(x.unscaledValue, x.scale, 0, null, out)
+ if (exp != 0) {
+ var sc = '+'
+ if (exp < 0) {
+ sc = '-'
+ exp = -exp
+ }
+ out.write('E', sc)
+ writeMantissa(exp, out)
+ }
+ }
+
+ private[this] def writeBigDecimal(
+ x: java.math.BigInteger,
+ scale: Int,
+ blockScale: Int,
+ ss: Array[java.math.BigInteger],
+ out: Write
+ ): Int = {
+ val bitLen = x.bitLength
+ if (bitLen < 64) {
+ val v = x.longValue
+ val pv = Math.abs(v)
+ val digits = digitCount(pv)
+ val dotOff = scale - blockScale
+ val exp = (digits - 1) - dotOff
+ if (scale >= 0 && exp >= -6) {
+ if (exp < 0) {
+ if (v >= 0) out.write('0', '.')
+ else out.write('-', '0', '.')
+ var zeros = -exp - 1
+ while (zeros > 0) {
+ out.write('0')
+ zeros -= 1
+ }
+ write(pv, out)
+ } else if (dotOff > 0) writeLongWithDot(v, dotOff, out)
+ else write(v, out)
+ 0
+ } else {
+ if (digits > 1) writeLongWithDot(v, digits - 1, out)
+ else {
+ write(v, out)
+ if (blockScale > 0) out.write('.')
+ }
+ exp
+ }
+ } else {
+ val n = calculateTenPow18SquareNumber(bitLen)
+ val ss1 =
+ if (ss eq null) getTenPow18Squares(n)
+ else ss
+ val qr = x.divideAndRemainder(ss1(n))
+ val exp = writeBigDecimal(qr(0), scale, (18 << n) + blockScale, ss1, out)
+ writeBigDecimalRemainder(qr(1), scale, blockScale, n - 1, ss1, out)
+ exp
+ }
+ }
+
+ @inline private[this] def writeLongWithDot(v: Long, dotOff: Int, out: Write): Unit = {
+ val pow10 = pow10longs(dotOff)
+ val q = v / pow10
+ val r = Math.abs(v - q * pow10)
+ write(q, out)
+ out.write('.')
+ var zeros = dotOff - digitCount(r)
+ while (zeros > 0) {
+ out.write('0')
+ zeros -= 1
+ }
+ write(r, out)
+ }
+
+ private[this] def writeBigDecimalRemainder(
+ x: java.math.BigInteger,
+ scale: Int,
+ blockScale: Int,
+ n: Int,
+ ss: Array[java.math.BigInteger],
+ out: Write
+ ): Unit =
+ if (n < 0) {
+ val v = Math.abs(x.longValue)
+ var dotOff = scale - blockScale
+ if (dotOff > 0 && dotOff < 18) {
+ val pow10 = pow10longs(dotOff)
+ val q = v / pow10
+ val r = v - q * pow10
+ var zeros = 18 - dotOff - digitCount(q)
+ while (zeros > 0) {
+ out.write('0')
+ zeros -= 1
+ }
+ writeMantissa(q, out)
+ out.write('.')
+ dotOff -= digitCount(r)
+ while (dotOff > 0) {
+ out.write('0')
+ dotOff -= 1
+ }
+ writeMantissa(r, out)
+ } else {
+ if (dotOff == 18) out.write('.')
+ write18Digits(v, out)
+ }
+ } else {
+ val qr = x.divideAndRemainder(ss(n))
+ writeBigDecimalRemainder(qr(0), scale, (18 << n) + blockScale, n - 1, ss, out)
+ writeBigDecimalRemainder(qr(1), scale, blockScale, n - 1, ss, out)
+ }
+
+ def write(x: java.math.BigInteger, out: Write): Unit = writeBigInteger(x, null, out)
+
+ private[this] def writeBigInteger(x: java.math.BigInteger, ss: Array[java.math.BigInteger], out: Write): Unit = {
+ val bitLen = x.bitLength
+ if (bitLen < 64) write(x.longValue, out)
+ else {
+ val n = calculateTenPow18SquareNumber(bitLen)
+ val ss1 =
+ if (ss eq null) getTenPow18Squares(n)
+ else ss
+ val qr = x.divideAndRemainder(ss1(n))
+ writeBigInteger(qr(0), ss1, out)
+ writeBigIntegerRemainder(qr(1), n - 1, ss1, out)
+ }
+ }
+
+ private[this] def writeBigIntegerRemainder(
+ x: java.math.BigInteger,
+ n: Int,
+ ss: Array[java.math.BigInteger],
+ out: Write
+ ): Unit =
+ if (n < 0) write18Digits(Math.abs(x.longValue), out)
+ else {
+ val qr = x.divideAndRemainder(ss(n))
+ writeBigIntegerRemainder(qr(0), n - 1, ss, out)
+ writeBigIntegerRemainder(qr(1), n - 1, ss, out)
+ }
+
+ private[this] def calculateTenPow18SquareNumber(bitLen: Int): Int = {
+ val m = Math.max(
+ (bitLen * 0.016723888647998956).toInt - 1,
+ 1
+ ) // Math.max((x.bitLength * Math.log(2) / Math.log(1e18)).toInt - 1, 1)
+ 31 - java.lang.Integer.numberOfLeadingZeros(m)
+ }
+
+ private[this] def getTenPow18Squares(n: Int): Array[java.math.BigInteger] = {
+ var ss = tenPow18Squares
+ var i = ss.length
+ if (n >= i) {
+ var s = ss(i - 1)
+ ss = java.util.Arrays.copyOf(ss, n + 1)
+ while (i <= n) {
+ s = s.multiply(s)
+ ss(i) = s
+ i += 1
+ }
+ tenPow18Squares = ss
+ }
+ ss
+ }
+
+ // Based on the amazing work of Raffaello Giulietti
+ // "The Schubfach way to render doubles": https://drive.google.com/file/d/1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN/view
+ // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/DoubleToDecimal.java
+ def write(x: Double, out: Write): Unit = {
+ val bits = java.lang.Double.doubleToLongBits(x)
+ val ieeeExponent = (bits >> 52).toInt & 0x7ff
+ val ieeeMantissa = bits & 0xfffffffffffffL
+ if (ieeeExponent == 2047) {
+ out.write(
+ if (x != x) """"NaN""""
+ else if (bits < 0) """"-Infinity""""
+ else """"Infinity""""
+ )
+ } else {
+ if (bits < 0) out.write('-')
+ if (x == 0.0f) out.write('0', '.', '0')
+ else {
+ var e = ieeeExponent - 1075
+ var m = ieeeMantissa | 0x10000000000000L
+ var dv = 0L
+ var exp = 0
+ if (e == 0) dv = m
+ else if (e >= -52 && e < 0 && m << e == 0) dv = m >> -e
+ else {
+ var expShift, expCorr = 0
+ var cblShift = 2
+ if (ieeeExponent == 0) {
+ e = -1074
+ m = ieeeMantissa
+ if (ieeeMantissa < 3) {
+ m = (m << 3) + (m << 1)
+ expShift = 1
+ }
+ } else if (ieeeMantissa == 0 && ieeeExponent > 1) {
+ expCorr = 131007
+ cblShift = 1
+ }
+ exp = e * 315653 - expCorr >> 20
+ val i = exp + 324 << 1
+ val g1 = gs(i)
+ val g0 = gs(i + 1)
+ val h = (-exp * 108853 >> 15) + e + 2
+ val cb = m << 2
+ val outm1 = (m.toInt & 0x1) - 1
+ val vb = rop(g1, g0, cb << h)
+ val vbls = rop(g1, g0, cb - cblShift << h) + outm1
+ val vbrd = outm1 - rop(g1, g0, cb + 2 << h)
+ val s = vb >> 2
+ if (
+ s < 100 || {
+ var z = s
+ dv = s
+ dv = (dv >>> 1) + (dv >>> 2) // Based upon the divu10() code from Hacker's Delight 2nd Edition by Henry Warren
+ dv += dv >>> 4
+ dv += dv >>> 8
+ dv += dv >>> 16
+ dv += dv >>> 32
+ z -= dv & 0xfffffffffffffff8L
+ dv >>>= 3
+ if ((z - (dv << 1)).toInt >= 10) dv += 1L
+ val sp40 = (dv << 5) + (dv << 3)
+ val upin = (vbls - sp40).toInt
+ (((sp40 + vbrd).toInt + 40) ^ upin) >= 0 || {
+ dv += ~upin >>> 31
+ exp += 1
+ false
+ }
+ }
+ ) {
+ val s4 = s << 2
+ val uin = (vbls - s4).toInt
+ dv = (~ {
+ if ((((s4 + vbrd).toInt + 4) ^ uin) < 0) uin
+ else (vb.toInt & 0x3) + (s.toInt & 0x1) - 3
+ } >>> 31) + s
+ exp -= expShift
+ }
+ }
+ val len = digitCount(dv)
+ exp += len - 1
+ if (exp < -3 || exp >= 7) {
+ val sdv = stripTrailingZeros(dv)
+ writeMantissaWithDot(sdv, out)
+ if (sdv >= 10) out.write('E')
+ else out.write('0', 'E')
+ write(exp, out)
+ } else if (exp < 0) {
+ out.write('0', '.')
+ while ({
+ exp += 1
+ exp != 0
+ }) out.write('0')
+ writeMantissa(stripTrailingZeros(dv), out)
+ } else {
+ var pow10i = len - exp - 1
+ if (pow10i > 0) {
+ val pow10 = pow10longs(pow10i)
+ val q = dv / pow10
+ val r = dv - q * pow10
+ writeMantissa(q, out)
+ out.write('.')
+ pow10i -= digitCount(r)
+ while (pow10i > 0) {
+ out.write('0')
+ pow10i -= 1
+ }
+ writeMantissa(stripTrailingZeros(r), out)
+ } else {
+ writeMantissa(dv.toInt, out)
+ out.write('.', '0')
+ }
+ }
+ }
+ }
+ }
+
+ def write(x: Float, out: Write): Unit = {
+ val bits = java.lang.Float.floatToIntBits(x)
+ val ieeeExponent = (bits >> 23) & 0xff
+ val ieeeMantissa = bits & 0x7fffff
+ if (ieeeExponent == 255) {
+ out.write(
+ if (x != x) """"NaN""""
+ else if (bits < 0) """"-Infinity""""
+ else """"Infinity""""
+ )
+ } else {
+ if (bits < 0) out.write('-')
+ if (x == 0.0f) out.write('0', '.', '0')
+ else {
+ var e = ieeeExponent - 150
+ var m = ieeeMantissa | 0x800000
+ var dv, exp = 0
+ if (e == 0) dv = m
+ else if (e >= -23 && e < 0 && m << e == 0) dv = m >> -e
+ else {
+ var expShift, expCorr = 0
+ var cblShift = 2
+ if (ieeeExponent == 0) {
+ e = -149
+ m = ieeeMantissa
+ if (ieeeMantissa < 8) {
+ m *= 10
+ expShift = 1
+ }
+ } else if (ieeeMantissa == 0 && ieeeExponent > 1) {
+ expCorr = 131007
+ cblShift = 1
+ }
+ exp = e * 315653 - expCorr >> 20
+ val g1 = gs(exp + 324 << 1) + 1
+ val h = (-exp * 108853 >> 15) + e + 1
+ val cb = m << 2
+ val outm1 = (m & 0x1) - 1
+ val vb = rop(g1, cb << h)
+ val vbls = rop(g1, cb - cblShift << h) + outm1
+ val vbrd = outm1 - rop(g1, cb + 2 << h)
+ val s = vb >> 2
+ if (
+ s < 100 || {
+ dv = s / 10
+ val sp40 = dv * 40
+ val upin = vbls - sp40
+ ((sp40 + vbrd + 40) ^ upin) >= 0 || {
+ dv += ~upin >>> 31
+ exp += 1
+ false
+ }
+ }
+ ) {
+ val s4 = s << 2
+ val uin = vbls - s4
+ dv = (~ {
+ if (((s4 + vbrd + 4) ^ uin) < 0) uin
+ else (vb & 0x3) + (s & 0x1) - 3
+ } >>> 31) + s
+ exp -= expShift
+ }
+ }
+ val len = digitCount(dv)
+ exp += len - 1
+ if (exp < -3 || exp >= 7) {
+ val sdv = stripTrailingZeros(dv)
+ writeMantissaWithDot(sdv, out)
+ if (sdv >= 10) out.write('E')
+ else out.write('0', 'E')
+ write(exp, out)
+ } else if (exp < 0) {
+ out.write('0', '.')
+ while ({
+ exp += 1
+ exp != 0
+ }) out.write('0')
+ writeMantissa(stripTrailingZeros(dv), out)
+ } else {
+ var pow10i = len - exp - 1
+ if (pow10i > 0) {
+ val pow10 = pow10ints(pow10i)
+ val q = dv / pow10
+ val r = dv - q * pow10
+ writeMantissa(q, out)
+ out.write('.')
+ pow10i -= digitCount(r)
+ while (pow10i > 0) {
+ out.write('0')
+ pow10i -= 1
+ }
+ writeMantissa(stripTrailingZeros(r), out)
+ } else {
+ writeMantissa(dv, out)
+ out.write('.', '0')
+ }
+ }
+ }
+ }
+ }
+
+ def write(x: UUID, out: Write): Unit = {
+ val ds = lowerCaseHexDigits
+ val msb = x.getMostSignificantBits
+ val lsb = x.getLeastSignificantBits
+ val msb1 = (msb >> 32).toInt
+ val msb2 = msb.toInt
+ val lsb1 = (lsb >>> 32).toInt
+ val lsb2 = lsb.toInt
+ out.write(ds(msb1 >>> 24), ds(msb1 >> 16 & 0xff), ds(msb1 >> 8 & 0xff), ds(msb1 & 0xff))
+ out.write('-')
+ out.write(ds(msb2 >>> 24), ds(msb2 >> 16 & 0xff))
+ out.write('-')
+ out.write(ds(msb2 >> 8 & 0xff), ds(msb2 & 0xff))
+ out.write('-')
+ out.write(ds(lsb1 >>> 24), ds(lsb1 >> 16 & 0xff))
+ out.write('-')
+ out.write(ds(lsb1 >> 8 & 0xff), ds(lsb1 & 0xff))
+ out.write(ds(lsb2 >>> 24), ds(lsb2 >> 16 & 0xff), ds(lsb2 >> 8 & 0xff), ds(lsb2 & 0xff))
+ }
+
+ private[json] def writeHex(c: Char, out: Write): Unit = {
+ val ds = lowerCaseHexDigits
+ out.write(ds(c >> 8 & 0xff), ds(c & 0xff))
+ }
+
+ private[json] def writeNano(x: Int, out: Write): Unit = {
+ out.write('.')
+ var coeff = 100000000
+ while (coeff > x) {
+ out.write('0')
+ coeff /= 10
+ }
+ write(stripTrailingZeros(x), out)
+ }
+
+ @inline private[this] def rop(g1: Long, g0: Long, cp: Long): Long = {
+ val x = multiplyHigh(g0, cp) + (g1 * cp >>> 1)
+ var y = multiplyHigh(g1, cp)
+ if (x < 0) y += 1
+ if (-x != x) y |= 1
+ y
+ }
+
+ @inline private[this] def rop(g: Long, cp: Int): Int = {
+ val x = ((g & 0xffffffffL) * cp >>> 32) + (g >>> 32) * cp
+ (x >>> 31).toInt | -x.toInt >>> 31
+ }
+
+ @inline private[this] def multiplyHigh(x: Long, y: Long): Long = {
+ val x2 = x & 0xffffffffL
+ val y2 = y & 0xffffffffL
+ val b = x2 * y2
+ val x1 = x >>> 32
+ val y1 = y >>> 32
+ val a = x1 * y1
+ (((b >>> 32) + (x1 + x2) * (y1 + y2) - b - a) >>> 32) + a
+ }
+
+ @inline private[this] def stripTrailingZeros(x: Long): Long = {
+ var q0, q1 = x
+ if (
+ (q1 << 56 == 0L) && {
+ q0 = ((q1 >>> 8) * 2.56e-6).toLong // divide a medium positive long by 100000000
+ q0 * 100000000L == x
+ }
+ ) return stripTrailingZeros(q0.toInt).toLong
+ while ({
+ q0 = q1
+ var z = q1
+ q1 = (q1 >>> 1) + (q1 >>> 2) // Based upon the divu10() code from Hacker's Delight 2nd Edition by Henry Warren
+ q1 += q1 >>> 4
+ q1 += q1 >>> 8
+ q1 += q1 >>> 16
+ q1 += q1 >>> 32
+ z -= q1 & 0xfffffffffffffff8L
+ q1 >>>= 3
+ var r1 = (z - (q1 << 1)).toInt
+ if (r1 >= 10) {
+ q1 += 1L
+ r1 -= 10
+ }
+ r1 == 0
+ }) ()
+ q0
+ }
+
+ private[this] def stripTrailingZeros(x: Int): Int = {
+ var q0, q1 = x
+ while ({
+ q0 = q1
+ q1 /= 10
+ q1 * 10 == q0 // check whether q0 is divisible by 10
+ }) ()
+ q0
+ }
+
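+ // Writes a Long in decimal. Long.MinValue cannot be negated, so its leading "922" is written
+ // directly and the remaining digits are handled as a positive value.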
+ @inline def write(a: Long, out: Write): Unit = {
+ var q0 = a
+ if (q0 < 0) {
+ q0 = -q0
+ out.write('-')
+ if (q0 == a) {
+ out.write('9', '2', '2')
+ q0 = 3372036854775808L
+ }
+ }
+ var q = q0.toInt
+ if (q0 == q) writeMantissa(q, out)
+ else {
+ var last: Char = 0
+ if (q0 >= 1000000000000000000L) {
+ var z = q0
+ q0 = (q0 >>> 1) + (q0 >>> 2) // Based upon the divu10() code from Hacker's Delight 2nd Edition by Henry Warren
+ q0 += q0 >>> 4
+ q0 += q0 >>> 8
+ q0 += q0 >>> 16
+ q0 += q0 >>> 32
+ z -= q0 & 0xfffffffffffffff8L
+ q0 >>>= 3
+ var r = (z - (q0 << 1)).toInt
+ if (r >= 10) {
+ q0 += 1L
+ r -= 10
+ }
+ last = (r | '0').toChar
+ }
+ val q1 = ((q0 >>> 8) * 2.56e-6).toLong // divide a medium positive long by 100000000
+ q = q1.toInt
+ if (q1 == q) writeMantissa(q, out)
+ else {
+ q = ((q1 >>> 8) * 1441151881L >>> 49).toInt // divide a small positive long by 100000000
+ writeMantissa(q, out)
+ write8Digits((q1 - q * 100000000L).toInt, out)
+ }
+ write8Digits((q0 - q1 * 100000000L).toInt, out)
+ if (last != 0) out.write(last)
+ }
+ }
+
+ @inline private[this] def writeMantissa(q0: Long, out: Write): Unit =
+ if (q0.toInt == q0) writeMantissa(q0.toInt, out)
+ else {
+ val q1 = ((q0 >>> 8) * 2.56e-6).toLong // divide a medium positive long by 100000000
+ writeMantissa(q1.toInt, out)
+ write8Digits((q0 - q1 * 100000000L).toInt, out)
+ }
+
+ @inline private[this] def writeMantissaWithDot(q0: Long, out: Write): Unit =
+ if (q0.toInt == q0) writeMantissaWithDot(q0.toInt, out)
+ else {
+ val q1 = ((q0 >>> 8) * 2.56e-6).toLong // divide a medium positive long by 100000000
+ writeMantissaWithDot(q1.toInt, out)
+ write8Digits((q0 - q1 * 100000000L).toInt, out)
+ }
+
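+ // Writes an Int in decimal, special-casing Int.MinValue for the same reason as the Long writer.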
+ def write(a: Int, out: Write): Unit = {
+ var q0 = a
+ if (q0 < 0) {
+ q0 = -q0
+ out.write('-')
+ if (q0 == a) {
+ out.write('2')
+ q0 = 147483648
+ }
+ }
+ writeMantissa(q0, out)
+ }
+
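+ // Writes a non-negative Int without leading zeros, taking digits two at a time from the
+ // packed `digits` table below.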
+ private[this] def writeMantissa(q0: Int, out: Write): Unit = {
+ val ds = digits
+ if (q0 < 100) {
+ if (q0 < 10) out.write((q0 | '0').toChar)
+ else out.write(ds(q0))
+ } else if (q0 < 10000) {
+ val q1 = q0 * 5243 >> 19 // divide a small positive int by 100
+ val d2 = ds(q0 - q1 * 100)
+ if (q0 < 1000) out.write((q1 | '0').toChar)
+ else out.write(ds(q1))
+ out.write(d2)
+ } else if (q0 < 1000000) {
+ val q1 = q0 / 100
+ val r1 = q0 - q1 * 100
+ val q2 = q1 * 5243 >> 19 // divide a small positive int by 100
+ val r2 = q1 - q2 * 100
+ if (q0 < 100000) out.write((q2 | '0').toChar)
+ else out.write(ds(q2))
+ out.write(ds(r2), ds(r1))
+ } else if (q0 < 100000000) {
+ if (q0 < 10000000) {
+ val q1 = q0 / 100
+ val r1 = q0 - q1 * 100
+ val q2 = q1 / 100
+ val r2 = q1 - q2 * 100
+ val q3 = q2 * 5243 >> 19 // divide a small positive int by 100
+ val r3 = q2 - q3 * 100
+ out.write((q3 | '0').toChar)
+ out.write(ds(r3), ds(r2), ds(r1))
+ } else write8Digits(q0, out)
+ } else {
+ val q1 = q0 / 100000000
+ val r1 = q0 - q1 * 100000000
+ if (q0 < 1000000000) out.write((q1 | '0').toChar)
+ else out.write(ds(q1))
+ write8Digits(r1, out)
+ }
+ }
+
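+ // Like writeMantissa, but places a '.' right after the first (most significant) digit, as
+ // required by the scientific-notation branch above.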
+ private[this] def writeMantissaWithDot(q0: Int, out: Write): Unit = {
+ val ds = digits
+ if (q0 < 100) {
+ if (q0 < 10) out.write((q0 | '0').toChar, '.')
+ else {
+ val d1 = ds(q0)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ } else if (q0 < 10000) {
+ val q1 = q0 * 5243 >> 19 // divide a small positive int by 100
+ val d2 = ds(q0 - q1 * 100)
+ if (q0 < 1000) out.write((q1 | '0').toChar, '.')
+ else {
+ val d1 = ds(q1)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ out.write(d2)
+ } else if (q0 < 1000000) {
+ val q1 = q0 / 100
+ val r1 = q0 - q1 * 100
+ val q2 = q1 * 5243 >> 19 // divide a small positive int by 100
+ val r2 = q1 - q2 * 100
+ if (q0 < 100000) out.write((q2 | '0').toChar, '.')
+ else {
+ val d1 = ds(q2)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ out.write(ds(r2), ds(r1))
+ } else if (q0 < 100000000) {
+ val q1 = q0 / 100
+ val r1 = q0 - q1 * 100
+ val q2 = q1 / 100
+ val r2 = q1 - q2 * 100
+ val q3 = q2 * 5243 >> 19 // divide a small positive int by 100
+ val r3 = q2 - q3 * 100
+ if (q0 < 10000000) out.write((q3 | '0').toChar, '.')
+ else {
+ val d1 = ds(q3)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ out.write(ds(r3), ds(r2), ds(r1))
+ } else {
+ val q1 = q0 / 100000000
+ val r1 = q0 - q1 * 100000000
+ if (q0 < 1000000000) out.write((q1 | '0').toChar, '.')
+ else {
+ val d1 = ds(q1)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ write8Digits(r1, out)
+ }
+ }
+
+ @inline private[this] def write18Digits(x: Long, out: Write): Unit = {
+ val q1 = ((x >>> 8) * 2.56e-6).toLong // divide a medium positive long by 100000000
+ val q2 = (q1 >>> 8) * 1441151881L >>> 49 // divide a small positive long by 100000000
+ out.write(digits(q2.toInt))
+ write8Digits((q1 - q2 * 100000000L).toInt, out)
+ write8Digits((x - q1 * 100000000L).toInt, out)
+ }
+
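+ // Writes exactly 8 decimal digits, left-padded with zeros (unlike writeMantissa, which
+ // suppresses leading zeros).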
+ private[this] def write8Digits(x: Int, out: Write): Unit = {
+ val ds = digits
+ val q1 = x / 10000
+ val q2 = q1 * 5243 >> 19 // divide a small positive int by 100
+ out.write(ds(q2), ds(q1 - q2 * 100))
+ val r1 = x - q1 * 10000
+ val q3 = r1 * 5243 >> 19 // divide a small positive int by 100
+ out.write(ds(q3), ds(r1 - q3 * 100))
+ }
+
+ @inline private[json] def write4Digits(x: Int, out: Write): Unit = {
+ val ds = digits
+ val q = x * 5243 >> 19 // divide a 4-digit positive int by 100
+ out.write(ds(q), ds(x - q * 100))
+ }
+
+ @inline private[json] def write3Digits(x: Int, out: Write): Unit = {
+ val q = x * 1311 >> 17 // divide a 3-digit positive int by 100
+ out.write((q + '0').toChar)
+ out.write(digits(x - q * 100))
+ }
+
+ @inline private[json] def write2Digits(x: Int, out: Write): Unit =
+ out.write(digits(x))
+
+ @inline private[this] def digitCount(x: Long): Int =
+ if (x >= 100000000000000000L) {
+ if (x >= 1000000000000000000L) 19
+ else 18
+ } else if (x >= 1000000000000000L) {
+ if (x >= 10000000000000000L) 17
+ else 16
+ } else if (x >= 10000000000000L) {
+ if (x >= 100000000000000L) 15
+ else 14
+ } else if (x >= 100000000000L) {
+ if (x >= 1000000000000L) 13
+ else 12
+ } else if (x >= 1000000000L) {
+ if (x >= 10000000000L) 11
+ else 10
+ } else digitCount(x.toInt)
+
+ private[this] def digitCount(x: Int): Int =
+ if (x < 100) {
+ if (x < 10) 1
+ else 2
+ } else if (x < 10000) {
+ if (x < 1000) 3
+ else 4
+ } else if (x < 1000000) {
+ if (x < 100000) 5
+ else 6
+ } else if (x < 100000000) {
+ if (x < 10000000) 7
+ else 8
+ } else {
+ if (x < 1000000000) 9
+ else 10
+ }
+
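+ // Each entry packs the two ASCII characters of "00".."99" into a Short: the tens digit in
+ // the low byte and the ones digit in the high byte, e.g. digits(12) == 0x3231 == ('2' << 8) | '1'.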
+ private[this] final val digits: Array[Short] = Array(
+ 12336, 12592, 12848, 13104, 13360, 13616, 13872, 14128, 14384, 14640, 12337, 12593, 12849, 13105, 13361, 13617,
+ 13873, 14129, 14385, 14641, 12338, 12594, 12850, 13106, 13362, 13618, 13874, 14130, 14386, 14642, 12339, 12595,
+ 12851, 13107, 13363, 13619, 13875, 14131, 14387, 14643, 12340, 12596, 12852, 13108, 13364, 13620, 13876, 14132,
+ 14388, 14644, 12341, 12597, 12853, 13109, 13365, 13621, 13877, 14133, 14389, 14645, 12342, 12598, 12854, 13110,
+ 13366, 13622, 13878, 14134, 14390, 14646, 12343, 12599, 12855, 13111, 13367, 13623, 13879, 14135, 14391, 14647,
+ 12344, 12600, 12856, 13112, 13368, 13624, 13880, 14136, 14392, 14648, 12345, 12601, 12857, 13113, 13369, 13625,
+ 13881, 14137, 14393, 14649
+ )
+
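+ // Same packing for the two lower-case hex characters of 0x00..0xff: the high-nibble
+ // character in the low byte and the low-nibble character in the high byte.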
+ private[this] final val lowerCaseHexDigits: Array[Short] = Array(
+ 12336, 12592, 12848, 13104, 13360, 13616, 13872, 14128, 14384, 14640, 24880, 25136, 25392, 25648, 25904, 26160,
+ 12337, 12593, 12849, 13105, 13361, 13617, 13873, 14129, 14385, 14641, 24881, 25137, 25393, 25649, 25905, 26161,
+ 12338, 12594, 12850, 13106, 13362, 13618, 13874, 14130, 14386, 14642, 24882, 25138, 25394, 25650, 25906, 26162,
+ 12339, 12595, 12851, 13107, 13363, 13619, 13875, 14131, 14387, 14643, 24883, 25139, 25395, 25651, 25907, 26163,
+ 12340, 12596, 12852, 13108, 13364, 13620, 13876, 14132, 14388, 14644, 24884, 25140, 25396, 25652, 25908, 26164,
+ 12341, 12597, 12853, 13109, 13365, 13621, 13877, 14133, 14389, 14645, 24885, 25141, 25397, 25653, 25909, 26165,
+ 12342, 12598, 12854, 13110, 13366, 13622, 13878, 14134, 14390, 14646, 24886, 25142, 25398, 25654, 25910, 26166,
+ 12343, 12599, 12855, 13111, 13367, 13623, 13879, 14135, 14391, 14647, 24887, 25143, 25399, 25655, 25911, 26167,
+ 12344, 12600, 12856, 13112, 13368, 13624, 13880, 14136, 14392, 14648, 24888, 25144, 25400, 25656, 25912, 26168,
+ 12345, 12601, 12857, 13113, 13369, 13625, 13881, 14137, 14393, 14649, 24889, 25145, 25401, 25657, 25913, 26169,
+ 12385, 12641, 12897, 13153, 13409, 13665, 13921, 14177, 14433, 14689, 24929, 25185, 25441, 25697, 25953, 26209,
+ 12386, 12642, 12898, 13154, 13410, 13666, 13922, 14178, 14434, 14690, 24930, 25186, 25442, 25698, 25954, 26210,
+ 12387, 12643, 12899, 13155, 13411, 13667, 13923, 14179, 14435, 14691, 24931, 25187, 25443, 25699, 25955, 26211,
+ 12388, 12644, 12900, 13156, 13412, 13668, 13924, 14180, 14436, 14692, 24932, 25188, 25444, 25700, 25956, 26212,
+ 12389, 12645, 12901, 13157, 13413, 13669, 13925, 14181, 14437, 14693, 24933, 25189, 25445, 25701, 25957, 26213,
+ 12390, 12646, 12902, 13158, 13414, 13670, 13926, 14182, 14438, 14694, 24934, 25190, 25446, 25702, 25958, 26214
+ )
+
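+ // Precomputed power-of-ten significands for the float/double-to-decimal conversion above,
+ // stored as pairs of longs and indexed by the decimal exponent offset by 324 (see the
+ // gs(exp + 324 << 1) lookup in the Float writer).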
+ private[this] val gs: Array[Long] = Array(
+ 5696189077778435540L, 6557778377634271669L, 9113902524445496865L, 1269073367360058862L, 7291122019556397492L,
+ 1015258693888047090L, 5832897615645117993L, 6346230177223303157L, 4666318092516094394L, 8766332956520552849L,
+ 7466108948025751031L, 8492109508320019073L, 5972887158420600825L, 4949013199285060097L, 4778309726736480660L,
+ 3959210559428048077L, 7645295562778369056L, 6334736895084876923L, 6116236450222695245L, 3223115108696946377L,
+ 4892989160178156196L, 2578492086957557102L, 7828782656285049914L, 436238524390181040L, 6263026125028039931L,
+ 2193665226883099993L, 5010420900022431944L, 9133629810990300641L, 8016673440035891111L, 9079784475471615541L,
+ 6413338752028712889L, 5419153173006337271L, 5130671001622970311L, 6179996945776024979L, 8209073602596752498L,
+ 6198646298499729642L, 6567258882077401998L, 8648265853541694037L, 5253807105661921599L, 1384589460720489745L,
+ 8406091369059074558L, 5904691951894693915L, 6724873095247259646L, 8413102376257665455L, 5379898476197807717L,
+ 4885807493635177203L, 8607837561916492348L, 438594360332462878L, 6886270049533193878L, 4040224303007880625L,
+ 5509016039626555102L, 6921528257148214824L, 8814425663402488164L, 3695747581953323071L, 7051540530721990531L,
+ 4801272472933613619L, 5641232424577592425L, 1996343570975935733L, 9025971879324147880L, 3194149713561497173L,
+ 7220777503459318304L, 2555319770849197738L, 5776622002767454643L, 3888930224050313352L, 4621297602213963714L,
+ 6800492993982161005L, 7394076163542341943L, 5346765568258592123L, 5915260930833873554L, 7966761269348784022L,
+ 4732208744667098843L, 8218083422849982379L, 7571533991467358150L, 2080887032334240837L, 6057227193173886520L,
+ 1664709625867392670L, 4845781754539109216L, 1331767700693914136L, 7753250807262574745L, 7664851543223128102L,
+ 6202600645810059796L, 6131881234578502482L, 4962080516648047837L, 3060830580291846824L, 7939328826636876539L,
+ 6742003335837910079L, 6351463061309501231L, 7238277076041283225L, 5081170449047600985L, 3945947253462071419L,
+ 8129872718476161576L, 6313515605539314269L, 6503898174780929261L, 3206138077060496254L, 5203118539824743409L,
+ 720236054277441842L, 8324989663719589454L, 4841726501585817270L, 6659991730975671563L, 5718055608639608977L,
+ 5327993384780537250L, 8263793301653597505L, 8524789415648859601L, 3998697245790980200L, 6819831532519087681L,
+ 1354283389261828999L, 5455865226015270144L, 8462124340893283845L, 8729384361624432231L, 8005375723316388668L,
+ 6983507489299545785L, 4559626171282155773L, 5586805991439636628L, 3647700937025724618L, 8938889586303418605L,
+ 3991647091870204227L, 7151111669042734884L, 3193317673496163382L, 5720889335234187907L, 4399328546167885867L,
+ 9153422936374700651L, 8883600081239572549L, 7322738349099760521L, 5262205657620702877L, 5858190679279808417L,
+ 2365090118725607140L, 4686552543423846733L, 7426095317093351197L, 7498484069478154774L, 813706063123630946L,
+ 5998787255582523819L, 2495639257869859918L, 4799029804466019055L, 3841185813666843096L, 7678447687145630488L,
+ 6145897301866948954L, 6142758149716504390L, 8606066656235469486L, 4914206519773203512L, 6884853324988375589L,
+ 7862730431637125620L, 3637067690497580296L, 6290184345309700496L, 2909654152398064237L, 5032147476247760397L,
+ 483048914547496228L, 8051435961996416635L, 2617552670646949126L, 6441148769597133308L, 2094042136517559301L,
+ 5152919015677706646L, 5364582523955957764L, 8244670425084330634L, 4893983223587622099L, 6595736340067464507L,
+ 5759860986241052841L, 5276589072053971606L, 918539974250931950L, 8442542515286354569L, 7003687180914356604L,
+ 6754034012229083655L, 7447624152102440445L, 5403227209783266924L, 5958099321681952356L, 8645163535653227079L,
+ 3998935692578258285L, 6916130828522581663L, 5043822961433561789L, 5532904662818065330L, 7724407183888759755L,
+ 8852647460508904529L, 3135679457367239799L, 7082117968407123623L, 4353217973264747001L, 5665694374725698898L,
+ 7171923193353707924L, 9065110999561118238L, 407030665140201709L, 7252088799648894590L, 4014973346854071690L,
+ 5801671039719115672L, 3211978677483257352L, 4641336831775292537L, 8103606164099471367L, 7426138930840468060L,
+ 5587072233075333540L, 5940911144672374448L, 4469657786460266832L, 4752728915737899558L, 7265075043910123789L,
+ 7604366265180639294L, 556073626030467093L, 6083493012144511435L, 2289533308195328836L, 4866794409715609148L,
+ 1831626646556263069L, 7786871055544974637L, 1085928227119065748L, 6229496844435979709L, 6402765803808118083L,
+ 4983597475548783767L, 6966887050417449628L, 7973755960878054028L, 3768321651184098759L, 6379004768702443222L,
+ 6704006135689189330L, 5103203814961954578L, 1673856093809441141L, 8165126103939127325L, 833495342724150664L,
+ 6532100883151301860L, 666796274179320531L, 5225680706521041488L, 533437019343456425L, 8361089130433666380L,
+ 8232196860433350926L, 6688871304346933104L, 6585757488346680741L, 5351097043477546483L, 7113280398048299755L,
+ 8561755269564074374L, 313202192651548637L, 6849404215651259499L, 2095236161492194072L, 5479523372521007599L,
+ 3520863336564710419L, 8767237396033612159L, 99358116390671185L, 7013789916826889727L, 1924160900483492110L,
+ 5611031933461511781L, 7073351942499659173L, 8977651093538418850L, 7628014293257544353L, 7182120874830735080L,
+ 6102411434606035483L, 5745696699864588064L, 4881929147684828386L, 9193114719783340903L, 2277063414182859933L,
+ 7354491775826672722L, 5510999546088198270L, 5883593420661338178L, 719450822128648293L, 4706874736529070542L,
+ 4264909472444828957L, 7530999578446512867L, 8668529563282681493L, 6024799662757210294L, 3245474835884234871L,
+ 4819839730205768235L, 4441054276078343059L, 7711743568329229176L, 7105686841725348894L, 6169394854663383341L,
+ 3839875066009323953L, 4935515883730706673L, 1227225645436504001L, 7896825413969130677L, 118886625327451240L,
+ 6317460331175304541L, 5629132522374826477L, 5053968264940243633L, 2658631610528906020L, 8086349223904389813L,
+ 2409136169475294470L, 6469079379123511850L, 5616657750322145900L, 5175263503298809480L, 4493326200257716720L,
+ 8280421605278095168L, 7189321920412346751L, 6624337284222476135L, 217434314217011916L, 5299469827377980908L,
+ 173947451373609533L, 8479151723804769452L, 7657013551681595899L, 6783321379043815562L, 2436262026603366396L,
+ 5426657103235052449L, 7483032843395558602L, 8682651365176083919L, 6438829327320028278L, 6946121092140867135L,
+ 6995737869226977784L, 5556896873712693708L, 5596590295381582227L, 8891034997940309933L, 7109870065239576402L,
+ 7112827998352247947L, 153872830078795637L, 5690262398681798357L, 5657121486175901994L, 9104419837890877372L,
+ 1672696748397622544L, 7283535870312701897L, 6872180620830963520L, 5826828696250161518L, 1808395681922860493L,
+ 4661462957000129214L, 5136065360280198718L, 7458340731200206743L, 2683681354335452463L, 5966672584960165394L,
+ 5836293898210272294L, 4773338067968132315L, 6513709525939172997L, 7637340908749011705L, 1198563204647900987L,
+ 6109872726999209364L, 958850563718320789L, 4887898181599367491L, 2611754858345611793L, 7820637090558987986L,
+ 489458958611068546L, 6256509672447190388L, 7770264796372675483L, 5005207737957752311L, 682188614985274902L,
+ 8008332380732403697L, 6625525006089305327L, 6406665904585922958L, 1611071190129533939L, 5125332723668738366L,
+ 4978205766845537474L, 8200532357869981386L, 4275780412210949635L, 6560425886295985109L, 1575949922397804547L,
+ 5248340709036788087L, 3105434345289198799L, 8397345134458860939L, 6813369359833673240L, 6717876107567088751L,
+ 7295369895237893754L, 5374300886053671001L, 3991621508819359841L, 8598881417685873602L, 2697245599369065423L,
+ 6879105134148698881L, 7691819701608117823L, 5503284107318959105L, 4308781353915539097L, 8805254571710334568L,
+ 6894050166264862555L, 7044203657368267654L, 9204588947753800367L, 5635362925894614123L, 9208345565573995455L,
+ 9016580681431382598L, 3665306460692661759L, 7213264545145106078L, 6621593983296039730L, 5770611636116084862L,
+ 8986624001378742108L, 4616489308892867890L, 3499950386361083363L, 7386382894228588624L, 5599920618177733380L,
+ 5909106315382870899L, 6324610901913141866L, 4727285052306296719L, 6904363128901468655L, 7563656083690074751L,
+ 5512957784129484362L, 6050924866952059801L, 2565691819932632328L, 4840739893561647841L, 207879048575150701L,
+ 7745183829698636545L, 5866629699833106606L, 6196147063758909236L, 4693303759866485285L, 4956917651007127389L,
+ 1909968600522233067L, 7931068241611403822L, 6745298575577483229L, 6344854593289123058L, 1706890045720076260L,
+ 5075883674631298446L, 5054860851317971332L, 8121413879410077514L, 4398428547366843807L, 6497131103528062011L,
+ 5363417245264430207L, 5197704882822449609L, 2446059388840589004L, 8316327812515919374L, 7603043836886852730L,
+ 6653062250012735499L, 7927109476880437346L, 5322449800010188399L, 8186361988875305038L, 8515919680016301439L,
+ 7564155960087622576L, 6812735744013041151L, 7895999175441053223L, 5450188595210432921L, 4472124932981887417L,
+ 8720301752336692674L, 3466051078029109543L, 6976241401869354139L, 4617515269794242796L, 5580993121495483311L,
+ 5538686623206349399L, 8929588994392773298L, 5172549782388248714L, 7143671195514218638L, 7827388640652509295L,
+ 5714936956411374911L, 727887690409141951L, 9143899130258199857L, 6698643526767492606L, 7315119304206559886L,
+ 1669566006672083762L, 5852095443365247908L, 8714350434821487656L, 4681676354692198327L, 1437457125744324640L,
+ 7490682167507517323L, 4144605808561874585L, 5992545734006013858L, 7005033461591409992L, 4794036587204811087L,
+ 70003547160262509L, 7670458539527697739L, 1956680082827375175L, 6136366831622158191L, 3410018473632855302L,
+ 4909093465297726553L, 883340371535329080L, 7854549544476362484L, 8792042223940347174L, 6283639635581089987L,
+ 8878308186523232901L, 5026911708464871990L, 3413297734476675998L, 8043058733543795184L, 5461276375162681596L,
+ 6434446986835036147L, 6213695507501100438L, 5147557589468028918L, 1281607591258970028L, 8236092143148846269L,
+ 205897738643396882L, 6588873714519077015L, 2009392598285672668L, 5271098971615261612L, 1607514078628538134L,
+ 8433758354584418579L, 4416696933176616176L, 6747006683667534863L, 5378031953912248102L, 5397605346934027890L,
+ 7991774377871708805L, 8636168555094444625L, 3563466967739958280L, 6908934844075555700L, 2850773574191966624L,
+ 5527147875260444560L, 2280618859353573299L, 8843436600416711296L, 3648990174965717279L, 7074749280333369037L,
+ 1074517732601618662L, 5659799424266695229L, 6393637408194160414L, 9055679078826712367L, 4695796630997791177L,
+ 7244543263061369894L, 67288490056322619L, 5795634610449095915L, 1898505199416013257L, 4636507688359276732L,
+ 1518804159532810606L, 7418412301374842771L, 4274761062623452130L, 5934729841099874217L, 1575134442727806543L,
+ 4747783872879899373L, 6794130776295110719L, 7596454196607838997L, 9025934834701221989L, 6077163357286271198L,
+ 3531399053019067268L, 4861730685829016958L, 6514468057157164137L, 7778769097326427133L, 8578474484080507458L,
+ 6223015277861141707L, 1328756365151540482L, 4978412222288913365L, 6597028314234097870L, 7965459555662261385L,
+ 1331873265919780784L, 6372367644529809108L, 1065498612735824627L, 5097894115623847286L, 4541747704930570025L,
+ 8156630584998155658L, 3577447513147001717L, 6525304467998524526L, 6551306825259511697L, 5220243574398819621L,
+ 3396371052836654196L, 8352389719038111394L, 1744844869796736390L, 6681911775230489115L, 3240550303208344274L,
+ 5345529420184391292L, 2592440242566675419L, 8552847072295026067L, 5992578795477635832L, 6842277657836020854L,
+ 1104714221640198342L, 5473822126268816683L, 2728445784683113836L, 8758115402030106693L, 2520838848122026975L,
+ 7006492321624085354L, 5706019893239531903L, 5605193857299268283L, 6409490321962580684L, 8968310171678829253L,
+ 8410510107769173933L, 7174648137343063403L, 1194384864102473662L, 5739718509874450722L, 4644856706023889253L,
+ 9183549615799121156L, 53073100154402158L, 7346839692639296924L, 7421156109607342373L, 5877471754111437539L,
+ 7781599295056829060L, 4701977403289150031L, 8069953843416418410L, 7523163845262640050L, 9222577334724359132L,
+ 6018531076210112040L, 7378061867779487306L, 4814824860968089632L, 5902449494223589845L, 7703719777548943412L,
+ 2065221561273923105L, 6162975822039154729L, 7186200471132003969L, 4930380657631323783L, 7593634784276558337L,
+ 7888609052210118054L, 1081769210616762369L, 6310887241768094443L, 2710089775864365057L, 5048709793414475554L,
+ 5857420635433402369L, 8077935669463160887L, 3837849794580578305L, 6462348535570528709L, 8604303057777328129L,
+ 5169878828456422967L, 8728116853592817665L, 8271806125530276748L, 6586289336264687617L, 6617444900424221398L,
+ 8958380283753660417L, 5293955920339377119L, 1632681004890062849L, 8470329472543003390L, 6301638422566010881L,
+ 6776263578034402712L, 5041310738052808705L, 5421010862427522170L, 343699775700336641L, 8673617379884035472L,
+ 549919641120538625L, 6938893903907228377L, 5973958935009296385L, 5551115123125782702L, 1089818333265526785L,
+ 8881784197001252323L, 3588383740595798017L, 7105427357601001858L, 6560055807218548737L, 5684341886080801486L,
+ 8937393460516749313L, 9094947017729282379L, 1387108685230112769L, 7275957614183425903L, 2954361355555045377L,
+ 5820766091346740722L, 6052837899185946625L, 4656612873077392578L, 1152921504606846977L, 7450580596923828125L, 1L,
+ 5960464477539062500L, 1L, 4768371582031250000L, 1L, 7629394531250000000L, 1L, 6103515625000000000L, 1L,
+ 4882812500000000000L, 1L, 7812500000000000000L, 1L, 6250000000000000000L, 1L, 5000000000000000000L, 1L,
+ 8000000000000000000L, 1L, 6400000000000000000L, 1L, 5120000000000000000L, 1L, 8192000000000000000L, 1L,
+ 6553600000000000000L, 1L, 5242880000000000000L, 1L, 8388608000000000000L, 1L, 6710886400000000000L, 1L,
+ 5368709120000000000L, 1L, 8589934592000000000L, 1L, 6871947673600000000L, 1L, 5497558138880000000L, 1L,
+ 8796093022208000000L, 1L, 7036874417766400000L, 1L, 5629499534213120000L, 1L, 9007199254740992000L, 1L,
+ 7205759403792793600L, 1L, 5764607523034234880L, 1L, 4611686018427387904L, 1L, 7378697629483820646L,
+ 3689348814741910324L, 5902958103587056517L, 1106804644422573097L, 4722366482869645213L, 6419466937650923963L,
+ 7555786372591432341L, 8426472692870523179L, 6044629098073145873L, 4896503746925463381L, 4835703278458516698L,
+ 7606551812282281028L, 7737125245533626718L, 1102436455425918676L, 6189700196426901374L, 4571297979082645264L,
+ 4951760157141521099L, 5501712790637071373L, 7922816251426433759L, 3268717242906448711L, 6338253001141147007L,
+ 4459648201696114131L, 5070602400912917605L, 9101741783469756789L, 8112963841460668169L, 5339414816696835055L,
+ 6490371073168534535L, 6116206260728423206L, 5192296858534827628L, 4892965008582738565L, 8307674973655724205L,
+ 5984069606361426541L, 6646139978924579364L, 4787255685089141233L, 5316911983139663491L, 5674478955442268148L,
+ 8507059173023461586L, 5389817513965718714L, 6805647338418769269L, 2467179603801619810L, 5444517870735015415L,
+ 3818418090412251009L, 8711228593176024664L, 6109468944659601615L, 6968982874540819731L, 6732249563098636453L,
+ 5575186299632655785L, 3541125243107954001L, 8920298079412249256L, 5665800388972726402L, 7136238463529799405L,
+ 2687965903807225960L, 5708990770823839524L, 2150372723045780768L, 9134385233318143238L, 7129945171615159552L,
+ 7307508186654514591L, 169932915179262157L, 5846006549323611672L, 7514643961627230372L, 4676805239458889338L,
+ 2322366354559873974L, 7482888383134222941L, 1871111759924843197L, 5986310706507378352L, 8875587037423695204L,
+ 4789048565205902682L, 3411120815197045840L, 7662477704329444291L, 7302467711686228506L, 6129982163463555433L,
+ 3997299761978027643L, 4903985730770844346L, 6887188624324332438L, 7846377169233350954L, 7330152984177021577L,
+ 6277101735386680763L, 7708796794712572423L, 5021681388309344611L, 633014213657192454L, 8034690221294951377L,
+ 6546845963964373411L, 6427752177035961102L, 1548127956429588405L, 5142201741628768881L, 6772525587256536209L,
+ 8227522786606030210L, 7146692124868547611L, 6582018229284824168L, 5717353699894838089L, 5265614583427859334L,
+ 8263231774657780795L, 8424983333484574935L, 7687147617339583786L, 6739986666787659948L, 6149718093871667029L,
+ 5391989333430127958L, 8609123289839243947L, 8627182933488204734L, 2706550819517059345L, 6901746346790563787L,
+ 4009915062984602637L, 5521397077432451029L, 8741955272500547595L, 8834235323891921647L, 8453105213888010667L,
+ 7067388259113537318L, 3073135356368498210L, 5653910607290829854L, 6147857099836708891L, 9046256971665327767L,
+ 4302548137625868741L, 7237005577332262213L, 8976061732213560478L, 5789604461865809771L, 1646826163657982898L,
+ 4631683569492647816L, 8696158560410206965L, 7410693711188236507L, 1001132845059645012L, 5928554968950589205L,
+ 6334929498160581494L, 4742843975160471364L, 5067943598528465196L, 7588550360256754183L, 2574686535532678828L,
+ 6070840288205403346L, 5749098043168053386L, 4856672230564322677L, 2754604027163487547L, 7770675568902916283L,
+ 6252040850832535236L, 6216540455122333026L, 8690981495407938512L, 4973232364097866421L, 5108110788955395648L,
+ 7957171782556586274L, 4483628447586722714L, 6365737426045269019L, 5431577165440333333L, 5092589940836215215L,
+ 6189936139723221828L, 8148143905337944345L, 680525786702379117L, 6518515124270355476L, 544420629361903293L,
+ 5214812099416284380L, 7814234132973343281L, 8343699359066055009L, 3279402575902573442L, 6674959487252844007L,
+ 4468196468093013915L, 5339967589802275205L, 9108580396587276617L, 8543948143683640329L, 5350356597684866779L,
+ 6835158514946912263L, 6124959685518848585L, 5468126811957529810L, 8589316563156989191L, 8749002899132047697L,
+ 4519534464196406897L, 6999202319305638157L, 9149650793469991003L, 5599361855444510526L, 3630371820034082479L,
+ 8958978968711216842L, 2119246097312621643L, 7167183174968973473L, 7229420099962962799L, 5733746539975178779L,
+ 249512857857504755L, 9173994463960286046L, 4088569387313917931L, 7339195571168228837L, 1426181102480179183L,
+ 5871356456934583069L, 6674968104097008831L, 4697085165547666455L, 7184648890648562227L, 7515336264876266329L,
+ 2272066188182923754L, 6012269011901013063L, 3662327357917294165L, 4809815209520810450L, 6619210701075745655L,
+ 7695704335233296721L, 1367365084866417240L, 6156563468186637376L, 8472589697376954439L, 4925250774549309901L,
+ 4933397350530608390L, 7880401239278895842L, 4204086946107063100L, 6304320991423116673L, 8897292778998515965L,
+ 5043456793138493339L, 1583811001085947287L, 8069530869021589342L, 6223446416479425982L, 6455624695217271474L,
+ 1289408318441630463L, 5164499756173817179L, 2876201062124259532L, 8263199609878107486L, 8291270514140725574L,
+ 6610559687902485989L, 4788342003941625298L, 5288447750321988791L, 5675348010524255400L, 8461516400515182066L,
+ 5391208002096898316L, 6769213120412145653L, 2468291994306563491L, 5415370496329716522L, 5663982410187161116L,
+ 8664592794127546436L, 1683674226815637140L, 6931674235302037148L, 8725637010936330358L, 5545339388241629719L,
+ 1446486386636198802L, 8872543021186607550L, 6003727033359828406L, 7098034416949286040L, 4802981626687862725L,
+ 5678427533559428832L, 3842385301350290180L, 9085484053695086131L, 7992490889531419449L, 7268387242956068905L,
+ 4549318304254180398L, 5814709794364855124L, 3639454643403344318L, 4651767835491884099L, 4756238122093630616L,
+ 7442828536787014559L, 2075957773236943501L, 5954262829429611647L, 3505440625960509963L, 4763410263543689317L,
+ 8338375722881273455L, 7621456421669902908L, 5962703527126216881L, 6097165137335922326L, 8459511636442883828L,
+ 4877732109868737861L, 4922934901783351901L, 7804371375789980578L, 4187347028111452718L, 6243497100631984462L,
+ 7039226437231072498L, 4994797680505587570L, 1942032335042947675L, 7991676288808940112L, 3107251736068716280L,
+ 6393341031047152089L, 8019824610967838509L, 5114672824837721671L, 8260534096145225969L, 8183476519740354675L,
+ 304133702235675419L, 6546781215792283740L, 243306961788540335L, 5237424972633826992L, 194645569430832268L,
+ 8379879956214123187L, 2156107318460286790L, 6703903964971298549L, 7258909076881094917L, 5363123171977038839L,
+ 7651801668875831096L, 8580997075163262143L, 6708859448088464268L, 6864797660130609714L, 9056436373212681737L,
+ 5491838128104487771L, 9089823505941100552L, 8786941004967180435L, 1630996757909074751L, 7029552803973744348L,
+ 1304797406327259801L, 5623642243178995478L, 4733186739803718164L, 8997827589086392765L, 5728424376314993901L,
+ 7198262071269114212L, 4582739501051995121L, 5758609657015291369L, 9200214822954461581L, 9213775451224466191L,
+ 9186320494614273045L, 7371020360979572953L, 5504381988320463275L, 5896816288783658362L, 8092854405398280943L,
+ 4717453031026926690L, 2784934709576714431L, 7547924849643082704L, 4455895535322743090L, 6038339879714466163L,
+ 5409390835629149634L, 4830671903771572930L, 8016861483245230030L, 7729075046034516689L, 3603606336337592240L,
+ 6183260036827613351L, 4727559476441028954L, 4946608029462090681L, 1937373173781868001L, 7914572847139345089L,
+ 8633820300163854287L, 6331658277711476071L, 8751730647502038591L, 5065326622169180857L, 5156710110630675711L,
+ 8104522595470689372L, 872038547525260492L, 6483618076376551497L, 6231654060133073878L, 5186894461101241198L,
+ 1295974433364548779L, 8299031137761985917L, 228884686012322885L, 6639224910209588733L, 5717130970922723793L,
+ 5311379928167670986L, 8263053591480089358L, 8498207885068273579L, 308164894771456841L, 6798566308054618863L,
+ 2091206323188120634L, 5438853046443695090L, 5362313873292406831L, 8702164874309912144L, 8579702197267850929L,
+ 6961731899447929715L, 8708436165185235905L, 5569385519558343772L, 6966748932148188724L, 8911016831293350036L,
+ 3768100661953281312L, 7128813465034680029L, 1169806122191669888L, 5703050772027744023L, 2780519305124291072L,
+ 9124881235244390437L, 2604156480827910553L, 7299904988195512349L, 7617348406775193928L, 5839923990556409879L,
+ 7938553132791110304L, 4671939192445127903L, 8195516913603843405L, 7475102707912204646L, 2044780617540418478L,
+ 5980082166329763716L, 9014522123516155429L, 4784065733063810973L, 5366943291441969181L, 7654505172902097557L,
+ 6742434858936195528L, 6123604138321678046L, 1704599072407046100L, 4898883310657342436L, 8742376887409457526L,
+ 7838213297051747899L, 1075082168258445910L, 6270570637641398319L, 2704740141977711890L, 5016456510113118655L,
+ 4008466520953124674L, 8026330416180989848L, 6413546433524999478L, 6421064332944791878L, 8820185961561909905L,
+ 5136851466355833503L, 1522125547136662440L, 8218962346169333605L, 590726468047704741L, 6575169876935466884L,
+ 472581174438163793L, 5260135901548373507L, 2222739346921486196L, 8416217442477397611L, 5401057362445333075L,
+ 6732973953981918089L, 2476171482585311299L, 5386379163185534471L, 3825611593439204201L, 8618206661096855154L,
+ 2431629734760816398L, 6894565328877484123L, 3789978195179608280L, 5515652263101987298L, 6721331370885596947L,
+ 8825043620963179677L, 8909455786045999954L, 7060034896770543742L, 3438215814094889640L, 5648027917416434993L,
+ 8284595873388777197L, 9036844667866295990L, 2187306953196312545L, 7229475734293036792L, 1749845562557050036L,
+ 5783580587434429433L, 6933899672158505514L, 4626864469947543547L, 13096515613938926L, 7402983151916069675L,
+ 1865628832353257443L, 5922386521532855740L, 1492503065882605955L, 4737909217226284592L, 1194002452706084764L,
+ 7580654747562055347L, 3755078331700690783L, 6064523798049644277L, 8538085887473418112L, 4851619038439715422L,
+ 3141119895236824166L, 7762590461503544675L, 6870466239749873827L, 6210072369202835740L, 5496372991799899062L,
+ 4968057895362268592L, 4397098393439919250L, 7948892632579629747L, 8880031836874825961L, 6359114106063703798L,
+ 3414676654757950445L, 5087291284850963038L, 6421090138548270680L, 8139666055761540861L, 8429069814306277926L,
+ 6511732844609232689L, 4898581444074067179L, 5209386275687386151L, 5763539562630208905L, 8335018041099817842L,
+ 5532314485466423924L, 6668014432879854274L, 736502773631228816L, 5334411546303883419L, 2433876626275938215L,
+ 8535058474086213470L, 7583551416783411467L, 6828046779268970776L, 6066841133426729173L, 5462437423415176621L,
+ 3008798499370428177L, 8739899877464282594L, 1124728784250774760L, 6991919901971426075L, 2744457434771574970L,
+ 5593535921577140860L, 2195565947817259976L, 8949657474523425376L, 3512905516507615961L, 7159725979618740301L,
+ 965650005835137607L, 5727780783694992240L, 8151217634151930732L, 9164449253911987585L, 3818576177788313364L,
+ 7331559403129590068L, 3054860942230650691L, 5865247522503672054L, 6133237568526430876L, 4692198018002937643L,
+ 6751264462192099863L, 7507516828804700229L, 8957348732136404618L, 6006013463043760183L, 9010553393080078856L,
+ 4804810770435008147L, 1674419492351197600L, 7687697232696013035L, 4523745595132871322L, 6150157786156810428L,
+ 3618996476106297057L, 4920126228925448342L, 6584545995626947969L, 7872201966280717348L, 3156575963519296104L,
+ 6297761573024573878L, 6214609585557347207L, 5038209258419659102L, 8661036483187788089L, 8061134813471454564L,
+ 6478960743616640295L, 6448907850777163651L, 7027843002264267398L, 5159126280621730921L, 3777599994440458757L,
+ 8254602048994769474L, 2354811176362823687L, 6603681639195815579L, 3728523348461214111L, 5282945311356652463L,
+ 4827493086139926451L, 8452712498170643941L, 5879314530452927160L, 6762169998536515153L, 2858777216991386566L,
+ 5409735998829212122L, 5976370588335019576L, 8655577598126739396L, 2183495311852210675L, 6924462078501391516L,
+ 9125493878965589187L, 5539569662801113213L, 5455720695801516188L, 8863311460481781141L, 6884478705911470739L,
+ 7090649168385424913L, 3662908557358221429L, 5672519334708339930L, 6619675660628487467L, 9076030935533343889L,
+ 1368109020150804139L, 7260824748426675111L, 2939161623491598473L, 5808659798741340089L, 506654891422323617L,
+ 4646927838993072071L, 2249998320508814055L, 7435084542388915313L, 9134020534926967972L, 5948067633911132251L,
+ 1773193205828708893L, 4758454107128905800L, 8797252194146787761L, 7613526571406249281L, 4852231473780084609L,
+ 6090821257124999425L, 2037110771653112526L, 4872657005699999540L, 1629688617322490021L, 7796251209119999264L,
+ 2607501787715984033L, 6237000967295999411L, 3930675837543742388L, 4989600773836799529L, 1299866262664038749L,
+ 7983361238138879246L, 5769134835004372321L, 6386688990511103397L, 2770633460632542696L, 5109351192408882717L,
+ 7750529990618899641L, 8174961907854212348L, 5022150355506418780L, 6539969526283369878L, 7707069099147045347L,
+ 5231975621026695903L, 631632057204770793L, 8371160993642713444L, 8389308921011453915L, 6696928794914170755L,
+ 8556121544180118293L, 5357543035931336604L, 6844897235344094635L, 8572068857490138567L, 5417812354437685931L,
+ 6857655085992110854L, 644901068808238421L, 5486124068793688683L, 2360595262417545899L, 8777798510069901893L,
+ 1932278012497118276L, 7022238808055921514L, 5235171224739604944L, 5617791046444737211L, 6032811387162639117L,
+ 8988465674311579538L, 5963149404718312264L, 7190772539449263630L, 8459868338516560134L, 5752618031559410904L,
+ 6767894670813248108L, 9204188850495057447L, 5294608251188331487L
+ )
+
+ private[this] final val pow10ints: Array[Int] =
+ Array(1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000)
+
+ private[this] final val pow10longs: Array[Long] =
+ Array(1L, 10L, 100L, 1000L, 10000L, 100000L, 1000000L, 10000000L, 100000000L, 1000000000L, 10000000000L,
+ 100000000000L, 1000000000000L, 10000000000000L, 100000000000000L, 1000000000000000L, 10000000000000000L,
+ 100000000000000000L, 1000000000000000000L)
+
+ @volatile private[this] var tenPow18Squares: Array[java.math.BigInteger] =
+ Array(java.math.BigInteger.valueOf(1000000000000000000L))
+
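+ // Reusable per-thread FastStringWrite buffer; `get` always returns it already reset.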
+ private[this] val writes = new ThreadLocal[FastStringWrite] {
+ override def initialValue(): FastStringWrite = new FastStringWrite(64)
+
+ override def get: FastStringWrite = {
+ val w = super.get
+ w.reset()
+ w
+ }
+ }
+}
diff --git a/zio-json/js/src/main/scala/zio/json/internal/UnsafeNumbers.scala b/zio-json/js/src/main/scala/zio/json/internal/UnsafeNumbers.scala
new file mode 100644
index 000000000..c903b73e2
--- /dev/null
+++ b/zio-json/js/src/main/scala/zio/json/internal/UnsafeNumbers.scala
@@ -0,0 +1,791 @@
+/*
+ * Copyright 2019-2022 John A. De Goes and the ZIO Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package zio.json.internal
+
+import scala.util.control.NoStackTrace
+
+// The underlying implementation signals failure with an exception that carries no
+// stack trace, which is roughly 20x faster than throwing one that does, and it
+// avoids boxing the results on the happy path. This slows down the unhappy path a
+// little bit, but it's still on the same order of magnitude as the happy path.
+//
+// This API should only be used by people who know what they are doing. Note
+// that Reader implementations consume one character beyond the number that is
+// parsed, because there is no terminator character.
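+//
+// A sketch of typical use (names as defined below):
+//
+//   val n: Int =
+//     try UnsafeNumbers.int("123")
+//     catch { case UnsafeNumbers.UnsafeNumber => -1 } // substitute some sentinel/default on failure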
+object UnsafeNumbers {
+
+ // should never escape into user code
+ case object UnsafeNumber
+ extends Exception("if you see this a dev made a mistake using UnsafeNumbers")
+ with NoStackTrace
+
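+ // The String-based variants parse the entire input; trailing characters are a failure.
+ // The underscore variants read from a OneCharReader: with `consume = true` the reader must be
+ // exhausted after the number, with `consume = false` parsing stops at (and swallows) the first
+ // non-numeric character, as noted above.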
+ def byte(num: String): Byte =
+ byte_(new FastStringReader(num), true)
+
+ def byte_(in: OneCharReader, consume: Boolean): Byte = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var accum = current - '0'
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ accum = accum * 10 + (current - '0')
+ if (accum > 128) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return (-accum).toByte
+ else if (accum < 128) return accum.toByte
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def short(num: String): Short =
+ short_(new FastStringReader(num), true)
+
+ def short_(in: OneCharReader, consume: Boolean): Short = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var accum = current - '0'
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ accum = accum * 10 + (current - '0')
+ if (accum > 32768) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return (-accum).toShort
+ else if (accum < 32768) return accum.toShort
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def int(num: String): Int =
+ int_(new FastStringReader(num), true)
+
+ def int_(in: OneCharReader, consume: Boolean): Int = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var accum = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ accum < -214748364 || {
+ accum = accum * 10 + ('0' - current)
+ accum > 0
+ }
+ ) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return accum
+ else if (accum != -2147483648) return -accum
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def long(num: String): Long =
+ long_(new FastStringReader(num), true)
+
+ def long_(in: OneCharReader, consume: Boolean): Long = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var accum = ('0' - current).toLong
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ accum < -922337203685477580L || {
+ accum = (accum << 3) + (accum << 1) + ('0' - current)
+ accum > 0
+ }
+ ) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return accum
+ else if (accum != -9223372036854775808L) return -accum
+ }
+ }
+ throw UnsafeNumber
+ }
+
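+ // `max_bits` bounds the bit length of the parsed value; anything at or above the bound
+ // throws UnsafeNumber, which guards against pathologically long inputs.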
+ def bigInteger(num: String, max_bits: Int): java.math.BigInteger =
+ bigInteger_(new FastStringReader(num), true, max_bits)
+
+ def bigInteger_(in: OneCharReader, consume: Boolean, max_bits: Int): java.math.BigInteger = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var loM10 = (current - '0').toLong
+ var loDigits = 1
+ var hiM10: java.math.BigDecimal = null
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ } else {
+ if (negate) loM10 = -loM10
+ val bd = java.math.BigDecimal.valueOf(loM10)
+ if (hiM10 eq null) hiM10 = bd
+ else {
+ hiM10 = hiM10.scaleByPowerOfTen(loDigits).add(bd)
+ if (hiM10.unscaledValue.bitLength >= max_bits) throw UnsafeNumber
+ }
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ if (!consume || current == -1) {
+ if (negate) loM10 = -loM10
+ if (hiM10 eq null) return java.math.BigInteger.valueOf(loM10)
+ val bi = hiM10.scaleByPowerOfTen(loDigits).add(java.math.BigDecimal.valueOf(loM10)).unscaledValue
+ if (bi.bitLength < max_bits) return bi
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def bigInt(num: String, max_bits: Int): BigInt =
+ bigInt_(new FastStringReader(num), true, max_bits)
+
+ def bigInt_(in: OneCharReader, consume: Boolean, max_bits: Int): BigInt = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var loM10 = (current - '0').toLong
+ var loDigits = 1
+ var hiM10: java.math.BigDecimal = null
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ } else {
+ if (negate) loM10 = -loM10
+ val bd = java.math.BigDecimal.valueOf(loM10)
+ if (hiM10 eq null) hiM10 = bd
+ else {
+ hiM10 = hiM10.scaleByPowerOfTen(loDigits).add(bd)
+ if (hiM10.unscaledValue.bitLength >= max_bits) throw UnsafeNumber
+ }
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ if (!consume || current == -1) {
+ if (negate) loM10 = -loM10
+ if (hiM10 eq null) return BigInt(loM10)
+ val bi = hiM10.scaleByPowerOfTen(loDigits).add(java.math.BigDecimal.valueOf(loM10)).unscaledValue
+ if (bi.bitLength < max_bits) return new BigInt(bi)
+ }
+ }
+ throw UnsafeNumber
+ }
+
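+ // Digits are accumulated into the 64-bit `loM10` and spilled into the BigDecimal `hiM10`
+ // whenever `loM10` approaches overflow; `e10` tracks the decimal point and the optional
+ // exponent, and the pieces are combined at the end.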
+ def bigDecimal(num: String, max_bits: Int): java.math.BigDecimal =
+ bigDecimal_(new FastStringReader(num), true, max_bits)
+
+ def bigDecimal_(in: OneCharReader, consume: Boolean, max_bits: Int): java.math.BigDecimal = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ var loM10 = 0L
+ var loDigits = 0
+ var hiM10: java.math.BigDecimal = null
+ if (current >= '0' && current <= '9') {
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ }
+ var e10 = 0
+ if (current == '.') {
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ e10 -= 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ e10 -= 1
+ }
+ }
+ }
+ if (
+ loDigits != 0 && ((current | 0x20) != 'e' || {
+ current = in.readChar().toInt
+ val negateExp = current == '-'
+ if (negateExp || current == '+') current = in.readChar().toInt
+ (current >= '0' && current <= '9') && {
+ var exp = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ exp < -214748364 || {
+ exp = exp * 10 + ('0' - current)
+ exp > 0
+ }
+ ) throw UnsafeNumber
+ }
+ negateExp && {
+ e10 += exp
+ e10 <= 0
+ } || !negateExp && {
+ e10 -= exp
+ exp != -2147483648
+ }
+ }
+ }) && (!consume || current == -1)
+ ) {
+ if (hiM10 eq null) {
+ if (negate) loM10 = -loM10
+ return java.math.BigDecimal.valueOf(loM10, -e10)
+ }
+ return toBigDecimal(hiM10, loM10, loDigits, e10, max_bits, negate)
+ }
+ throw UnsafeNumber
+ }
+
+ @noinline private[this] def toBigDecimal(
+ hi: java.math.BigDecimal,
+ lo: Long,
+ loDigits: Int,
+ max_bits: Int,
+ negate: Boolean
+ ): java.math.BigDecimal = {
+ var loM10 = lo
+ if (negate) loM10 = -loM10
+ var hiM10 = java.math.BigDecimal.valueOf(loM10)
+ if (hi eq null) return hiM10
+ hiM10 = hi.scaleByPowerOfTen(loDigits).add(hiM10)
+ if (hiM10.unscaledValue.bitLength < max_bits) return hiM10
+ throw UnsafeNumber
+ }
+
+ @noinline private[this] def toBigDecimal(
+ hi: java.math.BigDecimal,
+ lo: Long,
+ loDigits: Int,
+ e10: Int,
+ max_bits: Int,
+ negate: Boolean
+ ): java.math.BigDecimal = {
+ var loM10 = lo
+ if (negate) loM10 = -loM10
+ var hiM10 = java.math.BigDecimal.valueOf(loM10, -e10)
+ if (hi eq null) return hiM10
+ val n = loDigits.toLong + e10
+ if (
+ n.toInt == n && {
+ val scale = hi.scale - n
+ scale.toInt == scale
+ } && {
+ hiM10 = hi.scaleByPowerOfTen(n.toInt).add(hiM10)
+ hiM10.unscaledValue.bitLength < max_bits
+ }
+ ) return hiM10
+ throw UnsafeNumber
+ }
+
+ def float(num: String, max_bits: Int): Float =
+ float_(new FastStringReader(num), true, max_bits)
+
+ def float_(in: OneCharReader, consume: Boolean, max_bits: Int): Float = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ else if (current == 'N') {
+ readAll(in, "aN", consume)
+ return Float.NaN
+ }
+ if (current == 'I' || current == '+') {
+ if (current == '+' && in.readChar() != 'I') throw UnsafeNumber
+ readAll(in, "nfinity", consume)
+ return if (negate) Float.NegativeInfinity else Float.PositiveInfinity
+ }
+ var loM10 = 0L
+ var loDigits = 0
+ var hiM10: java.math.BigDecimal = null
+ if (current >= '0' && current <= '9') {
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ }
+ var e10 = 0
+ if (current == '.') {
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ e10 -= 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ e10 -= 1
+ }
+ }
+ }
+ if (
+ loDigits != 0 && ((current | 0x20) != 'e' || {
+ current = in.readChar().toInt
+ val negateExp = current == '-'
+ if (negateExp || current == '+') current = in.readChar().toInt
+ (current >= '0' && current <= '9') && {
+ var exp = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ exp < -214748364 || {
+ exp = exp * 10 + ('0' - current)
+ exp > 0
+ }
+ ) throw UnsafeNumber
+ }
+ negateExp && {
+ e10 += exp
+ e10 <= 0
+ } || !negateExp && {
+ e10 -= exp
+ exp != -2147483648
+ }
+ }
+ }) && (!consume || current == -1)
+ ) {
+ if (hiM10 eq null) {
+ var x =
+ if (e10 == 0) loM10.toFloat
+ else {
+ if (loM10 < 4294967296L && e10 >= loDigits - 23 && e10 <= 19 - loDigits) {
+ val pow10 = pow10Doubles
+ (if (e10 < 0) loM10 / pow10(-e10)
+ else loM10 * pow10(e10)).toFloat
+ } else toFloat(loM10, e10)
+ }
+ if (negate) x = -x
+ return x
+ }
+ return toBigDecimal(hiM10, loM10, loDigits, e10, max_bits, negate).floatValue
+ }
+ throw UnsafeNumber
+ }
+
+ // Based on the 'Moderate Path' algorithm from the awesome library of Alexander Huszagh: https://github.com/Alexhuszagh/rust-lexical
+ // Here is his inspiring post: https://www.reddit.com/r/rust/comments/a6j5j1/making_rust_float_parsing_fast_and_correct
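+ // The decimal mantissa is scaled by a precomputed power-of-ten significand into a binary
+ // mantissa/exponent pair; if the result is clearly away from a rounding boundary it is
+ // rounded directly, otherwise the slower but exact BigDecimal conversion is used.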
+ @noinline private[this] def toFloat(m10: Long, e10: Int): Float =
+ if (m10 == 0 || e10 < -64) 0.0f
+ else if (e10 >= 39) Float.PositiveInfinity
+ else {
+ var shift = java.lang.Long.numberOfLeadingZeros(m10)
+ var m2 = unsignedMultiplyHigh(pow10Mantissas(e10 + 343), m10 << shift)
+ var e2 = (e10 * 108853 >> 15) - shift + 1 // (e10 * Math.log(10) / Math.log(2)).toInt - shift + 1
+ shift = java.lang.Long.numberOfLeadingZeros(m2)
+ m2 <<= shift
+ e2 -= shift
+ val truncatedBitNum = Math.max(-149 - e2, 40)
+ val savedBitNum = 64 - truncatedBitNum
+ val mask = -1L >>> Math.max(savedBitNum, 0)
+ val halfwayDiff = (m2 & mask) - (mask >>> 1)
+ if (Math.abs(halfwayDiff) > 1 || savedBitNum <= 0) java.lang.Float.intBitsToFloat {
+ var mf = 0
+ if (savedBitNum > 0) mf = (m2 >>> truncatedBitNum).toInt
+ e2 += truncatedBitNum
+ if (savedBitNum >= 0 && halfwayDiff > 0) {
+ if (mf == 0xffffff) {
+ mf = 0x800000
+ e2 += 1
+ } else mf += 1
+ }
+ if (e2 == -149) mf
+ else if (e2 >= 105) 0x7f800000
+ else e2 + 150 << 23 | mf & 0x7fffff
+ }
+ else java.math.BigDecimal.valueOf(m10, -e10).floatValue
+ }
+
+ def double(num: String, max_bits: Int): Double =
+ double_(new FastStringReader(num), true, max_bits)
+
+ def double_(in: OneCharReader, consume: Boolean, max_bits: Int): Double = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ else if (current == 'N') {
+ readAll(in, "aN", consume)
+ return Double.NaN
+ }
+ if (current == 'I' || current == '+') {
+ if (current == '+' && in.readChar() != 'I') throw UnsafeNumber
+ readAll(in, "nfinity", consume)
+ return if (negate) Double.NegativeInfinity else Double.PositiveInfinity
+ }
+ var loM10 = 0L
+ var loDigits = 0
+ var hiM10: java.math.BigDecimal = null
+ if (current >= '0' && current <= '9') {
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, 0, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ }
+ var e10 = 0
+ if (current == '.') {
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = (loM10 << 3) + (loM10 << 1) + (current - '0')
+ loDigits += 1
+ e10 -= 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, 0, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ e10 -= 1
+ }
+ }
+ }
+ if (
+ loDigits != 0 && ((current | 0x20) != 'e' || {
+ current = in.readChar().toInt
+ val negateExp = current == '-'
+ if (negateExp || current == '+') current = in.readChar().toInt
+ (current >= '0' && current <= '9') && {
+ var exp = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ exp < -214748364 || {
+ exp = exp * 10 + ('0' - current)
+ exp > 0
+ }
+ ) throw UnsafeNumber
+ }
+ negateExp && {
+ e10 += exp
+ e10 <= 0
+ } || !negateExp && {
+ e10 -= exp
+ exp != -2147483648
+ }
+ }
+ }) && (!consume || current == -1)
+ ) {
+ if (hiM10 eq null) {
+ var x =
+ if (e10 == 0) loM10.toDouble
+ else {
+ if (loM10 < 4503599627370496L && e10 >= -22 && e10 <= 38 - loDigits) {
+ val pow10 = pow10Doubles
+ if (e10 < 0) loM10 / pow10(-e10)
+ else if (e10 <= 22) loM10 * pow10(e10)
+ else {
+ val slop = 16 - loDigits
+ (loM10 * pow10(slop)) * pow10(e10 - slop)
+ }
+ } else toDouble(loM10, e10)
+ }
+ if (negate) x = -x
+ return x
+ }
+ return toBigDecimal(hiM10, loM10, loDigits, e10, max_bits, negate).doubleValue
+ }
+ throw UnsafeNumber
+ }
+
+ // Based on the 'Moderate Path' algorithm from the awesome library of Alexander Huszagh: https://github.com/Alexhuszagh/rust-lexical
+ // Here is his inspiring post: https://www.reddit.com/r/rust/comments/a6j5j1/making_rust_float_parsing_fast_and_correct
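+ // Same scheme as toFloat above, with the constants adjusted for double precision.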
+ @inline private[this] def toDouble(m10: Long, e10: Int): Double =
+ if (m10 == 0 || e10 < -343) 0.0
+ else if (e10 >= 310) Double.PositiveInfinity
+ else {
+ var shift = java.lang.Long.numberOfLeadingZeros(m10)
+ var m2 = unsignedMultiplyHigh(pow10Mantissas(e10 + 343), m10 << shift)
+ var e2 = (e10 * 108853 >> 15) - shift + 1 // (e10 * Math.log(10) / Math.log(2)).toInt - shift + 1
+ shift = java.lang.Long.numberOfLeadingZeros(m2)
+ m2 <<= shift
+ e2 -= shift
+ val truncatedBitNum = Math.max(-1074 - e2, 11)
+ val savedBitNum = 64 - truncatedBitNum
+ val mask = -1L >>> Math.max(savedBitNum, 0)
+ val halfwayDiff = (m2 & mask) - (mask >>> 1)
+ if (Math.abs(halfwayDiff) > 1 || savedBitNum <= 0) java.lang.Double.longBitsToDouble {
+ if (savedBitNum <= 0) m2 = 0
+ m2 >>>= truncatedBitNum
+ e2 += truncatedBitNum
+ if (savedBitNum >= 0 && halfwayDiff > 0) {
+ if (m2 == 0x1fffffffffffffL) {
+ m2 = 0x10000000000000L
+ e2 += 1
+ } else m2 += 1
+ }
+ if (e2 == -1074) m2
+ else if (e2 >= 972) 0x7ff0000000000000L
+ else (e2 + 1075).toLong << 52 | m2 & 0xfffffffffffffL
+ }
+ else java.math.BigDecimal.valueOf(m10, -e10).doubleValue
+ }
+
+ @noinline private[this] def readAll(in: OneCharReader, s: String, consume: Boolean): Unit = {
+ val len = s.length
+ var i = 0
+ while (i < len) {
+ if (in.readChar() != s.charAt(i)) throw UnsafeNumber
+ i += 1
+ }
+ val current = in.read() // to be consistent read the terminator
+ if (consume && current != -1 || !consume && current != '"') throw UnsafeNumber
+ }
+
+ // 64-bit unsigned multiplication was adopted from the great Hacker's Delight function
+ // (Henry S. Warren, Hacker's Delight, Addison-Wesley, 2nd edition, Fig. 8.2)
+ // https://doc.lagout.org/security/Hackers%20Delight.pdf
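+ // Note: computes the high 64 bits of the unsigned 128-bit product of x and y (the same result as
+ // Math.unsignedMultiplyHigh, which is only available since JDK 18) by splitting each operand into
+ // 32-bit halves and summing the partial products.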
+ @inline private[this] def unsignedMultiplyHigh(x: Long, y: Long): Long = {
+ val xl = x & 0xffffffffL
+ val xh = x >>> 32
+ val yl = y & 0xffffffffL
+ val yh = y >>> 32
+ val t = xh * yl + (xl * yl >>> 32)
+ xh * yh + (t >>> 32) + (xl * yh + (t & 0xffffffffL) >>> 32)
+ }
+
+ private[this] final val pow10Doubles: Array[Double] =
+ Array(1, 1e+1, 1e+2, 1e+3, 1e+4, 1e+5, 1e+6, 1e+7, 1e+8, 1e+9, 1e+10, 1e+11, 1e+12, 1e+13, 1e+14, 1e+15, 1e+16,
+ 1e+17, 1e+18, 1e+19, 1e+20, 1e+21, 1e+22)
+
+ private[this] final val pow10Mantissas: Array[Long] = Array(
+ -4671960508600951122L, -1228264617323800998L, -7685194413468457480L, -4994806998408183946L, -1631822729582842029L,
+ -7937418233630358124L, -5310086773610559751L, -2025922448585811785L, -8183730558007214222L, -5617977179081629873L,
+ -2410785455424649437L, -8424269937281487754L, -5918651403174471789L, -2786628235540701832L, -8659171674854020501L,
+ -6212278575140137722L, -3153662200497784248L, -8888567902952197011L, -6499023860262858360L, -3512093806901185046L,
+ -9112587656954322510L, -6779048552765515233L, -3862124672529506138L, -215969822234494768L, -7052510166537641086L,
+ -4203951689744663454L, -643253593753441413L, -7319562523736982739L, -4537767136243840520L, -1060522901877412746L,
+ -7580355841314464822L, -4863758783215693124L, -1468012460592228501L, -7835036815511224669L, -5182110000961642932L,
+ -1865951482774665761L, -8083748704375247957L, -5492999862041672042L, -2254563809124702148L, -8326631408344020699L,
+ -5796603242002637969L, -2634068034075909558L, -8563821548938525330L, -6093090917745768758L, -3004677628754823043L,
+ -8795452545612846258L, -6382629663588669919L, -3366601061058449494L, -9021654690802612790L, -6665382345075878084L,
+ -3720041912917459700L, -38366372719436721L, -6941508010590729807L, -4065198994811024355L, -469812725086392539L,
+ -7211161980820077193L, -4402266457597708587L, -891147053569747830L, -7474495936122174250L, -4731433901725329908L,
+ -1302606358729274481L, -7731658001846878407L, -5052886483881210105L, -1704422086424124727L, -7982792831656159810L,
+ -5366805021142811859L, -2096820258001126919L, -8228041688891786181L, -5673366092687344822L, -2480021597431793123L,
+ -8467542526035952558L, -5972742139117552794L, -2854241655469553088L, -8701430062309552536L, -6265101559459552766L,
+ -3219690930897053053L, -8929835859451740015L, -6550608805887287114L, -3576574988931720989L, -9152888395723407474L,
+ -6829424476226871438L, -3925094576856201394L, -294682202642863838L, -7101705404292871755L, -4265445736938701790L,
+ -720121152745989333L, -7367604748107325189L, -4597819916706768583L, -1135588877456072824L, -7627272076051127371L,
+ -4922404076636521310L, -1541319077368263733L, -7880853450996246689L, -5239380795317920458L, -1937539975720012668L,
+ -8128491512466089774L, -5548928372155224313L, -2324474446766642487L, -8370325556870233411L, -5851220927660403859L,
+ -2702340141148116920L, -8606491615858654931L, -6146428501395930760L, -3071349608317525546L, -8837122532839535322L,
+ -6434717147622031249L, -3431710416100151157L, -9062348037703676329L, -6716249028702207507L, -3783625267450371480L,
+ -117845565885576446L, -6991182506319567135L, -4127292114472071014L, -547429124662700864L, -7259672230555269896L,
+ -4462904269766699466L, -966944318780986428L, -7521869226879198374L, -4790650515171610063L, -1376627125537124675L,
+ -7777920981101784778L, -5110715207949843068L, -1776707991509915931L, -8027971522334779313L, -5423278384491086237L,
+ -2167411962186469893L, -8272161504007625539L, -5728515861582144020L, -2548958808550292121L, -8510628282985014432L,
+ -6026599335303880135L, -2921563150702462265L, -8743505996830120772L, -6317696477610263061L, -3285434578585440922L,
+ -8970925639256982432L, -6601971030643840136L, -3640777769877412266L, -9193015133814464522L, -6879582898840692749L,
+ -3987792605123478032L, -373054737976959636L, -7150688238876681629L, -4326674280168464132L, -796656831783192261L,
+ -7415439547505577019L, -4657613415954583370L, -1210330751515841308L, -7673985747338482674L, -4980796165745715438L,
+ -1614309188754756393L, -7926472270612804602L, -5296404319838617848L, -2008819381370884406L, -8173041140997884610L,
+ -5604615407819967859L, -2394083241347571919L, -8413831053483314306L, -5905602798426754978L, -2770317479606055818L,
+ -8648977452394866743L, -6199535797066195524L, -3137733727905356501L, -8878612607581929669L, -6486579741050024183L,
+ -3496538657885142324L, -9102865688819295809L, -6766896092596731857L, -3846934097318526917L, -196981603220770742L,
+ -7040642529654063570L, -4189117143640191558L, -624710411122851544L, -7307973034592864071L, -4523280274813692185L,
+ -1042414325089727327L, -7569037980822161435L, -4849611457600313890L, -1450328303573004458L, -7823984217374209643L,
+ -5168294253290374149L, -1848681798185579782L, -8072955151507069220L, -5479507920956448621L, -2237698882768172872L,
+ -8316090829371189901L, -5783427518286599473L, -2617598379430861437L, -8553528014785370254L, -6080224000054324913L,
+ -2988593981640518238L, -8785400266166405755L, -6370064314280619289L, -3350894374423386208L, -9011838011655698236L,
+ -6653111496142234891L, -3704703351750405709L, -19193171260619233L, -6929524759678968877L, -4050219931171323192L,
+ -451088895536766085L, -7199459587351560659L, -4387638465762062920L, -872862063775190746L, -7463067817500576073L,
+ -4717148753448332187L, -1284749923383027329L, -7720497729755473937L, -5038936143766954517L, -1686984161281305242L,
+ -7971894128441897632L, -5353181642124984136L, -2079791034228842266L, -8217398424034108273L, -5660062011615247437L,
+ -2463391496091671392L, -8457148712698376476L, -5959749872445582691L, -2838001322129590460L, -8691279853972075893L,
+ -6252413799037706963L, -3203831230369745799L, -8919923546622172981L, -6538218414850328322L, -3561087000135522498L,
+ -9143208402725783417L, -6817324484979841368L, -3909969587797413806L, -275775966319379353L, -7089889006590693952L,
+ -4250675239810979535L, -701658031336336515L, -7356065297226292178L, -4583395603105477319L, -1117558485454458744L,
+ -7616003081050118571L, -4908317832885260310L, -1523711272679187483L, -7869848573065574033L, -5225624697904579637L,
+ -1920344853953336643L, -8117744561361917258L, -5535494683275008668L, -2307682335666372931L, -8359830487432564938L,
+ -5838102090863318269L, -2685941595151759932L, -8596242524610931813L, -6133617137336276863L, -3055335403242958174L,
+ -8827113654667930715L, -6422206049907525490L, -3416071543957018958L, -9052573742614218705L, -6704031159840385477L,
+ -3768352931373093942L, -98755145788979524L, -6979250993759194058L, -4112377723771604669L, -528786136287117932L,
+ -7248020362820530564L, -4448339435098275301L, -948738275445456222L, -7510490449794491995L, -4776427043815727089L,
+ -1358847786342270957L, -7766808894105001205L, -5096825099203863602L, -1759345355577441598L, -8017119874876982855L,
+ -5409713825168840664L, -2150456263033662926L, -8261564192037121185L, -5715269221619013577L, -2532400508596379068L,
+ -8500279345513818773L, -6013663163464885563L, -2905392935903719049L, -8733399612580906262L, -6305063497298744923L,
+ -3269643353196043250L, -8961056123388608887L, -6589634135808373205L, -3625356651333078602L, -9183376934724255983L,
+ -6867535149977932074L, -3972732919045027189L, -354230130378896082L, -7138922859127891907L, -4311967555482476980L,
+ -778273425925708321L, -7403949918844649557L, -4643251380128424042L, -1192378206733142148L, -7662765406849295699L,
+ -4966770740134231719L, -1596777406740401745L, -7915514906853832947L, -5282707615139903279L, -1991698500497491195L,
+ -8162340590452013853L, -5591239719637629412L, -2377363631119648861L, -8403381297090862394L, -5892540602936190089L,
+ -2753989735242849707L, -8638772612167862923L, -6186779746782440750L, -3121788665050663033L, -8868646943297746252L,
+ -6474122660694794911L, -3480967307441105734L, -9093133594791772940L, -6754730975062328271L, -3831727700400522434L,
+ -177973607073265139L, -7028762532061872568L, -4174267146649952806L, -606147914885053103L, -7296371474444240046L,
+ -4508778324627912153L, -1024286887357502287L, -7557708332239520786L, -4835449396872013078L, -1432625727662628443L,
+ -7812920107430224633L, -5154464115860392887L, -1831394126398103205L, -8062150356639896359L, -5466001927372482545L,
+ -2220816390788215277L, -8305539271883716405L, -5770238071427257602L, -2601111570856684098L, -8543223759426509417L,
+ -6067343680855748868L, -2972493582642298180L, -8775337516792518219L, -6357485877563259869L, -3335171328526686933L,
+ -9002011107970261189L, -6640827866535438582L, -3689348814741910324L, -9223372036854775808L, -6917529027641081856L,
+ -4035225266123964416L, -432345564227567616L, -7187745005283311616L, -4372995238176751616L, -854558029293551616L,
+ -7451627795949551616L, -4702848726509551616L, -1266874889709551616L, -7709325833709551616L, -5024971273709551616L,
+ -1669528073709551616L, -7960984073709551616L, -5339544073709551616L, -2062744073709551616L, -8206744073709551616L,
+ -5646744073709551616L, -2446744073709551616L, -8446744073709551616L, -5946744073709551616L, -2821744073709551616L,
+ -8681119073709551616L, -6239712823709551616L, -3187955011209551616L, -8910000909647051616L, -6525815118631426616L,
+ -3545582879861895366L, -9133518327554766460L, -6805211891016070171L, -3894828845342699810L, -256850038250986858L,
+ -7078060301547948643L, -4235889358507547899L, -683175679707046970L, -7344513827457986212L, -4568956265895094861L,
+ -1099509313941480672L, -7604722348854507276L, -4894216917640746191L, -1506085128623544835L, -7858832233030797378L,
+ -5211854272861108819L, -1903131822648998119L, -8106986416796705681L, -5522047002568494197L, -2290872734783229842L,
+ -8349324486880600507L, -5824969590173362730L, -2669525969289315508L, -8585982758446904049L, -6120792429631242157L,
+ -3039304518611664792L, -8817094351773372351L, -6409681921289327535L, -3400416383184271515L, -9042789267131251553L,
+ -6691800565486676537L, -3753064688430957767L, -79644842111309304L, -6967307053960650171L, -4097447799023424810L,
+ -510123730351893109L, -7236356359111015049L, -4433759430461380907L, -930513269649338230L, -7499099821171918250L,
+ -4762188758037509908L, -1341049929119499481L, -7755685233340769032L, -5082920523248573386L, -1741964635633328828L,
+ -8006256924911912374L, -5396135137712502563L, -2133482903713240300L, -8250955842461857044L, -5702008784649933400L,
+ -2515824962385028846L, -8489919629131724885L, -6000713517987268202L, -2889205879056697349L, -8723282702051517699L,
+ -6292417359137009220L, -3253835680493873621L, -8951176327949752869L, -6577284391509803182L, -3609919470959866074L,
+ -9173728696990998152L, -6855474852811359786L, -3957657547586811828L, -335385916056126881L, -7127145225176161157L,
+ -4297245513042813542L, -759870872876129024L, -7392448323188662496L, -4628874385558440216L, -1174406963520662366L,
+ -7651533379841495835L, -4952730706374481889L, -1579227364540714458L, -7904546130479028392L, -5268996644671397586L,
+ -1974559787411859078L, -8151628894773493780L, -5577850100039479321L, -2360626606621961247L, -8392920656779807636L,
+ -5879464802547371641L, -2737644984756826647L, -8628557143114098510L, -6174010410465235234L, -3105826994654156138L,
+ -8858670899299929442L, -6461652605697523899L, -3465379738694516970L, -9083391364325154962L, -6742553186979055799L,
+ -3816505465296431844L, -158945813193151901L, -7016870160886801794L, -4159401682681114339L, -587566084924005019L,
+ -7284757830718584993L, -4494261269970843337L, -1006140569036166268L, -7546366883288685774L, -4821272585683469313L,
+ -1414904713676948737L, -7801844473689174817L, -5140619573684080617L, -1814088448677712867L, -8051334308064652398L,
+ -5452481866653427593L, -2203916314889396588L, -8294976724446954723L, -5757034887131305500L, -2584607590486743971L,
+ -8532908771695296838L, -6054449946191733143L, -2956376414312278525L, -8765264286586255934L, -6344894339805432014L,
+ -3319431906329402113L, -8992173969096958177L, -6628531442943809817L, -3673978285252374367L, -9213765455923815836L,
+ -6905520801477381891L, -4020214983419339459L, -413582710846786420L, -7176018221920323369L, -4358336758973016307L,
+ -836234930288882479L, -7440175859071633406L, -4688533805412153853L, -1248981238337804412L, -7698142301602209614L,
+ -5010991858575374113L, -1652053804791829737L, -7950062655635975442L, -5325892301117581398L, -2045679357969588844L,
+ -8196078626372074883L, -5633412264537705700L, -2430079312244744221L, -8436328597794046994L, -5933724728815170839L,
+ -2805469892591575644L, -8670947710510816634L, -6226998619711132888L, -3172062256211528206L, -8900067937773286985L,
+ -6513398903789220827L, -3530062611309138130L, -9123818159709293187L, -6793086681209228580L, -3879672333084147821L,
+ -237904397927796872L, -7066219276345954901L, -4221088077005055722L, -664674077828931749L, -7332950326284164199L,
+ -4554501889427817345L, -1081441343357383777L, -7593429867239446717L, -4880101315621920492L, -1488440626100012711L,
+ -7847804418953589800L, -5198069505264599346L, -1885900863153361279L, -8096217067111932656L, -5508585315462527915L,
+ -2274045625900771990L, -8338807543829064350L, -5811823411358942533L, -2653093245771290262L, -8575712306248138270L,
+ -6107954364382784934L, -3023256937051093263L, -8807064613298015146L, -6397144748195131028L, -3384744916816525881L,
+ -9032994600651410532L, -6679557232386875260L, -3737760522056206171L, -60514634142869810L, -6955350673980375487L,
+ -4082502324048081455L, -491441886632713915L, -7224680206786528053L, -4419164240055772162L, -912269281642327298L,
+ -7487697328667536418L, -4747935642407032618L, -1323233534581402868L, -7744549986754458649L, -5069001465015685407L,
+ -1724565812842218855L, -7995382660667468640L, -5382542307406947896L, -2116491865831296966L, -8240336443785642460L,
+ -5688734536304665171L, -2499232151953443560L, -8479549122611984081L, -5987750384837592197L, -2873001962619602342L,
+ -8713155254278333320L, -6279758049420528746L, -3238011543348273028L, -8941286242233752499L, -6564921784364802720L,
+ -3594466212028615495L, -9164070410158966541L, -6843401994271320272L, -3942566474411762436L, -316522074587315140L,
+ -7115355324258153819L, -4282508136895304370L, -741449152691742558L, -7380934748073420955L, -4614482416664388289L,
+ -1156417002403097458L, -7640289654143017767L, -4938676049251384305L, -1561659043136842477L, -7893565929601608404L,
+ -5255271393574622601L, -1957403223540890347L, -8140906042354138323L, -5564446534515285000L, -2343872149716718346L,
+ -8382449121214030822L, -5866375383090150624L, -2721283210435300376L, -8618331034163144591L, -6161227774276542835L,
+ -3089848699418290639L, -8848684464777513506L, -6449169562544503978L, -3449775934753242068L, -9073638986861858149L,
+ -6730362715149934782L, -3801267375510030573L, -139898200960150313L, -7004965403241175802L, -4144520735624081848L,
+ -568964901102714406L, -7273132090830278360L, -4479729095110460046L, -987975350460687153L, -7535013621679011327L,
+ -4807081008671376254L, -1397165242411832414L, -7790757304148477115L, -5126760611758208489L, -1796764746270372707L,
+ -8040506994060064798L, -5438947724147693094L, -2186998636757228463L, -8284403175614349646L, -5743817951090549153L,
+ -2568086420435798537L, -8522583040413455942L, -6041542782089432023L, -2940242459184402125L, -8755180564631333184L,
+ -6332289687361778576L, -3303676090774835316L, -8982326584375353929L, -6616222212041804507L, -3658591746624867729L,
+ -9204148869281624187L, -6893500068174642330L, -4005189066790915008L, -394800315061255856L, -7164279224554366766L,
+ -4343663012265570553L, -817892746904575288L, -7428711994456441411L, -4674203974643163860L, -1231068949876566920L,
+ -7686947121313936181L, -4996997883215032323L, -1634561335591402499L, -7939129862385708418L, -5312226309554747619L,
+ -2028596868516046619L, -8185402070463610993L, -5620066569652125837L
+ )
+}
diff --git a/zio-json/jvm-native/src/main/scala/zio/json/internal/FastStringWrite.scala b/zio-json/jvm-native/src/main/scala/zio/json/internal/FastStringWrite.scala
new file mode 100644
index 000000000..ef8cc9b31
--- /dev/null
+++ b/zio-json/jvm-native/src/main/scala/zio/json/internal/FastStringWrite.scala
@@ -0,0 +1,169 @@
+package zio.json.internal
+
+import java.nio.CharBuffer
+import java.util.Arrays
+
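+// A growable, single-threaded character buffer implementing `Write` (presumably a leaner alternative to
+// java.lang.StringBuilder for assembling JSON output); `reset()` lets the backing array be reused.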
+final class FastStringWrite(initial: Int) extends Write {
+ require(initial >= 8)
+ private[this] var chars: Array[Char] = new Array[Char](initial)
+ private[this] var count: Int = 0
+
+ @inline def reset(): Unit = count = 0
+
+ def write(s: String): Unit = {
+ val l = s.length
+ var cs = chars
+ val i = count
+ if (i + l >= cs.length) {
+ cs = Arrays.copyOf(cs, Math.max(cs.length << 1, i + l))
+ chars = cs
+ }
+ s.getChars(0, l, cs, i)
+ count = i + l
+ }
+
+ def write(c: Char): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 1 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = c
+ count = i + 1
+ }
+
+ override def write(cs: Array[Char], from: Int, to: Int): Unit = {
+ var cs_ = chars
+ val from_ = count
+ val len = to - from
+ if (from_ + len >= cs_.length) {
+ cs_ = Arrays.copyOf(cs_, Math.max(cs_.length << 1, from_ + len))
+ chars = cs_
+ }
+ var i = 0
+ while (i < len) {
+ cs_(from_ + i) = cs(from + i)
+ i += 1
+ }
+ count = from_ + len
+ }
+
+ override def write(c1: Char, c2: Char): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 1 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = c1
+ cs(i + 1) = c2
+ count = i + 2
+ }
+
+ override def write(c1: Char, c2: Char, c3: Char): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 2 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = c1
+ cs(i + 1) = c2
+ cs(i + 2) = c3
+ count = i + 3
+ }
+
+ override def write(c1: Char, c2: Char, c3: Char, c4: Char): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 3 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = c1
+ cs(i + 1) = c2
+ cs(i + 2) = c3
+ cs(i + 3) = c4
+ count = i + 4
+ }
+
+ override def write(c1: Char, c2: Char, c3: Char, c4: Char, c5: Char): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 4 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = c1
+ cs(i + 1) = c2
+ cs(i + 2) = c3
+ cs(i + 3) = c4
+ cs(i + 4) = c5
+ count = i + 5
+ }
+
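+ // The Short parameters below pack two ASCII chars (low byte first, then high byte), as produced by the
+ // packed two-digit lookup tables in SafeNumbers.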
+ override def write(s: Short): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 1 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = (s & 0xff).toChar
+ cs(i + 1) = (s >> 8).toChar
+ count = i + 2
+ }
+
+ override def write(s1: Short, s2: Short): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 3 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = (s1 & 0xff).toChar
+ cs(i + 1) = (s1 >> 8).toChar
+ cs(i + 2) = (s2 & 0xff).toChar
+ cs(i + 3) = (s2 >> 8).toChar
+ count = i + 4
+ }
+
+ override def write(s1: Short, s2: Short, s3: Short): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 5 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = (s1 & 0xff).toChar
+ cs(i + 1) = (s1 >> 8).toChar
+ cs(i + 2) = (s2 & 0xff).toChar
+ cs(i + 3) = (s2 >> 8).toChar
+ cs(i + 4) = (s3 & 0xff).toChar
+ cs(i + 5) = (s3 >> 8).toChar
+ count = i + 6
+ }
+
+ override def write(s1: Short, s2: Short, s3: Short, s4: Short): Unit = {
+ var cs = chars
+ val i = count
+ if (i + 7 >= cs.length) {
+ cs = Arrays.copyOf(cs, cs.length << 1)
+ chars = cs
+ }
+ cs(i) = (s1 & 0xff).toChar
+ cs(i + 1) = (s1 >> 8).toChar
+ cs(i + 2) = (s2 & 0xff).toChar
+ cs(i + 3) = (s2 >> 8).toChar
+ cs(i + 4) = (s3 & 0xff).toChar
+ cs(i + 5) = (s3 >> 8).toChar
+ cs(i + 6) = (s4 & 0xff).toChar
+ cs(i + 7) = (s4 >> 8).toChar
+ count = i + 8
+ }
+
+ def buffer: CharSequence = CharBuffer.wrap(chars, 0, count)
+
+ override def toString: String = new String(chars, 0, count)
+}
diff --git a/zio-json/jvm-native/src/main/scala/zio/json/internal/SafeNumbers.scala b/zio-json/jvm-native/src/main/scala/zio/json/internal/SafeNumbers.scala
new file mode 100644
index 000000000..cbc4bb1c6
--- /dev/null
+++ b/zio-json/jvm-native/src/main/scala/zio/json/internal/SafeNumbers.scala
@@ -0,0 +1,1055 @@
+/*
+ * Copyright 2019-2022 John A. De Goes and the ZIO Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package zio.json.internal
+
+import java.util.UUID
+
+/**
+ * Total, fast, number parsing.
+ *
+ * The Java and Scala standard libraries throw exceptions when we attempt to parse an invalid number. Unfortunately,
+ * exceptions are very expensive, and untrusted data can be maliciously constructed to DOS a server.
+ *
+ * This suite of functions mitigates such attacks by building up the numbers one character at a time, which has
+ * been shown through extensive benchmarking to be orders of magnitude faster than exception-throwing stdlib parsers,
+ * for valid and invalid inputs. This approach, proposed by alexknvl, was also benchmarked against regexp-based
+ * pre-validation.
+ *
+ * Note that although the behaviour is identical to the Java stdlib when given the canonical form of a primitive
+ * (i.e. its .toString), there may be differences in behaviour for non-canonical forms, e.g. the Java stdlib may
+ * reject "1.0" when parsed as a `BigInteger` whereas we may parse it as `1`, although "1.1" would be rejected.
+ * Parsing of `BigDecimal` preserves trailing zeros but not leading zeros, e.g. "000.00001000" will be parsed as
+ * "1.000e-5", which is useful in cases where the trailing zeros denote measurement accuracy.
+ *
+ * `BigInteger`, `BigDecimal`, `Float` and `Double` have a configurable bit limit on the size of the significand
+ * (256 bits by default) to avoid OOM-style attacks.
+ *
+ * Results are contained in a specialisation of Option that avoids boxing.
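+ *
+ * For example (a usage sketch; the results in the comments are the expected outcomes, expressed with the
+ * specialised option types such as `IntSome`/`IntNone` and `DoubleSome`/`DoubleNone`):
+ *
+ * {{{
+ *   SafeNumbers.int("1234")          // IntSome(1234)
+ *   SafeNumbers.int("not a number")  // IntNone
+ *   SafeNumbers.double("1.5")        // DoubleSome(1.5)
+ *   SafeNumbers.double("")           // DoubleNone
+ * }}}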
+ */
+object SafeNumbers {
+ import UnsafeNumbers.UnsafeNumber
+
+ def byte(num: String): ByteOption =
+ try ByteSome(UnsafeNumbers.byte(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => ByteNone }
+
+ def short(num: String): ShortOption =
+ try ShortSome(UnsafeNumbers.short(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => ShortNone }
+
+ def int(num: String): IntOption =
+ try IntSome(UnsafeNumbers.int(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => IntNone }
+
+ def long(num: String): LongOption =
+ try LongSome(UnsafeNumbers.long(num))
+ catch { case _: UnexpectedEnd | UnsafeNumber => LongNone }
+
+ def bigInteger(num: String, max_bits: Int = 256): Option[java.math.BigInteger] =
+ try Some(UnsafeNumbers.bigInteger(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => None }
+
+ def bigInt(num: String, max_bits: Int = 256): Option[BigInt] =
+ try Some(UnsafeNumbers.bigInt(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => None }
+
+ def float(num: String, max_bits: Int = 256): FloatOption =
+ try FloatSome(UnsafeNumbers.float(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => FloatNone }
+
+ def double(num: String, max_bits: Int = 256): DoubleOption =
+ try DoubleSome(UnsafeNumbers.double(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => DoubleNone }
+
+ def bigDecimal(num: String, max_bits: Int = 256): Option[java.math.BigDecimal] =
+ try Some(UnsafeNumbers.bigDecimal(num, max_bits))
+ catch { case _: UnexpectedEnd | UnsafeNumber => None }
+
+ def toString(x: java.math.BigDecimal): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: java.math.BigInteger): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: Double): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: Float): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def toString(x: UUID): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: java.math.BigDecimal, out: Write): Unit = {
+ var exp = writeBigDecimal(x.unscaledValue, x.scale, 0, null, out)
+ if (exp != 0) {
+ var sc = '+'
+ if (exp < 0) {
+ sc = '-'
+ exp = -exp
+ }
+ out.write('E', sc)
+ writeMantissa(exp, out)
+ }
+ }
+
+ private[this] def writeBigDecimal(
+ x: java.math.BigInteger,
+ scale: Int,
+ blockScale: Int,
+ ss: Array[java.math.BigInteger],
+ out: Write
+ ): Int = {
+ val bitLen = x.bitLength
+ if (bitLen < 64) {
+ val v = x.longValue
+ val pv = Math.abs(v)
+ val digits =
+ if (pv >= 100000000000000000L) {
+ if (pv >= 1000000000000000000L) 19
+ else 18
+ } else digitCount(pv)
+ val dotOff = scale - blockScale
+ val exp = (digits - 1) - dotOff
+ if (scale >= 0 && exp >= -6) {
+ if (exp < 0) {
+ if (v >= 0) out.write('0', '.')
+ else out.write('-', '0', '.')
+ var zeros = -exp - 1
+ while (zeros > 0) {
+ out.write('0')
+ zeros -= 1
+ }
+ write(pv, out)
+ } else if (dotOff > 0) writeLongWithDot(v, dotOff, out)
+ else write(v, out)
+ 0
+ } else {
+ if (digits > 1) writeLongWithDot(v, digits - 1, out)
+ else {
+ write(v, out)
+ if (blockScale > 0) out.write('.')
+ }
+ exp
+ }
+ } else {
+ val n = calculateTenPow18SquareNumber(bitLen)
+ val ss1 =
+ if (ss eq null) getTenPow18Squares(n)
+ else ss
+ val qr = x.divideAndRemainder(ss1(n))
+ val exp = writeBigDecimal(qr(0), scale, (18 << n) + blockScale, ss1, out)
+ writeBigDecimalRemainder(qr(1), scale, blockScale, n - 1, ss1, out)
+ exp
+ }
+ }
+
+ private[this] def writeLongWithDot(v: Long, dotOff: Int, out: Write): Unit = {
+ val pow10 = pow10longs(dotOff)
+ val q = v / pow10
+ val r = Math.abs(v - q * pow10)
+ write(q, out)
+ out.write('.')
+ var zeros = dotOff - digitCount(r)
+ while (zeros > 0) {
+ out.write('0')
+ zeros -= 1
+ }
+ write(r, out)
+ }
+
+ private[this] def writeBigDecimalRemainder(
+ x: java.math.BigInteger,
+ scale: Int,
+ blockScale: Int,
+ n: Int,
+ ss: Array[java.math.BigInteger],
+ out: Write
+ ): Unit =
+ if (n < 0) {
+ val v = Math.abs(x.longValue)
+ var dotOff = scale - blockScale
+ if (dotOff > 0 && dotOff < 18) {
+ val pow10 = pow10longs(dotOff)
+ val q = v / pow10
+ val r = v - q * pow10
+ var zeros = 18 - dotOff - digitCount(q)
+ while (zeros > 0) {
+ out.write('0')
+ zeros -= 1
+ }
+ writeMantissa(q, out)
+ out.write('.')
+ dotOff -= digitCount(r)
+ while (dotOff > 0) {
+ out.write('0')
+ dotOff -= 1
+ }
+ writeMantissa(r, out)
+ } else {
+ if (dotOff == 18) out.write('.')
+ write18Digits(v, out)
+ }
+ } else {
+ val qr = x.divideAndRemainder(ss(n))
+ writeBigDecimalRemainder(qr(0), scale, (18 << n) + blockScale, n - 1, ss, out)
+ writeBigDecimalRemainder(qr(1), scale, blockScale, n - 1, ss, out)
+ }
+
+ def write(x: java.math.BigInteger, out: Write): Unit = writeBigInteger(x, null, out)
+
+ private[this] def writeBigInteger(x: java.math.BigInteger, ss: Array[java.math.BigInteger], out: Write): Unit = {
+ val bitLen = x.bitLength
+ if (bitLen < 64) write(x.longValue, out)
+ else {
+ val n = calculateTenPow18SquareNumber(bitLen)
+ val ss1 =
+ if (ss eq null) getTenPow18Squares(n)
+ else ss
+ val qr = x.divideAndRemainder(ss1(n))
+ writeBigInteger(qr(0), ss1, out)
+ writeBigIntegerRemainder(qr(1), n - 1, ss1, out)
+ }
+ }
+
+ private[this] def writeBigIntegerRemainder(
+ x: java.math.BigInteger,
+ n: Int,
+ ss: Array[java.math.BigInteger],
+ out: Write
+ ): Unit =
+ if (n < 0) write18Digits(Math.abs(x.longValue), out)
+ else {
+ val qr = x.divideAndRemainder(ss(n))
+ writeBigIntegerRemainder(qr(0), n - 1, ss, out)
+ writeBigIntegerRemainder(qr(1), n - 1, ss, out)
+ }
+
+ private[this] def calculateTenPow18SquareNumber(bitLen: Int): Int = {
+ val m = Math.max(
+ (bitLen * 71828554L >> 32).toInt - 1,
+ 1
+ ) // Math.max((x.bitLength * Math.log(2) / Math.log(1e18)).toInt - 1, 1)
+ 31 - java.lang.Integer.numberOfLeadingZeros(m)
+ }
+
+ private[this] def getTenPow18Squares(n: Int): Array[java.math.BigInteger] = {
+ var ss = tenPow18Squares
+ var i = ss.length
+ if (n >= i) {
+ var s = ss(i - 1)
+ ss = java.util.Arrays.copyOf(ss, n + 1)
+ while (i <= n) {
+ s = s.multiply(s)
+ ss(i) = s
+ i += 1
+ }
+ tenPow18Squares = ss
+ }
+ ss
+ }
+
+ // Based on the amazing work of Raffaello Giulietti
+ // "The Schubfach way to render doubles": https://drive.google.com/file/d/1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN/view
+ // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/DoubleToDecimal.java
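+ // Writes the shortest decimal representation that round-trips back to the same Double (the Schubfach
+ // property), using a plain "d.ddd" form when the decimal exponent is in [-3, 7) and scientific
+ // "d.dddEe" notation otherwise.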
+ def write(x: Double, out: Write): Unit = {
+ val bits = java.lang.Double.doubleToLongBits(x)
+ val ieeeExponent = (bits >> 52).toInt & 0x7ff
+ val ieeeMantissa = bits & 0xfffffffffffffL
+ if (ieeeExponent == 2047) {
+ out.write(
+ if (x != x) """"NaN""""
+ else if (bits < 0) """"-Infinity""""
+ else """"Infinity""""
+ )
+ } else {
+ if (bits < 0) out.write('-')
+ if (x == 0.0f) out.write('0', '.', '0')
+ else {
+ var e = ieeeExponent - 1075
+ var m = ieeeMantissa | 0x10000000000000L
+ var dv = 0L
+ var exp = 0
+ if (e == 0) dv = m
+ else if (e >= -52 && e < 0 && m << e == 0) dv = m >> -e
+ else {
+ var expShift, expCorr = 0
+ var cblShift = 2
+ if (ieeeExponent == 0) {
+ e = -1074
+ m = ieeeMantissa
+ if (ieeeMantissa < 3) {
+ m *= 10
+ expShift = 1
+ }
+ } else if (ieeeMantissa == 0 && ieeeExponent > 1) {
+ expCorr = 131007
+ cblShift = 1
+ }
+ exp = e * 315653 - expCorr >> 20
+ val i = exp + 324 << 1
+ val g1 = gs(i)
+ val g0 = gs(i + 1)
+ val h = (-exp * 108853 >> 15) + e + 2
+ val cb = m << 2
+ val outm1 = (m.toInt & 0x1) - 1
+ val vb = rop(g1, g0, cb << h)
+ val vbls = rop(g1, g0, cb - cblShift << h) + outm1
+ val vbrd = outm1 - rop(g1, g0, cb + 2 << h)
+ val s = vb >> 2
+ if (
+ s < 100 || {
+ dv = Math.multiplyHigh(s, 1844674407370955168L) // divide a positive long by 10
+ val sp40 = dv * 40
+ val upin = (vbls - sp40).toInt
+ (((sp40 + vbrd).toInt + 40) ^ upin) >= 0 || {
+ dv += ~upin >>> 31
+ exp += 1
+ false
+ }
+ }
+ ) {
+ val s4 = s << 2
+ val uin = (vbls - s4).toInt
+ dv = (~ {
+ if ((((s4 + vbrd).toInt + 4) ^ uin) < 0) uin
+ else (vb.toInt & 0x3) + (s.toInt & 0x1) - 3
+ } >>> 31) + s
+ exp -= expShift
+ }
+ }
+ val len = digitCount(dv)
+ exp += len - 1
+ if (exp < -3 || exp >= 7) {
+ val sdv = stripTrailingZeros(dv)
+ writeMantissaWithDot(sdv, out)
+ if (sdv >= 10) out.write('E')
+ else out.write('0', 'E')
+ write(exp, out)
+ } else if (exp < 0) {
+ out.write('0', '.')
+ while ({
+ exp += 1
+ exp != 0
+ }) out.write('0')
+ writeMantissa(stripTrailingZeros(dv), out)
+ } else {
+ var pow10i = len - exp - 1
+ if (pow10i > 0) {
+ val pow10 = pow10longs(pow10i)
+ val q = dv / pow10
+ val r = dv - q * pow10
+ writeMantissa(q, out)
+ out.write('.')
+ pow10i -= digitCount(r)
+ while (pow10i > 0) {
+ out.write('0')
+ pow10i -= 1
+ }
+ writeMantissa(stripTrailingZeros(r), out)
+ } else {
+ writeMantissa(dv.toInt, out)
+ out.write('.', '0')
+ }
+ }
+ }
+ }
+ }
+
+ def write(x: Float, out: Write): Unit = {
+ val bits = java.lang.Float.floatToIntBits(x)
+ val ieeeExponent = (bits >> 23) & 0xff
+ val ieeeMantissa = bits & 0x7fffff
+ if (ieeeExponent == 255) {
+ out.write(
+ if (x != x) """"NaN""""
+ else if (bits < 0) """"-Infinity""""
+ else """"Infinity""""
+ )
+ } else {
+ if (bits < 0) out.write('-')
+ if (x == 0.0f) out.write('0', '.', '0')
+ else {
+ var e = ieeeExponent - 150
+ var m = ieeeMantissa | 0x800000
+ var dv, exp = 0
+ if (e == 0) dv = m
+ else if (e >= -23 && e < 0 && m << e == 0) dv = m >> -e
+ else {
+ var expShift, expCorr = 0
+ var cblShift = 2
+ if (ieeeExponent == 0) {
+ e = -149
+ m = ieeeMantissa
+ if (ieeeMantissa < 8) {
+ m *= 10
+ expShift = 1
+ }
+ } else if (ieeeMantissa == 0 && ieeeExponent > 1) {
+ expCorr = 131007
+ cblShift = 1
+ }
+ exp = e * 315653 - expCorr >> 20
+ val g1 = gs(exp + 324 << 1) + 1
+ val h = (-exp * 108853 >> 15) + e + 1
+ val cb = m << 2
+ val outm1 = (m & 0x1) - 1
+ val vb = rop(g1, cb << h)
+ val vbls = rop(g1, cb - cblShift << h) + outm1
+ val vbrd = outm1 - rop(g1, cb + 2 << h)
+ val s = vb >> 2
+ if (
+ s < 100 || {
+ dv = (s * 3435973837L >>> 35).toInt // divide a positive int by 10
+ val sp40 = dv * 40
+ val upin = vbls - sp40
+ ((sp40 + vbrd + 40) ^ upin) >= 0 || {
+ dv += ~upin >>> 31
+ exp += 1
+ false
+ }
+ }
+ ) {
+ val s4 = s << 2
+ val uin = vbls - s4
+ dv = (~ {
+ if (((s4 + vbrd + 4) ^ uin) < 0) uin
+ else (vb & 0x3) + (s & 0x1) - 3
+ } >>> 31) + s
+ exp -= expShift
+ }
+ }
+ val len = digitCount(dv.toLong)
+ exp += len - 1
+ if (exp < -3 || exp >= 7) {
+ val sdv = stripTrailingZeros(dv)
+ writeMantissaWithDot(sdv, out)
+ if (sdv >= 10) out.write('E')
+ else out.write('0', 'E')
+ write(exp, out)
+ } else if (exp < 0) {
+ out.write('0', '.')
+ while ({
+ exp += 1
+ exp != 0
+ }) out.write('0')
+ writeMantissa(stripTrailingZeros(dv), out)
+ } else {
+ var pow10i = len - exp - 1
+ if (pow10i > 0) {
+ val pow10 = pow10ints(pow10i)
+ val q = dv / pow10
+ val r = dv - q * pow10
+ writeMantissa(q, out)
+ out.write('.')
+ pow10i -= digitCount(r.toLong)
+ while (pow10i > 0) {
+ out.write('0')
+ pow10i -= 1
+ }
+ writeMantissa(stripTrailingZeros(r), out)
+ } else {
+ writeMantissa(dv, out)
+ out.write('.', '0')
+ }
+ }
+ }
+ }
+ }
+
+ def write(x: UUID, out: Write): Unit = {
+ val ds = lowerCaseHexDigits
+ val msb = x.getMostSignificantBits
+ val lsb = x.getLeastSignificantBits
+ val msb1 = (msb >> 32).toInt
+ val msb2 = msb.toInt
+ val lsb1 = (lsb >>> 32).toInt
+ val lsb2 = lsb.toInt
+ out.write(ds(msb1 >>> 24), ds(msb1 >> 16 & 0xff), ds(msb1 >> 8 & 0xff), ds(msb1 & 0xff))
+ out.write('-')
+ out.write(ds(msb2 >>> 24), ds(msb2 >> 16 & 0xff))
+ out.write('-')
+ out.write(ds(msb2 >> 8 & 0xff), ds(msb2 & 0xff))
+ out.write('-')
+ out.write(ds(lsb1 >>> 24), ds(lsb1 >> 16 & 0xff))
+ out.write('-')
+ out.write(ds(lsb1 >> 8 & 0xff), ds(lsb1 & 0xff))
+ out.write(ds(lsb2 >>> 24), ds(lsb2 >> 16 & 0xff), ds(lsb2 >> 8 & 0xff), ds(lsb2 & 0xff))
+ }
+
+ private[json] def writeHex(c: Char, out: Write): Unit = {
+ val ds = lowerCaseHexDigits
+ out.write(ds(c >> 8 & 0xff), ds(c & 0xff))
+ }
+
+ private[json] def writeNano(x: Int, out: Write): Unit = {
+ out.write('.')
+ var coeff = 100000000
+ while (coeff > x) {
+ out.write('0')
+ coeff = (coeff * 3435973837L >> 35).toInt // divide a positive int by 10
+ }
+ write(stripTrailingZeros(x), out)
+ }
+
+ private[this] def rop(g1: Long, g0: Long, cp: Long): Long = {
+ val x = Math.multiplyHigh(g0, cp) + (g1 * cp >>> 1)
+ Math.multiplyHigh(g1, cp) + (x >>> 63) | (-x ^ x) >>> 63
+ }
+
+ private[this] def rop(g: Long, cp: Int): Int = {
+ val x = Math.multiplyHigh(g, cp.toLong << 32)
+ (x >>> 31).toInt | -x.toInt >>> 31
+ }
+
+ private[this] def stripTrailingZeros(x: Long): Long = {
+ var q0, q1 = x
+ if (
+ (q1 << 56 == 0L) && {
+ q0 = Math.multiplyHigh(q1, 6189700196426901375L) >>> 25 // divide a positive long by 100000000
+ x - q0 * 100000000L == 0L
+ }
+ ) return stripTrailingZeros(q0.toInt).toLong
+ while ({
+ q0 = q1
+ q1 = Math.multiplyHigh(q1, 1844674407370955168L) // divide a positive long by 10
+ q1 * 10 == q0
+ }) ()
+ q0
+ }
+
+ private[this] def stripTrailingZeros(x: Int): Int = {
+ var q0, q1 = x
+ while ({
+ val qp = q1 * 3435973837L
+ q0 = q1
+ q1 = (qp >> 35).toInt // divide a positive int by 10
+ (qp & 0x7e0000000L) == 0 // check if q is divisible by 10
+ }) ()
+ q0
+ }
+
+ def write(a: Long, out: Write): Unit = {
+ var q0 = a
+ if (q0 < 0) {
+ q0 = -q0
+ out.write('-')
+ if (q0 == a) {
+ out.write('9', '2', '2')
+ q0 = 3372036854775808L
+ }
+ }
+ val m1 = 100000000L
+ if (q0 < m1) writeMantissa(q0.toInt, out)
+ else {
+ val m2 = 6189700196426901375L
+ val q1 = Math.multiplyHigh(q0, m2) >>> 25 // divide a positive long by 100000000
+ if (q1 < m1) writeMantissa(q1.toInt, out)
+ else {
+ val q2 = Math.multiplyHigh(q1, m2) >>> 25 // divide a small positive long by 100000000
+ writeMantissa(q2.toInt, out)
+ write8Digits(q1 - q2 * m1, out)
+ }
+ write8Digits(q0 - q1 * m1, out)
+ }
+ }
+
+ private[this] def writeMantissa(q0: Long, out: Write): Unit =
+ if (q0.toInt == q0) writeMantissa(q0.toInt, out)
+ else {
+ val q1 = Math.multiplyHigh(q0, 6189700196426901375L) >>> 25 // divide a positive long by 100000000
+ writeMantissa(q1.toInt, out)
+ write8Digits(q0 - q1 * 100000000L, out)
+ }
+
+ private[this] def writeMantissaWithDot(q0: Long, out: Write): Unit =
+ if (q0.toInt == q0) writeMantissaWithDot(q0.toInt, out)
+ else {
+ val q1 = Math.multiplyHigh(q0, 6189700196426901375L) >>> 25 // divide a positive long by 100000000
+ writeMantissaWithDot(q1.toInt, out)
+ write8Digits(q0 - q1 * 100000000L, out)
+ }
+
+ def write(a: Int, out: Write): Unit = {
+ var q0 = a
+ if (q0 < 0) {
+ q0 = -q0
+ out.write('-')
+ if (q0 == a) {
+ out.write('2')
+ q0 = 147483648
+ }
+ }
+ writeMantissa(q0, out)
+ }
+
+ private[this] def writeMantissa(q0: Int, out: Write): Unit = {
+ val ds = digits
+ if (q0 < 100) { // Based on James Anhalt's algorithm: https://jk-jeon.github.io/posts/2022/02/jeaiii-algorithm/
+ if (q0 < 10) out.write((q0 | '0').toChar)
+ else out.write(ds(q0))
+ } else if (q0 < 10000) {
+ val q1 = q0 * 5243 >> 19 // divide a small positive int by 100
+ val d2 = ds(q0 - q1 * 100)
+ if (q0 < 1000) out.write((q1 | '0').toChar)
+ else out.write(ds(q1))
+ out.write(d2)
+ } else if (q0 < 1000000) {
+ val y1 = q0 * 429497L
+ val y2 = (y1 & 0xffffffffL) * 100
+ val y3 = (y2 & 0xffffffffL) * 100
+ if (q0 < 100000) out.write(((y1 >> 32).toInt | '0').toChar)
+ else out.write(ds((y1 >> 32).toInt))
+ out.write(ds((y2 >> 32).toInt), ds((y3 >> 32).toInt))
+ } else if (q0 < 100000000) {
+ val y1 = q0 * 140737489L
+ val y2 = (y1 & 0x7fffffffffffL) * 100
+ val y3 = (y2 & 0x7fffffffffffL) * 100
+ val y4 = (y3 & 0x7fffffffffffL) * 100
+ if (q0 < 10000000) out.write(((y1 >> 47).toInt | '0').toChar)
+ else out.write(ds((y1 >> 47).toInt))
+ out.write(ds((y2 >> 47).toInt), ds((y3 >> 47).toInt), ds((y4 >> 47).toInt))
+ } else {
+ val y1 = q0 * 1441151881L
+ val y2 = (y1 & 0x1ffffffffffffffL) * 100
+ val y3 = (y2 & 0x1ffffffffffffffL) * 100
+ val y4 = (y3 & 0x1ffffffffffffffL) * 100
+ val y5 = (y4 & 0x1ffffffffffffffL) * 100
+ if (q0 < 1000000000) out.write(((y1 >>> 57).toInt | '0').toChar)
+ else out.write(ds((y1 >>> 57).toInt))
+ out.write(ds((y2 >>> 57).toInt), ds((y3 >>> 57).toInt), ds((y4 >>> 57).toInt), ds((y5 >>> 57).toInt))
+ }
+ }
+
+ private[this] def writeMantissaWithDot(q0: Int, out: Write): Unit = {
+ val ds = digits
+ if (q0 < 100) { // Based on James Anhalt's algorithm: https://jk-jeon.github.io/posts/2022/02/jeaiii-algorithm/
+ if (q0 < 10) out.write((q0 | '0').toChar, '.')
+ else {
+ val d1 = ds(q0)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ } else if (q0 < 10000) {
+ val q1 = q0 * 5243 >> 19 // divide a small positive int by 100
+ val d2 = ds(q0 - q1 * 100)
+ if (q0 < 1000) out.write((q1 | '0').toChar, '.')
+ else {
+ val d1 = ds(q1)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ out.write(d2)
+ } else if (q0 < 1000000) {
+ val y1 = q0 * 429497L
+ val y2 = (y1 & 0xffffffffL) * 100
+ val y3 = (y2 & 0xffffffffL) * 100
+ if (q0 < 100000) out.write(((y1 >> 32).toInt | '0').toChar, '.')
+ else {
+ val d1 = ds((y1 >> 32).toInt)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ out.write(ds((y2 >> 32).toInt), ds((y3 >> 32).toInt))
+ } else if (q0 < 100000000) {
+ val y1 = q0 * 140737489L
+ val y2 = (y1 & 0x7fffffffffffL) * 100
+ val y3 = (y2 & 0x7fffffffffffL) * 100
+ val y4 = (y3 & 0x7fffffffffffL) * 100
+ if (q0 < 10000000) out.write(((y1 >> 47).toInt | '0').toChar, '.')
+ else {
+ val d1 = ds((y1 >> 47).toInt)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ out.write(ds((y2 >> 47).toInt), ds((y3 >> 47).toInt), ds((y4 >> 47).toInt))
+ } else {
+ val y1 = q0 * 1441151881L
+ val y2 = (y1 & 0x1ffffffffffffffL) * 100
+ val y3 = (y2 & 0x1ffffffffffffffL) * 100
+ val y4 = (y3 & 0x1ffffffffffffffL) * 100
+ val y5 = (y4 & 0x1ffffffffffffffL) * 100
+ if (q0 < 1000000000) out.write(((y1 >>> 57).toInt | '0').toChar, '.')
+ else {
+ val d1 = ds((y1 >>> 57).toInt)
+ out.write((d1 & 0xff).toChar, '.', (d1 >> 8).toChar)
+ }
+ out.write(ds((y2 >>> 57).toInt), ds((y3 >>> 57).toInt), ds((y4 >>> 57).toInt), ds((y5 >>> 57).toInt))
+ }
+ }
+
+ private[this] def write18Digits(x: Long, out: Write): Unit = {
+ val m1 = 6189700196426901375L
+ val q1 = Math.multiplyHigh(x, m1) >>> 25 // divide a positive long by 100000000
+ val q2 = Math.multiplyHigh(q1, m1) >>> 25 // divide a positive long by 100000000
+ out.write(digits(q2.toInt))
+ write8Digits(q1 - q2 * 100000000L, out)
+ write8Digits(x - q1 * 100000000L, out)
+ }
+
+ private[this] def write8Digits(x: Long, out: Write): Unit = {
+ val ds = digits // Based on James Anhalt's algorithm: https://jk-jeon.github.io/posts/2022/02/jeaiii-algorithm/
+ val y1 = x * 140737489L
+ val m1 = 0x7fffffffffffL
+ val m2 = 100L
+ val y2 = (y1 & m1) * m2
+ val y3 = (y2 & m1) * m2
+ val y4 = (y3 & m1) * m2
+ out.write(ds((y1 >> 47).toInt), ds((y2 >> 47).toInt), ds((y3 >> 47).toInt), ds((y4 >> 47).toInt))
+ }
+
+ @inline private[json] def write4Digits(x: Int, out: Write): Unit = {
+ val ds = digits
+ val q = x * 5243 >> 19 // divide a 4-digit positive int by 100
+ out.write(ds(q), ds(x - q * 100))
+ }
+
+ @inline private[json] def write3Digits(x: Int, out: Write): Unit = {
+ val q = x * 1311 >> 17 // divide a 3-digit positive int by 100
+ out.write((q + '0').toChar)
+ out.write(digits(x - q * 100))
+ }
+
+ @inline private[json] def write2Digits(x: Int, out: Write): Unit =
+ out.write(digits(x))
+
+ // Adaptation of a nice trick from Daniel Lemire's blog that works for numbers up to 10^18:
+ // https://lemire.me/blog/2021/06/03/computing-the-number-of-digits-of-an-integer-even-faster/
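+ // Each offsets entry packs (d << 58) - 10^(d - 1), where d is the decimal digit count of the largest
+ // non-negative value with that number of leading zeros, so (offsets(nlz) + x) >> 58 yields the digit count.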
+ private[this] def digitCount(x: Long): Int = (offsets(java.lang.Long.numberOfLeadingZeros(x)) + x >> 58).toInt
+
+ private[this] final val digits: Array[Short] = Array(
+ 12336, 12592, 12848, 13104, 13360, 13616, 13872, 14128, 14384, 14640, 12337, 12593, 12849, 13105, 13361, 13617,
+ 13873, 14129, 14385, 14641, 12338, 12594, 12850, 13106, 13362, 13618, 13874, 14130, 14386, 14642, 12339, 12595,
+ 12851, 13107, 13363, 13619, 13875, 14131, 14387, 14643, 12340, 12596, 12852, 13108, 13364, 13620, 13876, 14132,
+ 14388, 14644, 12341, 12597, 12853, 13109, 13365, 13621, 13877, 14133, 14389, 14645, 12342, 12598, 12854, 13110,
+ 13366, 13622, 13878, 14134, 14390, 14646, 12343, 12599, 12855, 13111, 13367, 13623, 13879, 14135, 14391, 14647,
+ 12344, 12600, 12856, 13112, 13368, 13624, 13880, 14136, 14392, 14648, 12345, 12601, 12857, 13113, 13369, 13625,
+ 13881, 14137, 14393, 14649
+ )
+
+ private[this] val offsets = Array(
+ 5088146770730811392L, 5088146770730811392L, 5088146770730811392L, 5088146770730811392L, 5088146770730811392L,
+ 5088146770730811392L, 5088146770730811392L, 5088146770730811392L, 4889916394579099648L, 4889916394579099648L,
+ 4889916394579099648L, 4610686018427387904L, 4610686018427387904L, 4610686018427387904L, 4610686018427387904L,
+ 4323355642275676160L, 4323355642275676160L, 4323355642275676160L, 4035215266123964416L, 4035215266123964416L,
+ 4035215266123964416L, 3746993889972252672L, 3746993889972252672L, 3746993889972252672L, 3746993889972252672L,
+ 3458764413820540928L, 3458764413820540928L, 3458764413820540928L, 3170534127668829184L, 3170534127668829184L,
+ 3170534127668829184L, 2882303760517117440L, 2882303760517117440L, 2882303760517117440L, 2882303760517117440L,
+ 2594073385265405696L, 2594073385265405696L, 2594073385265405696L, 2305843009203693952L, 2305843009203693952L,
+ 2305843009203693952L, 2017612633060982208L, 2017612633060982208L, 2017612633060982208L, 2017612633060982208L,
+ 1729382256910170464L, 1729382256910170464L, 1729382256910170464L, 1441151880758548720L, 1441151880758548720L,
+ 1441151880758548720L, 1152921504606845976L, 1152921504606845976L, 1152921504606845976L, 1152921504606845976L,
+ 864691128455135132L, 864691128455135132L, 864691128455135132L, 576460752303423478L, 576460752303423478L,
+ 576460752303423478L, 576460752303423478L, 576460752303423478L, 576460752303423478L, 576460752303423478L
+ )
+
+ private[this] final val lowerCaseHexDigits: Array[Short] = Array(
+ 12336, 12592, 12848, 13104, 13360, 13616, 13872, 14128, 14384, 14640, 24880, 25136, 25392, 25648, 25904, 26160,
+ 12337, 12593, 12849, 13105, 13361, 13617, 13873, 14129, 14385, 14641, 24881, 25137, 25393, 25649, 25905, 26161,
+ 12338, 12594, 12850, 13106, 13362, 13618, 13874, 14130, 14386, 14642, 24882, 25138, 25394, 25650, 25906, 26162,
+ 12339, 12595, 12851, 13107, 13363, 13619, 13875, 14131, 14387, 14643, 24883, 25139, 25395, 25651, 25907, 26163,
+ 12340, 12596, 12852, 13108, 13364, 13620, 13876, 14132, 14388, 14644, 24884, 25140, 25396, 25652, 25908, 26164,
+ 12341, 12597, 12853, 13109, 13365, 13621, 13877, 14133, 14389, 14645, 24885, 25141, 25397, 25653, 25909, 26165,
+ 12342, 12598, 12854, 13110, 13366, 13622, 13878, 14134, 14390, 14646, 24886, 25142, 25398, 25654, 25910, 26166,
+ 12343, 12599, 12855, 13111, 13367, 13623, 13879, 14135, 14391, 14647, 24887, 25143, 25399, 25655, 25911, 26167,
+ 12344, 12600, 12856, 13112, 13368, 13624, 13880, 14136, 14392, 14648, 24888, 25144, 25400, 25656, 25912, 26168,
+ 12345, 12601, 12857, 13113, 13369, 13625, 13881, 14137, 14393, 14649, 24889, 25145, 25401, 25657, 25913, 26169,
+ 12385, 12641, 12897, 13153, 13409, 13665, 13921, 14177, 14433, 14689, 24929, 25185, 25441, 25697, 25953, 26209,
+ 12386, 12642, 12898, 13154, 13410, 13666, 13922, 14178, 14434, 14690, 24930, 25186, 25442, 25698, 25954, 26210,
+ 12387, 12643, 12899, 13155, 13411, 13667, 13923, 14179, 14435, 14691, 24931, 25187, 25443, 25699, 25955, 26211,
+ 12388, 12644, 12900, 13156, 13412, 13668, 13924, 14180, 14436, 14692, 24932, 25188, 25444, 25700, 25956, 26212,
+ 12389, 12645, 12901, 13157, 13413, 13669, 13925, 14181, 14437, 14693, 24933, 25189, 25445, 25701, 25957, 26213,
+ 12390, 12646, 12902, 13158, 13414, 13670, 13926, 14182, 14438, 14694, 24934, 25190, 25446, 25702, 25958, 26214
+ )
+
+ private[this] val gs: Array[Long] = Array(
+ 5696189077778435540L, 6557778377634271669L, 9113902524445496865L, 1269073367360058862L, 7291122019556397492L,
+ 1015258693888047090L, 5832897615645117993L, 6346230177223303157L, 4666318092516094394L, 8766332956520552849L,
+ 7466108948025751031L, 8492109508320019073L, 5972887158420600825L, 4949013199285060097L, 4778309726736480660L,
+ 3959210559428048077L, 7645295562778369056L, 6334736895084876923L, 6116236450222695245L, 3223115108696946377L,
+ 4892989160178156196L, 2578492086957557102L, 7828782656285049914L, 436238524390181040L, 6263026125028039931L,
+ 2193665226883099993L, 5010420900022431944L, 9133629810990300641L, 8016673440035891111L, 9079784475471615541L,
+ 6413338752028712889L, 5419153173006337271L, 5130671001622970311L, 6179996945776024979L, 8209073602596752498L,
+ 6198646298499729642L, 6567258882077401998L, 8648265853541694037L, 5253807105661921599L, 1384589460720489745L,
+ 8406091369059074558L, 5904691951894693915L, 6724873095247259646L, 8413102376257665455L, 5379898476197807717L,
+ 4885807493635177203L, 8607837561916492348L, 438594360332462878L, 6886270049533193878L, 4040224303007880625L,
+ 5509016039626555102L, 6921528257148214824L, 8814425663402488164L, 3695747581953323071L, 7051540530721990531L,
+ 4801272472933613619L, 5641232424577592425L, 1996343570975935733L, 9025971879324147880L, 3194149713561497173L,
+ 7220777503459318304L, 2555319770849197738L, 5776622002767454643L, 3888930224050313352L, 4621297602213963714L,
+ 6800492993982161005L, 7394076163542341943L, 5346765568258592123L, 5915260930833873554L, 7966761269348784022L,
+ 4732208744667098843L, 8218083422849982379L, 7571533991467358150L, 2080887032334240837L, 6057227193173886520L,
+ 1664709625867392670L, 4845781754539109216L, 1331767700693914136L, 7753250807262574745L, 7664851543223128102L,
+ 6202600645810059796L, 6131881234578502482L, 4962080516648047837L, 3060830580291846824L, 7939328826636876539L,
+ 6742003335837910079L, 6351463061309501231L, 7238277076041283225L, 5081170449047600985L, 3945947253462071419L,
+ 8129872718476161576L, 6313515605539314269L, 6503898174780929261L, 3206138077060496254L, 5203118539824743409L,
+ 720236054277441842L, 8324989663719589454L, 4841726501585817270L, 6659991730975671563L, 5718055608639608977L,
+ 5327993384780537250L, 8263793301653597505L, 8524789415648859601L, 3998697245790980200L, 6819831532519087681L,
+ 1354283389261828999L, 5455865226015270144L, 8462124340893283845L, 8729384361624432231L, 8005375723316388668L,
+ 6983507489299545785L, 4559626171282155773L, 5586805991439636628L, 3647700937025724618L, 8938889586303418605L,
+ 3991647091870204227L, 7151111669042734884L, 3193317673496163382L, 5720889335234187907L, 4399328546167885867L,
+ 9153422936374700651L, 8883600081239572549L, 7322738349099760521L, 5262205657620702877L, 5858190679279808417L,
+ 2365090118725607140L, 4686552543423846733L, 7426095317093351197L, 7498484069478154774L, 813706063123630946L,
+ 5998787255582523819L, 2495639257869859918L, 4799029804466019055L, 3841185813666843096L, 7678447687145630488L,
+ 6145897301866948954L, 6142758149716504390L, 8606066656235469486L, 4914206519773203512L, 6884853324988375589L,
+ 7862730431637125620L, 3637067690497580296L, 6290184345309700496L, 2909654152398064237L, 5032147476247760397L,
+ 483048914547496228L, 8051435961996416635L, 2617552670646949126L, 6441148769597133308L, 2094042136517559301L,
+ 5152919015677706646L, 5364582523955957764L, 8244670425084330634L, 4893983223587622099L, 6595736340067464507L,
+ 5759860986241052841L, 5276589072053971606L, 918539974250931950L, 8442542515286354569L, 7003687180914356604L,
+ 6754034012229083655L, 7447624152102440445L, 5403227209783266924L, 5958099321681952356L, 8645163535653227079L,
+ 3998935692578258285L, 6916130828522581663L, 5043822961433561789L, 5532904662818065330L, 7724407183888759755L,
+ 8852647460508904529L, 3135679457367239799L, 7082117968407123623L, 4353217973264747001L, 5665694374725698898L,
+ 7171923193353707924L, 9065110999561118238L, 407030665140201709L, 7252088799648894590L, 4014973346854071690L,
+ 5801671039719115672L, 3211978677483257352L, 4641336831775292537L, 8103606164099471367L, 7426138930840468060L,
+ 5587072233075333540L, 5940911144672374448L, 4469657786460266832L, 4752728915737899558L, 7265075043910123789L,
+ 7604366265180639294L, 556073626030467093L, 6083493012144511435L, 2289533308195328836L, 4866794409715609148L,
+ 1831626646556263069L, 7786871055544974637L, 1085928227119065748L, 6229496844435979709L, 6402765803808118083L,
+ 4983597475548783767L, 6966887050417449628L, 7973755960878054028L, 3768321651184098759L, 6379004768702443222L,
+ 6704006135689189330L, 5103203814961954578L, 1673856093809441141L, 8165126103939127325L, 833495342724150664L,
+ 6532100883151301860L, 666796274179320531L, 5225680706521041488L, 533437019343456425L, 8361089130433666380L,
+ 8232196860433350926L, 6688871304346933104L, 6585757488346680741L, 5351097043477546483L, 7113280398048299755L,
+ 8561755269564074374L, 313202192651548637L, 6849404215651259499L, 2095236161492194072L, 5479523372521007599L,
+ 3520863336564710419L, 8767237396033612159L, 99358116390671185L, 7013789916826889727L, 1924160900483492110L,
+ 5611031933461511781L, 7073351942499659173L, 8977651093538418850L, 7628014293257544353L, 7182120874830735080L,
+ 6102411434606035483L, 5745696699864588064L, 4881929147684828386L, 9193114719783340903L, 2277063414182859933L,
+ 7354491775826672722L, 5510999546088198270L, 5883593420661338178L, 719450822128648293L, 4706874736529070542L,
+ 4264909472444828957L, 7530999578446512867L, 8668529563282681493L, 6024799662757210294L, 3245474835884234871L,
+ 4819839730205768235L, 4441054276078343059L, 7711743568329229176L, 7105686841725348894L, 6169394854663383341L,
+ 3839875066009323953L, 4935515883730706673L, 1227225645436504001L, 7896825413969130677L, 118886625327451240L,
+ 6317460331175304541L, 5629132522374826477L, 5053968264940243633L, 2658631610528906020L, 8086349223904389813L,
+ 2409136169475294470L, 6469079379123511850L, 5616657750322145900L, 5175263503298809480L, 4493326200257716720L,
+ 8280421605278095168L, 7189321920412346751L, 6624337284222476135L, 217434314217011916L, 5299469827377980908L,
+ 173947451373609533L, 8479151723804769452L, 7657013551681595899L, 6783321379043815562L, 2436262026603366396L,
+ 5426657103235052449L, 7483032843395558602L, 8682651365176083919L, 6438829327320028278L, 6946121092140867135L,
+ 6995737869226977784L, 5556896873712693708L, 5596590295381582227L, 8891034997940309933L, 7109870065239576402L,
+ 7112827998352247947L, 153872830078795637L, 5690262398681798357L, 5657121486175901994L, 9104419837890877372L,
+ 1672696748397622544L, 7283535870312701897L, 6872180620830963520L, 5826828696250161518L, 1808395681922860493L,
+ 4661462957000129214L, 5136065360280198718L, 7458340731200206743L, 2683681354335452463L, 5966672584960165394L,
+ 5836293898210272294L, 4773338067968132315L, 6513709525939172997L, 7637340908749011705L, 1198563204647900987L,
+ 6109872726999209364L, 958850563718320789L, 4887898181599367491L, 2611754858345611793L, 7820637090558987986L,
+ 489458958611068546L, 6256509672447190388L, 7770264796372675483L, 5005207737957752311L, 682188614985274902L,
+ 8008332380732403697L, 6625525006089305327L, 6406665904585922958L, 1611071190129533939L, 5125332723668738366L,
+ 4978205766845537474L, 8200532357869981386L, 4275780412210949635L, 6560425886295985109L, 1575949922397804547L,
+ 5248340709036788087L, 3105434345289198799L, 8397345134458860939L, 6813369359833673240L, 6717876107567088751L,
+ 7295369895237893754L, 5374300886053671001L, 3991621508819359841L, 8598881417685873602L, 2697245599369065423L,
+ 6879105134148698881L, 7691819701608117823L, 5503284107318959105L, 4308781353915539097L, 8805254571710334568L,
+ 6894050166264862555L, 7044203657368267654L, 9204588947753800367L, 5635362925894614123L, 9208345565573995455L,
+ 9016580681431382598L, 3665306460692661759L, 7213264545145106078L, 6621593983296039730L, 5770611636116084862L,
+ 8986624001378742108L, 4616489308892867890L, 3499950386361083363L, 7386382894228588624L, 5599920618177733380L,
+ 5909106315382870899L, 6324610901913141866L, 4727285052306296719L, 6904363128901468655L, 7563656083690074751L,
+ 5512957784129484362L, 6050924866952059801L, 2565691819932632328L, 4840739893561647841L, 207879048575150701L,
+ 7745183829698636545L, 5866629699833106606L, 6196147063758909236L, 4693303759866485285L, 4956917651007127389L,
+ 1909968600522233067L, 7931068241611403822L, 6745298575577483229L, 6344854593289123058L, 1706890045720076260L,
+ 5075883674631298446L, 5054860851317971332L, 8121413879410077514L, 4398428547366843807L, 6497131103528062011L,
+ 5363417245264430207L, 5197704882822449609L, 2446059388840589004L, 8316327812515919374L, 7603043836886852730L,
+ 6653062250012735499L, 7927109476880437346L, 5322449800010188399L, 8186361988875305038L, 8515919680016301439L,
+ 7564155960087622576L, 6812735744013041151L, 7895999175441053223L, 5450188595210432921L, 4472124932981887417L,
+ 8720301752336692674L, 3466051078029109543L, 6976241401869354139L, 4617515269794242796L, 5580993121495483311L,
+ 5538686623206349399L, 8929588994392773298L, 5172549782388248714L, 7143671195514218638L, 7827388640652509295L,
+ 5714936956411374911L, 727887690409141951L, 9143899130258199857L, 6698643526767492606L, 7315119304206559886L,
+ 1669566006672083762L, 5852095443365247908L, 8714350434821487656L, 4681676354692198327L, 1437457125744324640L,
+ 7490682167507517323L, 4144605808561874585L, 5992545734006013858L, 7005033461591409992L, 4794036587204811087L,
+ 70003547160262509L, 7670458539527697739L, 1956680082827375175L, 6136366831622158191L, 3410018473632855302L,
+ 4909093465297726553L, 883340371535329080L, 7854549544476362484L, 8792042223940347174L, 6283639635581089987L,
+ 8878308186523232901L, 5026911708464871990L, 3413297734476675998L, 8043058733543795184L, 5461276375162681596L,
+ 6434446986835036147L, 6213695507501100438L, 5147557589468028918L, 1281607591258970028L, 8236092143148846269L,
+ 205897738643396882L, 6588873714519077015L, 2009392598285672668L, 5271098971615261612L, 1607514078628538134L,
+ 8433758354584418579L, 4416696933176616176L, 6747006683667534863L, 5378031953912248102L, 5397605346934027890L,
+ 7991774377871708805L, 8636168555094444625L, 3563466967739958280L, 6908934844075555700L, 2850773574191966624L,
+ 5527147875260444560L, 2280618859353573299L, 8843436600416711296L, 3648990174965717279L, 7074749280333369037L,
+ 1074517732601618662L, 5659799424266695229L, 6393637408194160414L, 9055679078826712367L, 4695796630997791177L,
+ 7244543263061369894L, 67288490056322619L, 5795634610449095915L, 1898505199416013257L, 4636507688359276732L,
+ 1518804159532810606L, 7418412301374842771L, 4274761062623452130L, 5934729841099874217L, 1575134442727806543L,
+ 4747783872879899373L, 6794130776295110719L, 7596454196607838997L, 9025934834701221989L, 6077163357286271198L,
+ 3531399053019067268L, 4861730685829016958L, 6514468057157164137L, 7778769097326427133L, 8578474484080507458L,
+ 6223015277861141707L, 1328756365151540482L, 4978412222288913365L, 6597028314234097870L, 7965459555662261385L,
+ 1331873265919780784L, 6372367644529809108L, 1065498612735824627L, 5097894115623847286L, 4541747704930570025L,
+ 8156630584998155658L, 3577447513147001717L, 6525304467998524526L, 6551306825259511697L, 5220243574398819621L,
+ 3396371052836654196L, 8352389719038111394L, 1744844869796736390L, 6681911775230489115L, 3240550303208344274L,
+ 5345529420184391292L, 2592440242566675419L, 8552847072295026067L, 5992578795477635832L, 6842277657836020854L,
+ 1104714221640198342L, 5473822126268816683L, 2728445784683113836L, 8758115402030106693L, 2520838848122026975L,
+ 7006492321624085354L, 5706019893239531903L, 5605193857299268283L, 6409490321962580684L, 8968310171678829253L,
+ 8410510107769173933L, 7174648137343063403L, 1194384864102473662L, 5739718509874450722L, 4644856706023889253L,
+ 9183549615799121156L, 53073100154402158L, 7346839692639296924L, 7421156109607342373L, 5877471754111437539L,
+ 7781599295056829060L, 4701977403289150031L, 8069953843416418410L, 7523163845262640050L, 9222577334724359132L,
+ 6018531076210112040L, 7378061867779487306L, 4814824860968089632L, 5902449494223589845L, 7703719777548943412L,
+ 2065221561273923105L, 6162975822039154729L, 7186200471132003969L, 4930380657631323783L, 7593634784276558337L,
+ 7888609052210118054L, 1081769210616762369L, 6310887241768094443L, 2710089775864365057L, 5048709793414475554L,
+ 5857420635433402369L, 8077935669463160887L, 3837849794580578305L, 6462348535570528709L, 8604303057777328129L,
+ 5169878828456422967L, 8728116853592817665L, 8271806125530276748L, 6586289336264687617L, 6617444900424221398L,
+ 8958380283753660417L, 5293955920339377119L, 1632681004890062849L, 8470329472543003390L, 6301638422566010881L,
+ 6776263578034402712L, 5041310738052808705L, 5421010862427522170L, 343699775700336641L, 8673617379884035472L,
+ 549919641120538625L, 6938893903907228377L, 5973958935009296385L, 5551115123125782702L, 1089818333265526785L,
+ 8881784197001252323L, 3588383740595798017L, 7105427357601001858L, 6560055807218548737L, 5684341886080801486L,
+ 8937393460516749313L, 9094947017729282379L, 1387108685230112769L, 7275957614183425903L, 2954361355555045377L,
+ 5820766091346740722L, 6052837899185946625L, 4656612873077392578L, 1152921504606846977L, 7450580596923828125L, 1L,
+ 5960464477539062500L, 1L, 4768371582031250000L, 1L, 7629394531250000000L, 1L, 6103515625000000000L, 1L,
+ 4882812500000000000L, 1L, 7812500000000000000L, 1L, 6250000000000000000L, 1L, 5000000000000000000L, 1L,
+ 8000000000000000000L, 1L, 6400000000000000000L, 1L, 5120000000000000000L, 1L, 8192000000000000000L, 1L,
+ 6553600000000000000L, 1L, 5242880000000000000L, 1L, 8388608000000000000L, 1L, 6710886400000000000L, 1L,
+ 5368709120000000000L, 1L, 8589934592000000000L, 1L, 6871947673600000000L, 1L, 5497558138880000000L, 1L,
+ 8796093022208000000L, 1L, 7036874417766400000L, 1L, 5629499534213120000L, 1L, 9007199254740992000L, 1L,
+ 7205759403792793600L, 1L, 5764607523034234880L, 1L, 4611686018427387904L, 1L, 7378697629483820646L,
+ 3689348814741910324L, 5902958103587056517L, 1106804644422573097L, 4722366482869645213L, 6419466937650923963L,
+ 7555786372591432341L, 8426472692870523179L, 6044629098073145873L, 4896503746925463381L, 4835703278458516698L,
+ 7606551812282281028L, 7737125245533626718L, 1102436455425918676L, 6189700196426901374L, 4571297979082645264L,
+ 4951760157141521099L, 5501712790637071373L, 7922816251426433759L, 3268717242906448711L, 6338253001141147007L,
+ 4459648201696114131L, 5070602400912917605L, 9101741783469756789L, 8112963841460668169L, 5339414816696835055L,
+ 6490371073168534535L, 6116206260728423206L, 5192296858534827628L, 4892965008582738565L, 8307674973655724205L,
+ 5984069606361426541L, 6646139978924579364L, 4787255685089141233L, 5316911983139663491L, 5674478955442268148L,
+ 8507059173023461586L, 5389817513965718714L, 6805647338418769269L, 2467179603801619810L, 5444517870735015415L,
+ 3818418090412251009L, 8711228593176024664L, 6109468944659601615L, 6968982874540819731L, 6732249563098636453L,
+ 5575186299632655785L, 3541125243107954001L, 8920298079412249256L, 5665800388972726402L, 7136238463529799405L,
+ 2687965903807225960L, 5708990770823839524L, 2150372723045780768L, 9134385233318143238L, 7129945171615159552L,
+ 7307508186654514591L, 169932915179262157L, 5846006549323611672L, 7514643961627230372L, 4676805239458889338L,
+ 2322366354559873974L, 7482888383134222941L, 1871111759924843197L, 5986310706507378352L, 8875587037423695204L,
+ 4789048565205902682L, 3411120815197045840L, 7662477704329444291L, 7302467711686228506L, 6129982163463555433L,
+ 3997299761978027643L, 4903985730770844346L, 6887188624324332438L, 7846377169233350954L, 7330152984177021577L,
+ 6277101735386680763L, 7708796794712572423L, 5021681388309344611L, 633014213657192454L, 8034690221294951377L,
+ 6546845963964373411L, 6427752177035961102L, 1548127956429588405L, 5142201741628768881L, 6772525587256536209L,
+ 8227522786606030210L, 7146692124868547611L, 6582018229284824168L, 5717353699894838089L, 5265614583427859334L,
+ 8263231774657780795L, 8424983333484574935L, 7687147617339583786L, 6739986666787659948L, 6149718093871667029L,
+ 5391989333430127958L, 8609123289839243947L, 8627182933488204734L, 2706550819517059345L, 6901746346790563787L,
+ 4009915062984602637L, 5521397077432451029L, 8741955272500547595L, 8834235323891921647L, 8453105213888010667L,
+ 7067388259113537318L, 3073135356368498210L, 5653910607290829854L, 6147857099836708891L, 9046256971665327767L,
+ 4302548137625868741L, 7237005577332262213L, 8976061732213560478L, 5789604461865809771L, 1646826163657982898L,
+ 4631683569492647816L, 8696158560410206965L, 7410693711188236507L, 1001132845059645012L, 5928554968950589205L,
+ 6334929498160581494L, 4742843975160471364L, 5067943598528465196L, 7588550360256754183L, 2574686535532678828L,
+ 6070840288205403346L, 5749098043168053386L, 4856672230564322677L, 2754604027163487547L, 7770675568902916283L,
+ 6252040850832535236L, 6216540455122333026L, 8690981495407938512L, 4973232364097866421L, 5108110788955395648L,
+ 7957171782556586274L, 4483628447586722714L, 6365737426045269019L, 5431577165440333333L, 5092589940836215215L,
+ 6189936139723221828L, 8148143905337944345L, 680525786702379117L, 6518515124270355476L, 544420629361903293L,
+ 5214812099416284380L, 7814234132973343281L, 8343699359066055009L, 3279402575902573442L, 6674959487252844007L,
+ 4468196468093013915L, 5339967589802275205L, 9108580396587276617L, 8543948143683640329L, 5350356597684866779L,
+ 6835158514946912263L, 6124959685518848585L, 5468126811957529810L, 8589316563156989191L, 8749002899132047697L,
+ 4519534464196406897L, 6999202319305638157L, 9149650793469991003L, 5599361855444510526L, 3630371820034082479L,
+ 8958978968711216842L, 2119246097312621643L, 7167183174968973473L, 7229420099962962799L, 5733746539975178779L,
+ 249512857857504755L, 9173994463960286046L, 4088569387313917931L, 7339195571168228837L, 1426181102480179183L,
+ 5871356456934583069L, 6674968104097008831L, 4697085165547666455L, 7184648890648562227L, 7515336264876266329L,
+ 2272066188182923754L, 6012269011901013063L, 3662327357917294165L, 4809815209520810450L, 6619210701075745655L,
+ 7695704335233296721L, 1367365084866417240L, 6156563468186637376L, 8472589697376954439L, 4925250774549309901L,
+ 4933397350530608390L, 7880401239278895842L, 4204086946107063100L, 6304320991423116673L, 8897292778998515965L,
+ 5043456793138493339L, 1583811001085947287L, 8069530869021589342L, 6223446416479425982L, 6455624695217271474L,
+ 1289408318441630463L, 5164499756173817179L, 2876201062124259532L, 8263199609878107486L, 8291270514140725574L,
+ 6610559687902485989L, 4788342003941625298L, 5288447750321988791L, 5675348010524255400L, 8461516400515182066L,
+ 5391208002096898316L, 6769213120412145653L, 2468291994306563491L, 5415370496329716522L, 5663982410187161116L,
+ 8664592794127546436L, 1683674226815637140L, 6931674235302037148L, 8725637010936330358L, 5545339388241629719L,
+ 1446486386636198802L, 8872543021186607550L, 6003727033359828406L, 7098034416949286040L, 4802981626687862725L,
+ 5678427533559428832L, 3842385301350290180L, 9085484053695086131L, 7992490889531419449L, 7268387242956068905L,
+ 4549318304254180398L, 5814709794364855124L, 3639454643403344318L, 4651767835491884099L, 4756238122093630616L,
+ 7442828536787014559L, 2075957773236943501L, 5954262829429611647L, 3505440625960509963L, 4763410263543689317L,
+ 8338375722881273455L, 7621456421669902908L, 5962703527126216881L, 6097165137335922326L, 8459511636442883828L,
+ 4877732109868737861L, 4922934901783351901L, 7804371375789980578L, 4187347028111452718L, 6243497100631984462L,
+ 7039226437231072498L, 4994797680505587570L, 1942032335042947675L, 7991676288808940112L, 3107251736068716280L,
+ 6393341031047152089L, 8019824610967838509L, 5114672824837721671L, 8260534096145225969L, 8183476519740354675L,
+ 304133702235675419L, 6546781215792283740L, 243306961788540335L, 5237424972633826992L, 194645569430832268L,
+ 8379879956214123187L, 2156107318460286790L, 6703903964971298549L, 7258909076881094917L, 5363123171977038839L,
+ 7651801668875831096L, 8580997075163262143L, 6708859448088464268L, 6864797660130609714L, 9056436373212681737L,
+ 5491838128104487771L, 9089823505941100552L, 8786941004967180435L, 1630996757909074751L, 7029552803973744348L,
+ 1304797406327259801L, 5623642243178995478L, 4733186739803718164L, 8997827589086392765L, 5728424376314993901L,
+ 7198262071269114212L, 4582739501051995121L, 5758609657015291369L, 9200214822954461581L, 9213775451224466191L,
+ 9186320494614273045L, 7371020360979572953L, 5504381988320463275L, 5896816288783658362L, 8092854405398280943L,
+ 4717453031026926690L, 2784934709576714431L, 7547924849643082704L, 4455895535322743090L, 6038339879714466163L,
+ 5409390835629149634L, 4830671903771572930L, 8016861483245230030L, 7729075046034516689L, 3603606336337592240L,
+ 6183260036827613351L, 4727559476441028954L, 4946608029462090681L, 1937373173781868001L, 7914572847139345089L,
+ 8633820300163854287L, 6331658277711476071L, 8751730647502038591L, 5065326622169180857L, 5156710110630675711L,
+ 8104522595470689372L, 872038547525260492L, 6483618076376551497L, 6231654060133073878L, 5186894461101241198L,
+ 1295974433364548779L, 8299031137761985917L, 228884686012322885L, 6639224910209588733L, 5717130970922723793L,
+ 5311379928167670986L, 8263053591480089358L, 8498207885068273579L, 308164894771456841L, 6798566308054618863L,
+ 2091206323188120634L, 5438853046443695090L, 5362313873292406831L, 8702164874309912144L, 8579702197267850929L,
+ 6961731899447929715L, 8708436165185235905L, 5569385519558343772L, 6966748932148188724L, 8911016831293350036L,
+ 3768100661953281312L, 7128813465034680029L, 1169806122191669888L, 5703050772027744023L, 2780519305124291072L,
+ 9124881235244390437L, 2604156480827910553L, 7299904988195512349L, 7617348406775193928L, 5839923990556409879L,
+ 7938553132791110304L, 4671939192445127903L, 8195516913603843405L, 7475102707912204646L, 2044780617540418478L,
+ 5980082166329763716L, 9014522123516155429L, 4784065733063810973L, 5366943291441969181L, 7654505172902097557L,
+ 6742434858936195528L, 6123604138321678046L, 1704599072407046100L, 4898883310657342436L, 8742376887409457526L,
+ 7838213297051747899L, 1075082168258445910L, 6270570637641398319L, 2704740141977711890L, 5016456510113118655L,
+ 4008466520953124674L, 8026330416180989848L, 6413546433524999478L, 6421064332944791878L, 8820185961561909905L,
+ 5136851466355833503L, 1522125547136662440L, 8218962346169333605L, 590726468047704741L, 6575169876935466884L,
+ 472581174438163793L, 5260135901548373507L, 2222739346921486196L, 8416217442477397611L, 5401057362445333075L,
+ 6732973953981918089L, 2476171482585311299L, 5386379163185534471L, 3825611593439204201L, 8618206661096855154L,
+ 2431629734760816398L, 6894565328877484123L, 3789978195179608280L, 5515652263101987298L, 6721331370885596947L,
+ 8825043620963179677L, 8909455786045999954L, 7060034896770543742L, 3438215814094889640L, 5648027917416434993L,
+ 8284595873388777197L, 9036844667866295990L, 2187306953196312545L, 7229475734293036792L, 1749845562557050036L,
+ 5783580587434429433L, 6933899672158505514L, 4626864469947543547L, 13096515613938926L, 7402983151916069675L,
+ 1865628832353257443L, 5922386521532855740L, 1492503065882605955L, 4737909217226284592L, 1194002452706084764L,
+ 7580654747562055347L, 3755078331700690783L, 6064523798049644277L, 8538085887473418112L, 4851619038439715422L,
+ 3141119895236824166L, 7762590461503544675L, 6870466239749873827L, 6210072369202835740L, 5496372991799899062L,
+ 4968057895362268592L, 4397098393439919250L, 7948892632579629747L, 8880031836874825961L, 6359114106063703798L,
+ 3414676654757950445L, 5087291284850963038L, 6421090138548270680L, 8139666055761540861L, 8429069814306277926L,
+ 6511732844609232689L, 4898581444074067179L, 5209386275687386151L, 5763539562630208905L, 8335018041099817842L,
+ 5532314485466423924L, 6668014432879854274L, 736502773631228816L, 5334411546303883419L, 2433876626275938215L,
+ 8535058474086213470L, 7583551416783411467L, 6828046779268970776L, 6066841133426729173L, 5462437423415176621L,
+ 3008798499370428177L, 8739899877464282594L, 1124728784250774760L, 6991919901971426075L, 2744457434771574970L,
+ 5593535921577140860L, 2195565947817259976L, 8949657474523425376L, 3512905516507615961L, 7159725979618740301L,
+ 965650005835137607L, 5727780783694992240L, 8151217634151930732L, 9164449253911987585L, 3818576177788313364L,
+ 7331559403129590068L, 3054860942230650691L, 5865247522503672054L, 6133237568526430876L, 4692198018002937643L,
+ 6751264462192099863L, 7507516828804700229L, 8957348732136404618L, 6006013463043760183L, 9010553393080078856L,
+ 4804810770435008147L, 1674419492351197600L, 7687697232696013035L, 4523745595132871322L, 6150157786156810428L,
+ 3618996476106297057L, 4920126228925448342L, 6584545995626947969L, 7872201966280717348L, 3156575963519296104L,
+ 6297761573024573878L, 6214609585557347207L, 5038209258419659102L, 8661036483187788089L, 8061134813471454564L,
+ 6478960743616640295L, 6448907850777163651L, 7027843002264267398L, 5159126280621730921L, 3777599994440458757L,
+ 8254602048994769474L, 2354811176362823687L, 6603681639195815579L, 3728523348461214111L, 5282945311356652463L,
+ 4827493086139926451L, 8452712498170643941L, 5879314530452927160L, 6762169998536515153L, 2858777216991386566L,
+ 5409735998829212122L, 5976370588335019576L, 8655577598126739396L, 2183495311852210675L, 6924462078501391516L,
+ 9125493878965589187L, 5539569662801113213L, 5455720695801516188L, 8863311460481781141L, 6884478705911470739L,
+ 7090649168385424913L, 3662908557358221429L, 5672519334708339930L, 6619675660628487467L, 9076030935533343889L,
+ 1368109020150804139L, 7260824748426675111L, 2939161623491598473L, 5808659798741340089L, 506654891422323617L,
+ 4646927838993072071L, 2249998320508814055L, 7435084542388915313L, 9134020534926967972L, 5948067633911132251L,
+ 1773193205828708893L, 4758454107128905800L, 8797252194146787761L, 7613526571406249281L, 4852231473780084609L,
+ 6090821257124999425L, 2037110771653112526L, 4872657005699999540L, 1629688617322490021L, 7796251209119999264L,
+ 2607501787715984033L, 6237000967295999411L, 3930675837543742388L, 4989600773836799529L, 1299866262664038749L,
+ 7983361238138879246L, 5769134835004372321L, 6386688990511103397L, 2770633460632542696L, 5109351192408882717L,
+ 7750529990618899641L, 8174961907854212348L, 5022150355506418780L, 6539969526283369878L, 7707069099147045347L,
+ 5231975621026695903L, 631632057204770793L, 8371160993642713444L, 8389308921011453915L, 6696928794914170755L,
+ 8556121544180118293L, 5357543035931336604L, 6844897235344094635L, 8572068857490138567L, 5417812354437685931L,
+ 6857655085992110854L, 644901068808238421L, 5486124068793688683L, 2360595262417545899L, 8777798510069901893L,
+ 1932278012497118276L, 7022238808055921514L, 5235171224739604944L, 5617791046444737211L, 6032811387162639117L,
+ 8988465674311579538L, 5963149404718312264L, 7190772539449263630L, 8459868338516560134L, 5752618031559410904L,
+ 6767894670813248108L, 9204188850495057447L, 5294608251188331487L
+ )
+
+ private[this] final val pow10ints: Array[Int] =
+ Array(1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000)
+
+ private[this] final val pow10longs: Array[Long] =
+ Array(1L, 10L, 100L, 1000L, 10000L, 100000L, 1000000L, 10000000L, 100000000L, 1000000000L, 10000000000L,
+ 100000000000L, 1000000000000L, 10000000000000L, 100000000000000L, 1000000000000000L, 10000000000000000L,
+ 100000000000000000L, 1000000000000000000L)
+
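+  // Cache seeded with 10^18 and (judging by the name) grown by repeated squaring;
+  // @volatile so readers always observe a fully initialized array even when another
+  // thread publishes an extended copy.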
+ @volatile private[this] var tenPow18Squares: Array[java.math.BigInteger] =
+ Array(java.math.BigInteger.valueOf(1000000000000000000L))
+
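+  // Per-thread reusable output buffer: get() resets the FastStringWrite before
+  // returning it, so each caller starts from an empty buffer without reallocating.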
+ private[this] val writes = new ThreadLocal[FastStringWrite] {
+ override def initialValue(): FastStringWrite = new FastStringWrite(64)
+
+ override def get: FastStringWrite = {
+ val w = super.get
+ w.reset()
+ w
+ }
+ }
+}
diff --git a/zio-json/jvm-native/src/main/scala/zio/json/internal/UnsafeNumbers.scala b/zio-json/jvm-native/src/main/scala/zio/json/internal/UnsafeNumbers.scala
new file mode 100644
index 000000000..c9d1a3164
--- /dev/null
+++ b/zio-json/jvm-native/src/main/scala/zio/json/internal/UnsafeNumbers.scala
@@ -0,0 +1,782 @@
+/*
+ * Copyright 2019-2022 John A. De Goes and the ZIO Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package zio.json.internal
+
+import scala.util.control.NoStackTrace
+
+// The underlying implementation signals failure with an exception that carries no
+// stack trace, which is roughly 20x faster than retaining stack traces. Because
+// failure is signalled by the exception rather than by a wrapper type, results need
+// no boxing on the happy path. This slows down the unhappy path a little, but it
+// stays on the same order of magnitude as the happy path.
+//
+// This API should only be used by people who know what they are doing. Note
+// that Reader implementations consume one character beyond the number that is
+// parsed, because there is no terminator character.
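+//
+// For example, `UnsafeNumbers.int("123")` returns 123, whereas
+// `UnsafeNumbers.int("123abc")` throws `UnsafeNumber` because characters remain
+// after the parsed digits.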
+object UnsafeNumbers {
+
+ // should never escape into user code
+ case object UnsafeNumber
+ extends Exception("if you see this a dev made a mistake using UnsafeNumbers")
+ with NoStackTrace
+
+ def byte(num: String): Byte =
+ byte_(new FastStringReader(num), true)
+
+ def byte_(in: OneCharReader, consume: Boolean): Byte = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var accum = current - '0'
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ accum = accum * 10 + (current - '0')
+ if (accum > 128) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return (-accum).toByte
+ else if (accum < 128) return accum.toByte
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def short(num: String): Short =
+ short_(new FastStringReader(num), true)
+
+ def short_(in: OneCharReader, consume: Boolean): Short = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var accum = current - '0'
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ accum = accum * 10 + (current - '0')
+ if (accum > 32768) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return (-accum).toShort
+ else if (accum < 32768) return accum.toShort
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def int(num: String): Int =
+ int_(new FastStringReader(num), true)
+
+ def int_(in: OneCharReader, consume: Boolean): Int = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
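+      // Accumulate in the negative range so that Int.MinValue (-2147483648), whose
+      // magnitude does not fit in a positive Int, can be parsed without overflow.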
+ var accum = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ accum < -214748364 || {
+ accum = accum * 10 + ('0' - current)
+ accum > 0
+ }
+ ) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return accum
+ else if (accum != -2147483648) return -accum
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def long(num: String): Long =
+ long_(new FastStringReader(num), true)
+
+ def long_(in: OneCharReader, consume: Boolean): Long = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
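+      // As in int_, accumulate negatively so that Long.MinValue parses without overflow.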
+ var accum = ('0' - current).toLong
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ accum < -922337203685477580L || {
+ accum = accum * 10 + ('0' - current)
+ accum > 0
+ }
+ ) throw UnsafeNumber
+ }
+ if (!consume || current == -1) {
+ if (negate) return accum
+ else if (accum != -9223372036854775808L) return -accum
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def bigInteger(num: String, max_bits: Int): java.math.BigInteger =
+ bigInteger_(new FastStringReader(num), true, max_bits)
+
+ def bigInteger_(in: OneCharReader, consume: Boolean, max_bits: Int): java.math.BigInteger = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
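+      // Digits accumulate into a Long (loM10); whenever the Long is about to overflow,
+      // the pending chunk is folded into a BigDecimal accumulator (hiM10) scaled by the
+      // number of pending digits, and its bit length is checked against max_bits.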
+ var loM10 = (current - '0').toLong
+ var loDigits = 1
+ var hiM10: java.math.BigDecimal = null
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ } else {
+ if (negate) loM10 = -loM10
+ val bd = java.math.BigDecimal.valueOf(loM10)
+ if (hiM10 eq null) hiM10 = bd
+ else {
+ hiM10 = hiM10.scaleByPowerOfTen(loDigits).add(bd)
+ if (hiM10.unscaledValue.bitLength >= max_bits) throw UnsafeNumber
+ }
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ if (!consume || current == -1) {
+ if (negate) loM10 = -loM10
+ if (hiM10 eq null) return java.math.BigInteger.valueOf(loM10)
+ val bi = hiM10.scaleByPowerOfTen(loDigits).add(java.math.BigDecimal.valueOf(loM10)).unscaledValue
+ if (bi.bitLength < max_bits) return bi
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def bigInt(num: String, max_bits: Int): BigInt =
+ bigInt_(new FastStringReader(num), true, max_bits)
+
+ def bigInt_(in: OneCharReader, consume: Boolean, max_bits: Int): BigInt = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ if (current >= '0' && current <= '9') {
+ var loM10 = (current - '0').toLong
+ var loDigits = 1
+ var hiM10: java.math.BigDecimal = null
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ } else {
+ if (negate) loM10 = -loM10
+ val bd = java.math.BigDecimal.valueOf(loM10)
+ if (hiM10 eq null) hiM10 = bd
+ else {
+ hiM10 = hiM10.scaleByPowerOfTen(loDigits).add(bd)
+ if (hiM10.unscaledValue.bitLength >= max_bits) throw UnsafeNumber
+ }
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ if (!consume || current == -1) {
+ if (negate) loM10 = -loM10
+ if (hiM10 eq null) return BigInt(loM10)
+ val bi = hiM10.scaleByPowerOfTen(loDigits).add(java.math.BigDecimal.valueOf(loM10)).unscaledValue
+ if (bi.bitLength < max_bits) return new BigInt(bi)
+ }
+ }
+ throw UnsafeNumber
+ }
+
+ def bigDecimal(num: String, max_bits: Int): java.math.BigDecimal =
+ bigDecimal_(new FastStringReader(num), true, max_bits)
+
+ def bigDecimal_(in: OneCharReader, consume: Boolean, max_bits: Int): java.math.BigDecimal = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ var loM10 = 0L
+ var loDigits = 0
+ var hiM10: java.math.BigDecimal = null
+ if (current >= '0' && current <= '9') {
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ }
+ var e10 = 0
+ if (current == '.') {
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ e10 -= 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ e10 -= 1
+ }
+ }
+ }
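+    // Accept only if at least one digit was read and any exponent ('e'/'E') parses
+    // without overflowing the scale; a consuming reader must also have exhausted the
+    // input (current == -1).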
+ if (
+ loDigits != 0 && ((current | 0x20) != 'e' || {
+ current = in.readChar().toInt
+ val negateExp = current == '-'
+ if (negateExp || current == '+') current = in.readChar().toInt
+ (current >= '0' && current <= '9') && {
+ var exp = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ exp < -214748364 || {
+ exp = exp * 10 + ('0' - current)
+ exp > 0
+ }
+ ) throw UnsafeNumber
+ }
+ negateExp && {
+ e10 += exp
+ e10 <= 0
+ } || !negateExp && {
+ e10 -= exp
+ exp != -2147483648
+ }
+ }
+ }) && (!consume || current == -1)
+ ) {
+ if (hiM10 eq null) {
+ if (negate) loM10 = -loM10
+ return java.math.BigDecimal.valueOf(loM10, -e10)
+ }
+ return toBigDecimal(hiM10, loM10, loDigits, e10, max_bits, negate)
+ }
+ throw UnsafeNumber
+ }
+
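+  // Folds the pending Long chunk (lo, with loDigits digits) into the BigDecimal
+  // accumulator hi, throwing UnsafeNumber once the unscaled value reaches max_bits.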
+ @noinline private[this] def toBigDecimal(
+ hi: java.math.BigDecimal,
+ lo: Long,
+ loDigits: Int,
+ max_bits: Int,
+ negate: Boolean
+ ): java.math.BigDecimal = {
+ var loM10 = lo
+ if (negate) loM10 = -loM10
+ var hiM10 = java.math.BigDecimal.valueOf(loM10)
+ if (hi eq null) return hiM10
+ hiM10 = hi.scaleByPowerOfTen(loDigits).add(hiM10)
+ if (hiM10.unscaledValue.bitLength < max_bits) return hiM10
+ throw UnsafeNumber
+ }
+
+ @noinline private[this] def toBigDecimal(
+ hi: java.math.BigDecimal,
+ lo: Long,
+ loDigits: Int,
+ e10: Int,
+ max_bits: Int,
+ negate: Boolean
+ ): java.math.BigDecimal = {
+ var loM10 = lo
+ if (negate) loM10 = -loM10
+ var hiM10 = java.math.BigDecimal.valueOf(loM10, -e10)
+ if (hi eq null) return hiM10
+ val n = loDigits.toLong + e10
+ if (
+ n.toInt == n && {
+ val scale = hi.scale - n
+ scale.toInt == scale
+ } && {
+ hiM10 = hi.scaleByPowerOfTen(n.toInt).add(hiM10)
+ hiM10.unscaledValue.bitLength < max_bits
+ }
+ ) return hiM10
+ throw UnsafeNumber
+ }
+
+ def float(num: String, max_bits: Int): Float =
+ float_(new FastStringReader(num), true, max_bits)
+
+ def float_(in: OneCharReader, consume: Boolean, max_bits: Int): Float = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ else if (current == 'N') {
+ readAll(in, "aN", consume)
+ return Float.NaN
+ }
+ if (current == 'I' || current == '+') {
+ if (current == '+' && in.readChar() != 'I') throw UnsafeNumber
+ readAll(in, "nfinity", consume)
+ return if (negate) Float.NegativeInfinity else Float.PositiveInfinity
+ }
+ var loM10 = 0L
+ var loDigits = 0
+ var hiM10: java.math.BigDecimal = null
+ if (current >= '0' && current <= '9') {
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ }
+ var e10 = 0
+ if (current == '.') {
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ e10 -= 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ e10 -= 1
+ }
+ }
+ }
+ if (
+ loDigits != 0 && ((current | 0x20) != 'e' || {
+ current = in.readChar().toInt
+ val negateExp = current == '-'
+ if (negateExp || current == '+') current = in.readChar().toInt
+ (current >= '0' && current <= '9') && {
+ var exp = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ exp < -214748364 || {
+ exp = exp * 10 + ('0' - current)
+ exp > 0
+ }
+ ) throw UnsafeNumber
+ }
+ negateExp && {
+ e10 += exp
+ e10 <= 0
+ } || !negateExp && {
+ e10 -= exp
+ exp != -2147483648
+ }
+ }
+ }) && (!consume || current == -1)
+ ) {
+ if (hiM10 eq null) {
+ var x =
+ if (e10 == 0) loM10.toFloat
+ else {
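+            // Fast path: the mantissa and the needed power of 10 are exactly
+            // representable as Doubles, so the scaling is done in double arithmetic
+            // and then narrowed to Float.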
+ if (loM10 < 4294967296L && e10 >= loDigits - 23 && e10 <= 19 - loDigits) {
+ val pow10 = pow10Doubles
+ (if (e10 < 0) loM10 / pow10(-e10)
+ else loM10 * pow10(e10)).toFloat
+ } else toFloat(loM10, e10)
+ }
+ if (negate) x = -x
+ return x
+ }
+ return toBigDecimal(hiM10, loM10, loDigits, e10, max_bits, negate).floatValue
+ }
+ throw UnsafeNumber
+ }
+
+ // Based on the 'Moderate Path' algorithm from the awesome library of Alexander Huszagh: https://github.com/Alexhuszagh/rust-lexical
+ // Here is his inspiring post: https://www.reddit.com/r/rust/comments/a6j5j1/making_rust_float_parsing_fast_and_correct
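+  // Converts m10 * 10^e10 to a Float using 64-bit fixed-point arithmetic; when the
+  // result is too close to a rounding boundary to decide (|halfwayDiff| <= 1 with
+  // bits to spare), it falls back to an exact BigDecimal conversion.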
+ private[this] def toFloat(m10: Long, e10: Int): Float =
+ if (m10 == 0 || e10 < -64) 0.0f
+ else if (e10 >= 39) Float.PositiveInfinity
+ else {
+ var shift = java.lang.Long.numberOfLeadingZeros(m10)
+ var m2 = unsignedMultiplyHigh(pow10Mantissas(e10 + 343), m10 << shift)
+ var e2 = (e10 * 108853 >> 15) - shift + 1 // (e10 * Math.log(10) / Math.log(2)).toInt - shift + 1
+ shift = java.lang.Long.numberOfLeadingZeros(m2)
+ m2 <<= shift
+ e2 -= shift
+ val truncatedBitNum = Math.max(-149 - e2, 40)
+ val savedBitNum = 64 - truncatedBitNum
+ val mask = -1L >>> Math.max(savedBitNum, 0)
+ val halfwayDiff = (m2 & mask) - (mask >>> 1)
+ if (Math.abs(halfwayDiff) > 1 || savedBitNum <= 0) java.lang.Float.intBitsToFloat {
+ var mf = 0
+ if (savedBitNum > 0) mf = (m2 >>> truncatedBitNum).toInt
+ e2 += truncatedBitNum
+ if (savedBitNum >= 0 && halfwayDiff > 0) {
+ if (mf == 0xffffff) {
+ mf = 0x800000
+ e2 += 1
+ } else mf += 1
+ }
+ if (e2 == -149) mf
+ else if (e2 >= 105) 0x7f800000
+ else e2 + 150 << 23 | mf & 0x7fffff
+ }
+ else java.math.BigDecimal.valueOf(m10, -e10).floatValue
+ }
+
+ def double(num: String, max_bits: Int): Double =
+ double_(new FastStringReader(num), true, max_bits)
+
+ def double_(in: OneCharReader, consume: Boolean, max_bits: Int): Double = {
+ var current =
+ if (consume) in.readChar().toInt
+ else in.nextNonWhitespace().toInt
+ val negate = current == '-'
+ if (negate) current = in.readChar().toInt
+ else if (current == 'N') {
+ readAll(in, "aN", consume)
+ return Double.NaN
+ }
+ if (current == 'I' || current == '+') {
+ if (current == '+' && in.readChar() != 'I') throw UnsafeNumber
+ readAll(in, "nfinity", consume)
+ return if (negate) Double.NegativeInfinity else Double.PositiveInfinity
+ }
+ var loM10 = 0L
+ var loDigits = 0
+ var hiM10: java.math.BigDecimal = null
+ if (current >= '0' && current <= '9') {
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, 0, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ }
+ }
+ }
+ var e10 = 0
+ if (current == '.') {
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (loM10 < 922337203685477580L) {
+ loM10 = loM10 * 10 + (current - '0')
+ loDigits += 1
+ e10 -= 1
+ } else {
+ hiM10 = toBigDecimal(hiM10, loM10, loDigits, 0, max_bits, negate)
+ loM10 = (current - '0').toLong
+ loDigits = 1
+ e10 -= 1
+ }
+ }
+ }
+ if (
+ loDigits != 0 && ((current | 0x20) != 'e' || {
+ current = in.readChar().toInt
+ val negateExp = current == '-'
+ if (negateExp || current == '+') current = in.readChar().toInt
+ (current >= '0' && current <= '9') && {
+ var exp = '0' - current
+ while ({
+ current = in.read()
+ current >= '0' && current <= '9'
+ }) {
+ if (
+ exp < -214748364 || {
+ exp = exp * 10 + ('0' - current)
+ exp > 0
+ }
+ ) throw UnsafeNumber
+ }
+ negateExp && {
+ e10 += exp
+ e10 <= 0
+ } || !negateExp && {
+ e10 -= exp
+ exp != -2147483648
+ }
+ }
+ }) && (!consume || current == -1)
+ ) {
+ if (hiM10 eq null) {
+ var x =
+ if (e10 == 0) loM10.toDouble
+ else {
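+            // Fast path: the mantissa fits in 52 bits and the scaling stays within
+            // exactly representable powers of 10, so plain double multiplies and
+            // divides suffice.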
+ if (loM10 < 4503599627370496L && e10 >= -22 && e10 <= 38 - loDigits) {
+ val pow10 = pow10Doubles
+ if (e10 < 0) loM10 / pow10(-e10)
+ else if (e10 <= 22) loM10 * pow10(e10)
+ else {
+ val slop = 16 - loDigits
+ (loM10 * pow10(slop)) * pow10(e10 - slop)
+ }
+ } else toDouble(loM10, e10)
+ }
+ if (negate) x = -x
+ return x
+ }
+ return toBigDecimal(hiM10, loM10, loDigits, e10, max_bits, negate).doubleValue
+ }
+ throw UnsafeNumber
+ }
+
+ // Based on the 'Moderate Path' algorithm from the awesome library of Alexander Huszagh: https://github.com/Alexhuszagh/rust-lexical
+ // Here is his inspiring post: https://www.reddit.com/r/rust/comments/a6j5j1/making_rust_float_parsing_fast_and_correct
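+  // Same moderate-path scheme as toFloat above, with double-precision mantissa and
+  // exponent thresholds.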
+ @inline private[this] def toDouble(m10: Long, e10: Int): Double =
+ if (m10 == 0 || e10 < -343) 0.0
+ else if (e10 >= 310) Double.PositiveInfinity
+ else {
+ var shift = java.lang.Long.numberOfLeadingZeros(m10)
+ var m2 = unsignedMultiplyHigh(pow10Mantissas(e10 + 343), m10 << shift)
+ var e2 = (e10 * 108853 >> 15) - shift + 1 // (e10 * Math.log(10) / Math.log(2)).toInt - shift + 1
+ shift = java.lang.Long.numberOfLeadingZeros(m2)
+ m2 <<= shift
+ e2 -= shift
+ val truncatedBitNum = Math.max(-1074 - e2, 11)
+ val savedBitNum = 64 - truncatedBitNum
+ val mask = -1L >>> Math.max(savedBitNum, 0)
+ val halfwayDiff = (m2 & mask) - (mask >>> 1)
+ if (Math.abs(halfwayDiff) > 1 || savedBitNum <= 0) java.lang.Double.longBitsToDouble {
+ if (savedBitNum <= 0) m2 = 0
+ m2 >>>= truncatedBitNum
+ e2 += truncatedBitNum
+ if (savedBitNum >= 0 && halfwayDiff > 0) {
+ if (m2 == 0x1fffffffffffffL) {
+ m2 = 0x10000000000000L
+ e2 += 1
+ } else m2 += 1
+ }
+ if (e2 == -1074) m2
+ else if (e2 >= 972) 0x7ff0000000000000L
+ else (e2 + 1075).toLong << 52 | m2 & 0xfffffffffffffL
+ }
+ else java.math.BigDecimal.valueOf(m10, -e10).doubleValue
+ }
+
+ @noinline private[this] def readAll(in: OneCharReader, s: String, consume: Boolean): Unit = {
+ val len = s.length
+ var i = 0
+ while (i < len) {
+ if (in.readChar() != s.charAt(i)) throw UnsafeNumber
+ i += 1
+ }
+    val current = in.read() // for consistency, also consume the terminating character
+ if (consume && current != -1 || !consume && current != '"') throw UnsafeNumber
+ }
+
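+  // Both arguments have their most significant bit set at every call site (a
+  // normalized power-of-10 mantissa and a left-normalized m10), so the unsigned high
+  // word equals the signed high word plus x plus y.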
+ @inline private[this] def unsignedMultiplyHigh(x: Long, y: Long): Long =
+    Math.multiplyHigh(x, y) + x + y // FIXME: use Math.unsignedMultiplyHigh once JDK 17 support is dropped
+
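+  // Powers of 10 that are exactly representable as a Double (10^0 through 10^22).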
+ private[this] final val pow10Doubles: Array[Double] =
+ Array(1, 1e+1, 1e+2, 1e+3, 1e+4, 1e+5, 1e+6, 1e+7, 1e+8, 1e+9, 1e+10, 1e+11, 1e+12, 1e+13, 1e+14, 1e+15, 1e+16,
+ 1e+17, 1e+18, 1e+19, 1e+20, 1e+21, 1e+22)
+
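+  // Precomputed 64-bit normalized (top-bit-set) mantissas of powers of 10, indexed
+  // by e10 + 343, covering the exponent range needed by toFloat and toDouble.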
+ private[this] final val pow10Mantissas: Array[Long] = Array(
+ -4671960508600951122L, -1228264617323800998L, -7685194413468457480L, -4994806998408183946L, -1631822729582842029L,
+ -7937418233630358124L, -5310086773610559751L, -2025922448585811785L, -8183730558007214222L, -5617977179081629873L,
+ -2410785455424649437L, -8424269937281487754L, -5918651403174471789L, -2786628235540701832L, -8659171674854020501L,
+ -6212278575140137722L, -3153662200497784248L, -8888567902952197011L, -6499023860262858360L, -3512093806901185046L,
+ -9112587656954322510L, -6779048552765515233L, -3862124672529506138L, -215969822234494768L, -7052510166537641086L,
+ -4203951689744663454L, -643253593753441413L, -7319562523736982739L, -4537767136243840520L, -1060522901877412746L,
+ -7580355841314464822L, -4863758783215693124L, -1468012460592228501L, -7835036815511224669L, -5182110000961642932L,
+ -1865951482774665761L, -8083748704375247957L, -5492999862041672042L, -2254563809124702148L, -8326631408344020699L,
+ -5796603242002637969L, -2634068034075909558L, -8563821548938525330L, -6093090917745768758L, -3004677628754823043L,
+ -8795452545612846258L, -6382629663588669919L, -3366601061058449494L, -9021654690802612790L, -6665382345075878084L,
+ -3720041912917459700L, -38366372719436721L, -6941508010590729807L, -4065198994811024355L, -469812725086392539L,
+ -7211161980820077193L, -4402266457597708587L, -891147053569747830L, -7474495936122174250L, -4731433901725329908L,
+ -1302606358729274481L, -7731658001846878407L, -5052886483881210105L, -1704422086424124727L, -7982792831656159810L,
+ -5366805021142811859L, -2096820258001126919L, -8228041688891786181L, -5673366092687344822L, -2480021597431793123L,
+ -8467542526035952558L, -5972742139117552794L, -2854241655469553088L, -8701430062309552536L, -6265101559459552766L,
+ -3219690930897053053L, -8929835859451740015L, -6550608805887287114L, -3576574988931720989L, -9152888395723407474L,
+ -6829424476226871438L, -3925094576856201394L, -294682202642863838L, -7101705404292871755L, -4265445736938701790L,
+ -720121152745989333L, -7367604748107325189L, -4597819916706768583L, -1135588877456072824L, -7627272076051127371L,
+ -4922404076636521310L, -1541319077368263733L, -7880853450996246689L, -5239380795317920458L, -1937539975720012668L,
+ -8128491512466089774L, -5548928372155224313L, -2324474446766642487L, -8370325556870233411L, -5851220927660403859L,
+ -2702340141148116920L, -8606491615858654931L, -6146428501395930760L, -3071349608317525546L, -8837122532839535322L,
+ -6434717147622031249L, -3431710416100151157L, -9062348037703676329L, -6716249028702207507L, -3783625267450371480L,
+ -117845565885576446L, -6991182506319567135L, -4127292114472071014L, -547429124662700864L, -7259672230555269896L,
+ -4462904269766699466L, -966944318780986428L, -7521869226879198374L, -4790650515171610063L, -1376627125537124675L,
+ -7777920981101784778L, -5110715207949843068L, -1776707991509915931L, -8027971522334779313L, -5423278384491086237L,
+ -2167411962186469893L, -8272161504007625539L, -5728515861582144020L, -2548958808550292121L, -8510628282985014432L,
+ -6026599335303880135L, -2921563150702462265L, -8743505996830120772L, -6317696477610263061L, -3285434578585440922L,
+ -8970925639256982432L, -6601971030643840136L, -3640777769877412266L, -9193015133814464522L, -6879582898840692749L,
+ -3987792605123478032L, -373054737976959636L, -7150688238876681629L, -4326674280168464132L, -796656831783192261L,
+ -7415439547505577019L, -4657613415954583370L, -1210330751515841308L, -7673985747338482674L, -4980796165745715438L,
+ -1614309188754756393L, -7926472270612804602L, -5296404319838617848L, -2008819381370884406L, -8173041140997884610L,
+ -5604615407819967859L, -2394083241347571919L, -8413831053483314306L, -5905602798426754978L, -2770317479606055818L,
+ -8648977452394866743L, -6199535797066195524L, -3137733727905356501L, -8878612607581929669L, -6486579741050024183L,
+ -3496538657885142324L, -9102865688819295809L, -6766896092596731857L, -3846934097318526917L, -196981603220770742L,
+ -7040642529654063570L, -4189117143640191558L, -624710411122851544L, -7307973034592864071L, -4523280274813692185L,
+ -1042414325089727327L, -7569037980822161435L, -4849611457600313890L, -1450328303573004458L, -7823984217374209643L,
+ -5168294253290374149L, -1848681798185579782L, -8072955151507069220L, -5479507920956448621L, -2237698882768172872L,
+ -8316090829371189901L, -5783427518286599473L, -2617598379430861437L, -8553528014785370254L, -6080224000054324913L,
+ -2988593981640518238L, -8785400266166405755L, -6370064314280619289L, -3350894374423386208L, -9011838011655698236L,
+ -6653111496142234891L, -3704703351750405709L, -19193171260619233L, -6929524759678968877L, -4050219931171323192L,
+ -451088895536766085L, -7199459587351560659L, -4387638465762062920L, -872862063775190746L, -7463067817500576073L,
+ -4717148753448332187L, -1284749923383027329L, -7720497729755473937L, -5038936143766954517L, -1686984161281305242L,
+ -7971894128441897632L, -5353181642124984136L, -2079791034228842266L, -8217398424034108273L, -5660062011615247437L,
+ -2463391496091671392L, -8457148712698376476L, -5959749872445582691L, -2838001322129590460L, -8691279853972075893L,
+ -6252413799037706963L, -3203831230369745799L, -8919923546622172981L, -6538218414850328322L, -3561087000135522498L,
+ -9143208402725783417L, -6817324484979841368L, -3909969587797413806L, -275775966319379353L, -7089889006590693952L,
+ -4250675239810979535L, -701658031336336515L, -7356065297226292178L, -4583395603105477319L, -1117558485454458744L,
+ -7616003081050118571L, -4908317832885260310L, -1523711272679187483L, -7869848573065574033L, -5225624697904579637L,
+ -1920344853953336643L, -8117744561361917258L, -5535494683275008668L, -2307682335666372931L, -8359830487432564938L,
+ -5838102090863318269L, -2685941595151759932L, -8596242524610931813L, -6133617137336276863L, -3055335403242958174L,
+ -8827113654667930715L, -6422206049907525490L, -3416071543957018958L, -9052573742614218705L, -6704031159840385477L,
+ -3768352931373093942L, -98755145788979524L, -6979250993759194058L, -4112377723771604669L, -528786136287117932L,
+ -7248020362820530564L, -4448339435098275301L, -948738275445456222L, -7510490449794491995L, -4776427043815727089L,
+ -1358847786342270957L, -7766808894105001205L, -5096825099203863602L, -1759345355577441598L, -8017119874876982855L,
+ -5409713825168840664L, -2150456263033662926L, -8261564192037121185L, -5715269221619013577L, -2532400508596379068L,
+ -8500279345513818773L, -6013663163464885563L, -2905392935903719049L, -8733399612580906262L, -6305063497298744923L,
+ -3269643353196043250L, -8961056123388608887L, -6589634135808373205L, -3625356651333078602L, -9183376934724255983L,
+ -6867535149977932074L, -3972732919045027189L, -354230130378896082L, -7138922859127891907L, -4311967555482476980L,
+ -778273425925708321L, -7403949918844649557L, -4643251380128424042L, -1192378206733142148L, -7662765406849295699L,
+ -4966770740134231719L, -1596777406740401745L, -7915514906853832947L, -5282707615139903279L, -1991698500497491195L,
+ -8162340590452013853L, -5591239719637629412L, -2377363631119648861L, -8403381297090862394L, -5892540602936190089L,
+ -2753989735242849707L, -8638772612167862923L, -6186779746782440750L, -3121788665050663033L, -8868646943297746252L,
+ -6474122660694794911L, -3480967307441105734L, -9093133594791772940L, -6754730975062328271L, -3831727700400522434L,
+ -177973607073265139L, -7028762532061872568L, -4174267146649952806L, -606147914885053103L, -7296371474444240046L,
+ -4508778324627912153L, -1024286887357502287L, -7557708332239520786L, -4835449396872013078L, -1432625727662628443L,
+ -7812920107430224633L, -5154464115860392887L, -1831394126398103205L, -8062150356639896359L, -5466001927372482545L,
+ -2220816390788215277L, -8305539271883716405L, -5770238071427257602L, -2601111570856684098L, -8543223759426509417L,
+ -6067343680855748868L, -2972493582642298180L, -8775337516792518219L, -6357485877563259869L, -3335171328526686933L,
+ -9002011107970261189L, -6640827866535438582L, -3689348814741910324L, -9223372036854775808L, -6917529027641081856L,
+ -4035225266123964416L, -432345564227567616L, -7187745005283311616L, -4372995238176751616L, -854558029293551616L,
+ -7451627795949551616L, -4702848726509551616L, -1266874889709551616L, -7709325833709551616L, -5024971273709551616L,
+ -1669528073709551616L, -7960984073709551616L, -5339544073709551616L, -2062744073709551616L, -8206744073709551616L,
+ -5646744073709551616L, -2446744073709551616L, -8446744073709551616L, -5946744073709551616L, -2821744073709551616L,
+ -8681119073709551616L, -6239712823709551616L, -3187955011209551616L, -8910000909647051616L, -6525815118631426616L,
+ -3545582879861895366L, -9133518327554766460L, -6805211891016070171L, -3894828845342699810L, -256850038250986858L,
+ -7078060301547948643L, -4235889358507547899L, -683175679707046970L, -7344513827457986212L, -4568956265895094861L,
+ -1099509313941480672L, -7604722348854507276L, -4894216917640746191L, -1506085128623544835L, -7858832233030797378L,
+ -5211854272861108819L, -1903131822648998119L, -8106986416796705681L, -5522047002568494197L, -2290872734783229842L,
+ -8349324486880600507L, -5824969590173362730L, -2669525969289315508L, -8585982758446904049L, -6120792429631242157L,
+ -3039304518611664792L, -8817094351773372351L, -6409681921289327535L, -3400416383184271515L, -9042789267131251553L,
+ -6691800565486676537L, -3753064688430957767L, -79644842111309304L, -6967307053960650171L, -4097447799023424810L,
+ -510123730351893109L, -7236356359111015049L, -4433759430461380907L, -930513269649338230L, -7499099821171918250L,
+ -4762188758037509908L, -1341049929119499481L, -7755685233340769032L, -5082920523248573386L, -1741964635633328828L,
+ -8006256924911912374L, -5396135137712502563L, -2133482903713240300L, -8250955842461857044L, -5702008784649933400L,
+ -2515824962385028846L, -8489919629131724885L, -6000713517987268202L, -2889205879056697349L, -8723282702051517699L,
+ -6292417359137009220L, -3253835680493873621L, -8951176327949752869L, -6577284391509803182L, -3609919470959866074L,
+ -9173728696990998152L, -6855474852811359786L, -3957657547586811828L, -335385916056126881L, -7127145225176161157L,
+ -4297245513042813542L, -759870872876129024L, -7392448323188662496L, -4628874385558440216L, -1174406963520662366L,
+ -7651533379841495835L, -4952730706374481889L, -1579227364540714458L, -7904546130479028392L, -5268996644671397586L,
+ -1974559787411859078L, -8151628894773493780L, -5577850100039479321L, -2360626606621961247L, -8392920656779807636L,
+ -5879464802547371641L, -2737644984756826647L, -8628557143114098510L, -6174010410465235234L, -3105826994654156138L,
+ -8858670899299929442L, -6461652605697523899L, -3465379738694516970L, -9083391364325154962L, -6742553186979055799L,
+ -3816505465296431844L, -158945813193151901L, -7016870160886801794L, -4159401682681114339L, -587566084924005019L,
+ -7284757830718584993L, -4494261269970843337L, -1006140569036166268L, -7546366883288685774L, -4821272585683469313L,
+ -1414904713676948737L, -7801844473689174817L, -5140619573684080617L, -1814088448677712867L, -8051334308064652398L,
+ -5452481866653427593L, -2203916314889396588L, -8294976724446954723L, -5757034887131305500L, -2584607590486743971L,
+ -8532908771695296838L, -6054449946191733143L, -2956376414312278525L, -8765264286586255934L, -6344894339805432014L,
+ -3319431906329402113L, -8992173969096958177L, -6628531442943809817L, -3673978285252374367L, -9213765455923815836L,
+ -6905520801477381891L, -4020214983419339459L, -413582710846786420L, -7176018221920323369L, -4358336758973016307L,
+ -836234930288882479L, -7440175859071633406L, -4688533805412153853L, -1248981238337804412L, -7698142301602209614L,
+ -5010991858575374113L, -1652053804791829737L, -7950062655635975442L, -5325892301117581398L, -2045679357969588844L,
+ -8196078626372074883L, -5633412264537705700L, -2430079312244744221L, -8436328597794046994L, -5933724728815170839L,
+ -2805469892591575644L, -8670947710510816634L, -6226998619711132888L, -3172062256211528206L, -8900067937773286985L,
+ -6513398903789220827L, -3530062611309138130L, -9123818159709293187L, -6793086681209228580L, -3879672333084147821L,
+ -237904397927796872L, -7066219276345954901L, -4221088077005055722L, -664674077828931749L, -7332950326284164199L,
+ -4554501889427817345L, -1081441343357383777L, -7593429867239446717L, -4880101315621920492L, -1488440626100012711L,
+ -7847804418953589800L, -5198069505264599346L, -1885900863153361279L, -8096217067111932656L, -5508585315462527915L,
+ -2274045625900771990L, -8338807543829064350L, -5811823411358942533L, -2653093245771290262L, -8575712306248138270L,
+ -6107954364382784934L, -3023256937051093263L, -8807064613298015146L, -6397144748195131028L, -3384744916816525881L,
+ -9032994600651410532L, -6679557232386875260L, -3737760522056206171L, -60514634142869810L, -6955350673980375487L,
+ -4082502324048081455L, -491441886632713915L, -7224680206786528053L, -4419164240055772162L, -912269281642327298L,
+ -7487697328667536418L, -4747935642407032618L, -1323233534581402868L, -7744549986754458649L, -5069001465015685407L,
+ -1724565812842218855L, -7995382660667468640L, -5382542307406947896L, -2116491865831296966L, -8240336443785642460L,
+ -5688734536304665171L, -2499232151953443560L, -8479549122611984081L, -5987750384837592197L, -2873001962619602342L,
+ -8713155254278333320L, -6279758049420528746L, -3238011543348273028L, -8941286242233752499L, -6564921784364802720L,
+ -3594466212028615495L, -9164070410158966541L, -6843401994271320272L, -3942566474411762436L, -316522074587315140L,
+ -7115355324258153819L, -4282508136895304370L, -741449152691742558L, -7380934748073420955L, -4614482416664388289L,
+ -1156417002403097458L, -7640289654143017767L, -4938676049251384305L, -1561659043136842477L, -7893565929601608404L,
+ -5255271393574622601L, -1957403223540890347L, -8140906042354138323L, -5564446534515285000L, -2343872149716718346L,
+ -8382449121214030822L, -5866375383090150624L, -2721283210435300376L, -8618331034163144591L, -6161227774276542835L,
+ -3089848699418290639L, -8848684464777513506L, -6449169562544503978L, -3449775934753242068L, -9073638986861858149L,
+ -6730362715149934782L, -3801267375510030573L, -139898200960150313L, -7004965403241175802L, -4144520735624081848L,
+ -568964901102714406L, -7273132090830278360L, -4479729095110460046L, -987975350460687153L, -7535013621679011327L,
+ -4807081008671376254L, -1397165242411832414L, -7790757304148477115L, -5126760611758208489L, -1796764746270372707L,
+ -8040506994060064798L, -5438947724147693094L, -2186998636757228463L, -8284403175614349646L, -5743817951090549153L,
+ -2568086420435798537L, -8522583040413455942L, -6041542782089432023L, -2940242459184402125L, -8755180564631333184L,
+ -6332289687361778576L, -3303676090774835316L, -8982326584375353929L, -6616222212041804507L, -3658591746624867729L,
+ -9204148869281624187L, -6893500068174642330L, -4005189066790915008L, -394800315061255856L, -7164279224554366766L,
+ -4343663012265570553L, -817892746904575288L, -7428711994456441411L, -4674203974643163860L, -1231068949876566920L,
+ -7686947121313936181L, -4996997883215032323L, -1634561335591402499L, -7939129862385708418L, -5312226309554747619L,
+ -2028596868516046619L, -8185402070463610993L, -5620066569652125837L
+ )
+}
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/CollectionDecoderBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/CollectionDecoderBenchmarks.scala
index 1d87aa2d8..968e75d1b 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/CollectionDecoderBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/CollectionDecoderBenchmarks.scala
@@ -10,7 +10,6 @@ import scala.collection.immutable
@State(Scope.Thread)
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
-@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Fork(value = 1)
class CollectionDecoderBenchmarks {
private[this] var encodedArray: String = null
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/CollectionEncoderBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/CollectionEncoderBenchmarks.scala
index 226d46ab6..46b9ad860 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/CollectionEncoderBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/CollectionEncoderBenchmarks.scala
@@ -10,7 +10,6 @@ import scala.collection.{ SortedMap, immutable }
@State(Scope.Thread)
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
-@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Fork(value = 1)
class CollectionEncoderBenchmarks {
private[this] var stringsChunk: Chunk[String] = null
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/GeoJSONBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/GeoJSONBenchmarks.scala
index 3c818e917..e8a407be9 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/GeoJSONBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/GeoJSONBenchmarks.scala
@@ -1,16 +1,14 @@
package zio.json
-import java.nio.charset.StandardCharsets._
import java.util.concurrent.TimeUnit
import com.github.plokhotnyuk.jsoniter_scala.core._
import com.github.plokhotnyuk.jsoniter_scala.macros._
import io.circe
import zio.json.GeoJSONBenchmarks._
-import testzio.json.TestUtils._
-import testzio.json.data.geojson.handrolled._
+import zio.json.TestUtils._
+import zio.json.data.geojson.handrolled._
import org.openjdk.jmh.annotations._
-import play.api.libs.{ json => Play }
import scala.util.Try
@@ -42,30 +40,26 @@ class GeoJSONBenchmarks {
assert(decodeCirceSuccess2() == decodeZioSuccess2())
assert(decodeCirceError().isLeft)
- // these are failing because of a bug in play-json, but they succeed
- // assert(decodeCirceSuccess1() == decodePlaySuccess1(), decodePlaySuccess1().toString)
- // assert(decodeCirceSuccess2() == decodePlaySuccess2())
- assert(decodePlaySuccess1().isRight)
- assert(decodePlaySuccess2().isRight)
- assert(decodePlayError().isLeft)
-
assert(decodeZioError().isLeft)
}
@Benchmark
def decodeJsoniterSuccess1(): Either[String, GeoJSON] =
- Try(readFromArray(jsonString1.getBytes(UTF_8)))
- .fold(t => Left(t.toString), Right.apply)
+ Try(readFromString(jsonString1))
+ .fold(t => Left(t.toString), Right.apply(_))
@Benchmark
def decodeJsoniterSuccess2(): Either[String, GeoJSON] =
- Try(readFromArray(jsonString2.getBytes(UTF_8)))
- .fold(t => Left(t.toString), Right.apply)
+ Try(readFromString(jsonString2))
+ .fold(t => Left(t.toString), Right.apply(_))
@Benchmark
def decodeJsoniterError(): Either[String, GeoJSON] =
- Try(readFromArray(jsonStringErr.getBytes(UTF_8)))
- .fold(t => Left(t.toString), Right.apply)
+ Try(readFromString(jsonStringErr))
+ .fold(t => Left(t.toString), Right.apply(_))
+
+ @Benchmark
+ def encodeJsoniter(): String = writeToString(decoded)
@Benchmark
def decodeCirceSuccess1(): Either[circe.Error, GeoJSON] =
@@ -86,25 +80,6 @@ class GeoJSONBenchmarks {
def decodeCirceError(): Either[circe.Error, GeoJSON] =
circe.parser.decode[GeoJSON](jsonStringErr)
- @Benchmark
- def decodePlaySuccess1(): Either[String, GeoJSON] =
- Try(Play.Json.parse(jsonString1).as[GeoJSON])
- .fold(t => Left(t.toString), Right.apply)
-
- @Benchmark
- def decodePlaySuccess2(): Either[String, GeoJSON] =
- Try(Play.Json.parse(jsonString2).as[GeoJSON])
- .fold(t => Left(t.toString), Right.apply)
-
- @Benchmark
- def encodePlay(): String =
- Play.Json.stringify(implicitly[Play.Writes[GeoJSON]].writes(decoded))
-
- @Benchmark
- def decodePlayError(): Either[String, GeoJSON] =
- Try(Play.Json.parse(jsonStringErr).as[GeoJSON])
- .fold(t => Left(t.toString), Right.apply)
-
@Benchmark
def decodeZioSuccess1(): Either[String, GeoJSON] =
jsonChars1.fromJson[GeoJSON]
@@ -120,7 +95,6 @@ class GeoJSONBenchmarks {
@Benchmark
def decodeZioError(): Either[String, GeoJSON] =
jsonCharsErr.fromJson[GeoJSON]
-
}
object GeoJSONBenchmarks {
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/GoogleMapsAPIBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/GoogleMapsAPIBenchmarks.scala
index 277455ad1..963567960 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/GoogleMapsAPIBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/GoogleMapsAPIBenchmarks.scala
@@ -1,15 +1,14 @@
package zio.json
-import java.util.Arrays
import java.util.concurrent.TimeUnit
import com.github.plokhotnyuk.jsoniter_scala.core._
import com.github.plokhotnyuk.jsoniter_scala.macros._
import io.circe
-import testzio.json.TestUtils._
-import testzio.json.data.googlemaps._
+import zio.json.TestUtils._
+import zio.json.data.googlemaps._
import org.openjdk.jmh.annotations._
-import play.api.libs.{ json => Play }
+import zio.json.GoogleMapsAPIBenchmarks._
import scala.util.Try
@@ -62,8 +61,8 @@ class GoogleMapsAPIBenchmarks {
@Setup
def setup(): Unit = {
- //Distance Matrix API call for top-10 by population cities in US:
- //https://maps.googleapis.com/maps/api/distancematrix/json?origins=New+York|Los+Angeles|Chicago|Houston|Phoenix+AZ|Philadelphia|San+Antonio|San+Diego|Dallas|San+Jose&destinations=New+York|Los+Angeles|Chicago|Houston|Phoenix+AZ|Philadelphia|San+Antonio|San+Diego|Dallas|San+Jose
+ // Distance Matrix API call for top-10 by population cities in US:
+ // https://maps.googleapis.com/maps/api/distancematrix/json?origins=New+York|Los+Angeles|Chicago|Houston|Phoenix+AZ|Philadelphia|San+Antonio|San+Diego|Dallas|San+Jose&destinations=New+York|Los+Angeles|Chicago|Houston|Phoenix+AZ|Philadelphia|San+Antonio|San+Diego|Dallas|San+Jose
jsonString = getResourceAsString("google_maps_api_response.json")
jsonChars = asChars(jsonString)
jsonStringCompact = getResourceAsString(
@@ -94,51 +93,49 @@ class GoogleMapsAPIBenchmarks {
assert(decodeCirceSuccess1() == decodeZioSuccess1())
assert(decodeCirceSuccess2() == decodeZioSuccess2())
- assert(decodeCirceSuccess1() == decodePlaySuccess1())
- assert(decodeCirceSuccess2() == decodePlaySuccess2())
assert(decodeCirceSuccess1() == decodeCirceAttack0())
assert(decodeCirceSuccess1() == decodeZioAttack0())
- assert(decodeCirceSuccess1() == decodePlayAttack0())
assert(decodeCirceSuccess1() == decodeCirceAttack1())
assert(decodeCirceSuccess1() == decodeZioAttack1())
- assert(decodeCirceSuccess1() == decodePlayAttack1())
assert(decodeCirceSuccess1() == decodeCirceAttack2())
assert(decodeCirceSuccess1() == decodeZioAttack2())
- assert(decodeCirceSuccess1() == decodePlayAttack2())
}
- // @Benchmark
- // def decodeJsoniterSuccess1(): Either[String, DistanceMatrix] =
- // Try(readFromArray(jsonString.getBytes(UTF_8)))
- // .fold(t => Left(t.toString), Right.apply)
+ @Benchmark
+ def decodeJsoniterSuccess1(): Either[String, DistanceMatrix] =
+ Try(readFromString(jsonString))
+ .fold(t => Left(t.toString), Right.apply(_))
- // @Benchmark
- // def decodeJsoniterSuccess2(): Either[String, DistanceMatrix] =
- // Try(readFromArray(jsonStringCompact.getBytes(UTF_8)))
- // .fold(t => Left(t.toString), Right.apply)
+ @Benchmark
+ def decodeJsoniterSuccess2(): Either[String, DistanceMatrix] =
+ Try(readFromString(jsonStringCompact))
+ .fold(t => Left(t.toString), Right.apply(_))
- // @Benchmark
- // def decodeJsoniterError(): Either[String, DistanceMatrix] =
- // Try(readFromArray(jsonStringErr.getBytes(UTF_8)))
- // .fold(t => Left(t.toString), Right.apply)
+ @Benchmark
+ def decodeJsoniterError(): Either[String, DistanceMatrix] =
+ Try(readFromString(jsonStringErr))
+ .fold(t => Left(t.toString), Right.apply(_))
- // @Benchmark
- // def decodeJsoniterAttack1(): Either[String, DistanceMatrix] =
- // Try(readFromArray(jsonStringAttack1.getBytes(UTF_8)))
- // .fold(t => Left(t.toString), Right.apply)
+ @Benchmark
+ def decodeJsoniterAttack1(): Either[String, DistanceMatrix] =
+ Try(readFromString(jsonStringAttack1))
+ .fold(t => Left(t.toString), Right.apply(_))
- // @Benchmark
- // def decodeJsoniterAttack2(): Either[String, DistanceMatrix] =
- // Try(readFromArray(jsonStringAttack2.getBytes(UTF_8)))
- // .fold(t => Left(t.toString), Right.apply)
+ @Benchmark
+ def decodeJsoniterAttack2(): Either[String, DistanceMatrix] =
+ Try(readFromString(jsonStringAttack2))
+ .fold(t => Left(t.toString), Right.apply(_))
- // @Benchmark
- // def decodeJsoniterAttack3(): Either[String, DistanceMatrix] =
- // Try(readFromArray(jsonStringAttack3.getBytes(UTF_8)))
- // .fold(t => Left(t.toString), Right.apply)
+ @Benchmark
+ def decodeJsoniterAttack3(): Either[String, DistanceMatrix] =
+ Try(readFromString(jsonStringAttack3))
+ .fold(t => Left(t.toString), Right.apply(_))
+
+ @Benchmark
+ def encodeJsoniter(): String = writeToString(decoded)
@Benchmark
def decodeCirceSuccess1(): Either[circe.Error, DistanceMatrix] =
@@ -183,56 +180,6 @@ class GoogleMapsAPIBenchmarks {
def decodeCirceAttack3(): Either[circe.Error, DistanceMatrix] =
circe.parser.decode[DistanceMatrix](jsonStringAttack3)
- def playDecode[A](
- str: String
- )(implicit R: Play.Reads[A]): Either[String, A] =
- Try(Play.Json.parse(str).as[A]).fold(
- // if we don't access the stacktrace then the JVM can optimise it away in
- // these tight loop perf tests, which would cover up a real bottleneck
- err => Left(Arrays.toString(err.getStackTrace().asInstanceOf[Array[Object]])),
- a => Right(a)
- )
-
- @Benchmark
- def decodePlaySuccess1(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonString)
-
- @Benchmark
- def decodePlaySuccess2(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonStringCompact)
-
- @Benchmark
- def encodePlay(): String =
- Play.Json.stringify(implicitly[Play.Writes[DistanceMatrix]].writes(decoded))
-
- // @Benchmark
- // def decodePlayError(): Either[String, DistanceMatrix] =
- // playDecode[DistanceMatrix](jsonStringErr)
-
- @Benchmark
- def decodePlayErrorParse(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonStringErrParse)
-
- @Benchmark
- def decodePlayErrorNumber(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonStringErrNumber)
-
- @Benchmark
- def decodePlayAttack0(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonStringAttack0)
-
- @Benchmark
- def decodePlayAttack1(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonStringAttack1)
-
- @Benchmark
- def decodePlayAttack2(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonStringAttack2)
-
- @Benchmark
- def decodePlayAttack3(): Either[String, DistanceMatrix] =
- playDecode[DistanceMatrix](jsonStringAttack3)
-
@Benchmark
def decodeZioSuccess1(): Either[String, DistanceMatrix] =
jsonChars.fromJson[DistanceMatrix]
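
Note: the removed `playDecode` helper carried the rationale for the `Try(...).fold(t => Left(t.toString), Right.apply(_))` convention that the re-enabled jsoniter benchmarks keep: the error must stay observable, otherwise the JIT can optimise the decode away in a tight loop. A minimal sketch of that convention under JMH; `DistanceMatrixLike` and `parse` are stand-ins, not the real model:

```scala
import org.openjdk.jmh.annotations._

import scala.util.Try

// Stand-in for the benchmark's real DistanceMatrix model.
final case class DistanceMatrixLike(status: String)

@State(Scope.Thread)
class DecodeBenchSketch {
  var json: String = """{"status":"OK"}"""

  // Stand-in decoder; throws on bad input like a real parser would.
  private def parse(s: String): DistanceMatrixLike =
    if (s.contains("OK")) DistanceMatrixLike("OK") else sys.error("bad input")

  // Returning the Either means JMH feeds the result to a Blackhole, so the
  // JIT cannot dead-code-eliminate the decode, and failures surface as Left
  // values instead of being silently optimised away.
  @Benchmark
  def decodeSuccess(): Either[String, DistanceMatrixLike] =
    Try(parse(json)).fold(t => Left(t.toString), Right(_))
}
```
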
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/SyntheticBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/SyntheticBenchmarks.scala
index 26e2ddaad..b6b529e65 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/SyntheticBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/SyntheticBenchmarks.scala
@@ -1,15 +1,15 @@
package zio.json
-import java.nio.charset.StandardCharsets.UTF_8
import java.util.concurrent.TimeUnit
import com.github.plokhotnyuk.jsoniter_scala.core._
import com.github.plokhotnyuk.jsoniter_scala.macros._
import io.circe
+import io.circe.Codec
+import io.circe.generic.semiauto.deriveCodec
import zio.json.SyntheticBenchmarks._
-import testzio.json.TestUtils._
+import zio.json.TestUtils._
import org.openjdk.jmh.annotations._
-import play.api.libs.{ json => Play }
import scala.util.Try
@@ -19,18 +19,8 @@ object Nested {
DeriveJsonDecoder.gen
implicit lazy val zioJsonEncoder: JsonEncoder[Nested] =
DeriveJsonEncoder.gen
-
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- .copy(discriminator = Some("type"))
- implicit lazy val circeJsonDecoder: circe.Decoder[Nested] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Nested]
- implicit lazy val circeEncoder: circe.Encoder[Nested] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Nested]
-
- implicit lazy val playFormatter: Play.Format[Nested] =
- Play.Json.format[Nested]
-
+ implicit lazy val circeCodec: Codec[Nested] =
+ deriveCodec
}
@State(Scope.Thread)
@@ -38,8 +28,8 @@ object Nested {
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 1)
class SyntheticBenchmarks {
- //@Param(Array("100", "1000"))
- var size: Int = 500
+ // @Param(Array("100", "1000"))
+ var size: Int = 100
var jsonString: String = _
var jsonChars: CharSequence = _
var decoded: Nested = _
@@ -60,15 +50,16 @@ class SyntheticBenchmarks {
assert(decodeJsoniterSuccess() == decodeZioSuccess())
assert(decodeCirceSuccess() == decodeZioSuccess())
-
- assert(decodePlaySuccess() == decodeZioSuccess())
}
@Benchmark
def decodeJsoniterSuccess(): Either[String, Nested] =
- Try(readFromArray(jsonString.getBytes(UTF_8)))
+ Try(readFromString(jsonString))
.fold(t => Left(t.toString), Right(_))
+ @Benchmark
+ def encodeJsoniter(): String = writeToString(decoded)
+
@Benchmark
def decodeCirceSuccess(): Either[circe.Error, Nested] =
circe.parser.decode[Nested](jsonString)
@@ -80,15 +71,6 @@ class SyntheticBenchmarks {
decoded.asJson.noSpaces
}
- @Benchmark
- def decodePlaySuccess(): Either[String, Nested] =
- Try(Play.Json.parse(jsonString).as[Nested])
- .fold(t => Left(t.toString), Right.apply)
-
- @Benchmark
- def encodePlay(): String =
- Play.Json.stringify(implicitly[Play.Writes[Nested]].writes(decoded))
-
@Benchmark
def decodeZioSuccess(): Either[String, Nested] =
jsonChars.fromJson[Nested]
@@ -96,7 +78,6 @@ class SyntheticBenchmarks {
@Benchmark
def encodeZio(): CharSequence =
JsonEncoder[Nested].encodeJson(decoded, None)
-
}
object SyntheticBenchmarks {
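
Note: the configured circe-generic-extras derivation for `Nested` is replaced above by plain `io.circe.generic.semiauto.deriveCodec`. A minimal sketch of that pattern for a self-referential type; `Chain` is illustrative and the benchmark's real `Nested` shape may differ:

```scala
import io.circe.Codec
import io.circe.generic.semiauto.deriveCodec
import io.circe.parser.decode
import io.circe.syntax._

// Illustrative recursive type standing in for Nested.
final case class Chain(next: Option[Chain])

object Chain {
  // With no discriminator configuration left in the model, plain semiauto
  // derivation is enough; `lazy` lets the codec refer to itself for the
  // recursive field.
  implicit lazy val codec: Codec[Chain] = deriveCodec
}

object ChainDemo {
  def main(args: Array[String]): Unit = {
    val value = Chain(Some(Chain(None)))
    val json  = value.asJson.noSpaces // {"next":{"next":null}}
    println(json)
    println(decode[Chain](json))      // Right(Chain(Some(Chain(None))))
  }
}
```
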
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/TwitterAPIBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/TwitterAPIBenchmarks.scala
index 14eb64ed4..42952ae94 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/TwitterAPIBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/TwitterAPIBenchmarks.scala
@@ -1,16 +1,14 @@
package zio.json
-import java.nio.charset.StandardCharsets.UTF_8
import java.util.concurrent.TimeUnit
import com.github.plokhotnyuk.jsoniter_scala.core._
import com.github.plokhotnyuk.jsoniter_scala.macros._
import io.circe
-import testzio.json.TestUtils._
-import testzio.json.data.twitter._
+import zio.json.TestUtils._
+import zio.json.data.twitter._
import org.openjdk.jmh.annotations._
-import play.api.libs.{ json => Play }
-import TwitterAPIBenchmarks._
+import zio.json.TwitterAPIBenchmarks._
import scala.util.Try
@@ -44,27 +42,26 @@ class TwitterAPIBenchmarks {
assert(decodeCirceSuccess2() == decodeZioSuccess2())
assert(decodeCirceError().isLeft)
- assert(decodePlaySuccess1() == decodeZioSuccess1())
- assert(decodePlaySuccess2() == decodeZioSuccess2())
- assert(decodePlayError().isLeft)
-
assert(decodeZioError().isLeft)
}
@Benchmark
def decodeJsoniterSuccess1(): Either[String, List[Tweet]] =
- Try(readFromArray(jsonString.getBytes(UTF_8)))
- .fold(t => Left(t.toString), Right.apply)
+ Try(readFromString(jsonString))
+ .fold(t => Left(t.toString), Right.apply(_))
@Benchmark
def decodeJsoniterSuccess2(): Either[String, List[Tweet]] =
- Try(readFromArray(jsonStringCompact.getBytes(UTF_8)))
- .fold(t => Left(t.toString), Right.apply)
+ Try(readFromString(jsonStringCompact))
+ .fold(t => Left(t.toString), Right.apply(_))
@Benchmark
def decodeJsoniterError(): Either[String, List[Tweet]] =
- Try(readFromArray(jsonStringErr.getBytes(UTF_8)))
- .fold(t => Left(t.toString), Right.apply)
+ Try(readFromString(jsonStringErr))
+ .fold(t => Left(t.toString), Right.apply(_))
+
+ @Benchmark
+ def encodeJsoniter(): String = writeToString(decoded)
@Benchmark
def decodeCirceSuccess1(): Either[circe.Error, List[Tweet]] =
@@ -85,25 +82,6 @@ class TwitterAPIBenchmarks {
def decodeCirceError(): Either[circe.Error, List[Tweet]] =
circe.parser.decode[List[Tweet]](jsonStringErr)
- @Benchmark
- def decodePlaySuccess1(): Either[String, List[Tweet]] =
- Try(Play.Json.parse(jsonString).as[List[Tweet]])
- .fold(t => Left(t.toString), Right.apply)
-
- @Benchmark
- def decodePlaySuccess2(): Either[String, List[Tweet]] =
- Try(Play.Json.parse(jsonStringCompact).as[List[Tweet]])
- .fold(t => Left(t.toString), Right.apply)
-
- @Benchmark
- def encodePlay(): String =
- Play.Json.stringify(implicitly[Play.Writes[List[Tweet]]].writes(decoded))
-
- @Benchmark
- def decodePlayError(): Either[String, List[Tweet]] =
- Try(Play.Json.parse(jsonStringErr).as[List[Tweet]])
- .fold(t => Left(t.toString), Right.apply)
-
@Benchmark
def decodeZioSuccess1(): Either[String, List[Tweet]] =
jsonChars.fromJson[List[Tweet]]
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/UUIDBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/UUIDBenchmarks.scala
index 6b987e219..3912c108a 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/UUIDBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/UUIDBenchmarks.scala
@@ -30,7 +30,7 @@ class UUIDBenchmarks {
} yield s"$s1-$s2-$s3-$s4-$s5"
unparsedUUIDChunk = {
- Unsafe.unsafeCompat { implicit u =>
+ Unsafe.unsafe { implicit u =>
zio.Runtime.default.unsafe.run(gen.runCollectN(10000).map(Chunk.fromIterable)).getOrThrow()
}
}
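
Note: `Unsafe.unsafeCompat` was the transitional helper from the ZIO 1-to-2 migration; the hunks in this patch move to the stable `Unsafe.unsafe` entry point. A minimal sketch of the pattern used above to run an effect synchronously:

```scala
import zio._

object UnsafeRunSketch {
  // ZIO 2 pattern now used by the benchmarks: open an Unsafe scope and run
  // the effect on the default runtime, rethrowing any failure.
  def runSync[A](effect: Task[A]): A =
    Unsafe.unsafe { implicit u =>
      Runtime.default.unsafe.run(effect).getOrThrow()
    }

  def main(args: Array[String]): Unit =
    println(runSync(ZIO.attempt(21 * 2))) // 42
}
```
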
diff --git a/zio-json/jvm/src/jmh/scala/zio/json/internal/SafeNumbersBenchmarks.scala b/zio-json/jvm/src/jmh/scala/zio/json/internal/SafeNumbersBenchmarks.scala
index 624f8827d..345b8afa0 100644
--- a/zio-json/jvm/src/jmh/scala/zio/json/internal/SafeNumbersBenchmarks.scala
+++ b/zio-json/jvm/src/jmh/scala/zio/json/internal/SafeNumbersBenchmarks.scala
@@ -10,7 +10,7 @@ import org.openjdk.jmh.annotations._
@Fork(value = 1)
class SafeNumbersBenchInt {
- //@Param(Array("100", "1000"))
+ // @Param(Array("100", "1000"))
var size: Int = 10000
// invalid input. e.g. out of range longs
@@ -69,7 +69,7 @@ class SafeNumbersBenchInt {
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 1)
class SafeNumbersBenchFloat {
- //@Param(Array("100", "1000"))
+ // @Param(Array("100", "1000"))
var size: Int = 10000
var invalids: Array[String] = _
@@ -115,7 +115,7 @@ class SafeNumbersBenchFloat {
@Benchmark
def decodeFommilUnsafeValid(): Array[Float] =
- valids.map(UnsafeNumbers.float(_, 128))
+ valids.map(UnsafeNumbers.float(_, 256))
@Benchmark
def decodeStdlibInvalid(): Array[FloatOption] = invalids.map(stdlib)
@@ -135,7 +135,7 @@ class SafeNumbersBenchFloat {
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(value = 1)
class SafeNumbersBenchBigDecimal {
- //@Param(Array("100", "1000"))
+ // @Param(Array("100", "1000"))
var size: Int = 10000
var invalids: Array[String] = _
@@ -182,7 +182,7 @@ class SafeNumbersBenchBigDecimal {
@Benchmark
def decodeFommilUnsafeValid(): Array[java.math.BigDecimal] =
- valids.map(UnsafeNumbers.bigDecimal(_, 128))
+ valids.map(UnsafeNumbers.bigDecimal(_, 256))
@Benchmark
def decodeStdlibInvalid(): Array[Option[java.math.BigDecimal]] =
diff --git a/zio-json/jvm/src/main/scala/zio/json/JsonDecoderPlatformSpecific.scala b/zio-json/jvm/src/main/scala/zio/json/JsonDecoderPlatformSpecific.scala
index 3394cf0d5..7e442f983 100644
--- a/zio-json/jvm/src/main/scala/zio/json/JsonDecoderPlatformSpecific.scala
+++ b/zio-json/jvm/src/main/scala/zio/json/JsonDecoderPlatformSpecific.scala
@@ -20,12 +20,13 @@ trait JsonDecoderPlatformSpecific[A] { self: JsonDecoder[A] =>
}
/**
- * Attempts to decode a stream of bytes using the user supplied Charset into a single value of type `A`, but may fail with
- * a human-readable exception if the stream does not encode a value of this type.
+ * Attempts to decode a stream of bytes using the user supplied Charset into a single value of type `A`, but may fail
+ * with a human-readable exception if the stream does not encode a value of this type.
*
* Note: This method may not consume the full string.
*
- * @see [[decodeJsonStream]] For a `Char` stream variant
+ * @see
+ * [[decodeJsonStream]] For a `Char` stream variant
*/
final def decodeJsonStreamInput[R](
stream: ZStream[R, Throwable, Byte],
@@ -41,12 +42,13 @@ trait JsonDecoderPlatformSpecific[A] { self: JsonDecoder[A] =>
}
/**
- * Attempts to decode a stream of characters into a single value of type `A`, but may fail with
- * a human-readable exception if the stream does not encode a value of this type.
+ * Attempts to decode a stream of characters into a single value of type `A`, but may fail with a human-readable
+ * exception if the stream does not encode a value of this type.
*
* Note: This method may not consume the full string.
*
- * @see also [[decodeJsonStreamInput]]
+ * @see
+ * also [[decodeJsonStreamInput]]
*/
final def decodeJsonStream[R](stream: ZStream[R, Throwable, Char]): ZIO[R, Throwable, A] =
ZIO.scoped[R](stream.toReader.flatMap(readAll))
@@ -54,7 +56,7 @@ trait JsonDecoderPlatformSpecific[A] { self: JsonDecoder[A] =>
final def decodeJsonPipeline(
delimiter: JsonStreamDelimiter = JsonStreamDelimiter.Array
): ZPipeline[Any, Throwable, Char, A] = {
- Unsafe.unsafeCompat { (u: Unsafe) =>
+ Unsafe.unsafe { (u: Unsafe) =>
implicit val unsafe: Unsafe = u
ZPipeline.fromPush {
@@ -121,7 +123,7 @@ trait JsonDecoderPlatformSpecific[A] { self: JsonDecoder[A] =>
throw new Exception(JsonError.render(trace))
}
- Unsafe.unsafeCompat { (u: Unsafe) =>
+ Unsafe.unsafe { (u: Unsafe) =>
implicit val unsafe: Unsafe = u
runtime.unsafe.run(outQueue.offer(Take.single(nextElem))).getOrThrow()
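
Note: the reflowed scaladoc above describes `decodeJsonStream` (Char stream) and `decodeJsonStreamInput` (Charset-aware Byte stream). A minimal usage sketch of the Char variant; `Ping` is a hypothetical model:

```scala
import zio._
import zio.json._
import zio.stream.ZStream

// Hypothetical model for illustration only.
final case class Ping(id: Int)
object Ping {
  implicit val decoder: JsonDecoder[Ping] = DeriveJsonDecoder.gen[Ping]
}

object DecodeStreamSketch extends ZIOAppDefault {
  // decodeJsonStream consumes a Char stream and yields a single value,
  // failing with a human-readable error if the stream does not encode a Ping.
  val chars: ZStream[Any, Throwable, Char] =
    ZStream.fromIterable("""{"id":1}""".toList)

  val run: ZIO[Any, Throwable, Unit] =
    JsonDecoder[Ping].decodeJsonStream(chars).flatMap(p => Console.printLine(p.toString))
}
```
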
diff --git a/zio-json/jvm/src/main/scala/zio/json/JsonEncoderPlatformSpecific.scala b/zio-json/jvm/src/main/scala/zio/json/JsonEncoderPlatformSpecific.scala
index bd4b995f6..3793ec0cc 100644
--- a/zio-json/jvm/src/main/scala/zio/json/JsonEncoderPlatformSpecific.scala
+++ b/zio-json/jvm/src/main/scala/zio/json/JsonEncoderPlatformSpecific.scala
@@ -17,14 +17,14 @@ trait JsonEncoderPlatformSpecific[A] { self: JsonEncoder[A] =>
delimiter: Option[Char],
endWith: Option[Char]
): ZPipeline[Any, Throwable, A, Char] =
- Unsafe.unsafeCompat { (u: Unsafe) =>
+ Unsafe.unsafe { (u: Unsafe) =>
implicit val unsafe: Unsafe = u
ZPipeline.fromPush {
for {
runtime <- ZIO.runtime[Any]
chunkBuffer <- Ref.make(Chunk.fromIterable(startWith.toList))
- writer <- ZIO.fromAutoCloseable {
+ writer <- ZIO.fromAutoCloseable {
ZIO.succeed {
new java.io.BufferedWriter(
new java.io.Writer {
@@ -43,17 +43,20 @@ trait JsonEncoderPlatformSpecific[A] { self: JsonEncoder[A] =>
)
}
}
- writeWriter <- ZIO.succeed(new WriteWriter(writer))
- push = { (is: Option[Chunk[A]]) =>
+ writeWriter <- ZIO.succeed(new WriteWriter(writer))
+ hasAtLeastOneElement <- Ref.make(false)
+ push = { (is: Option[Chunk[A]]) =>
val pushChars = chunkBuffer.getAndUpdate(c => if (c.isEmpty) c else Chunk())
is match {
case None =>
- ZIO.attemptBlocking(writer.close()) *> pushChars.map { terminal =>
- endWith.fold(terminal) { last =>
- // Chop off terminal delimiter
- (if (delimiter.isDefined) terminal.dropRight(1) else terminal) :+ last
- }
+ ZIO.attemptBlocking(writer.close()) *> pushChars.flatMap { terminal =>
+ hasAtLeastOneElement.get.map(nonEmptyStream =>
+ endWith.fold(terminal) { last =>
+ // Chop off terminal delimiter if stream is not empty
+ (if (delimiter.isDefined && nonEmptyStream) terminal.dropRight(1) else terminal) :+ last
+ }
+ )
}
case Some(xs) =>
@@ -64,16 +67,16 @@ trait JsonEncoderPlatformSpecific[A] { self: JsonEncoder[A] =>
for (s <- delimiter)
writeWriter.write(s)
}
- } *> pushChars
+ } *> hasAtLeastOneElement.set(true).when(xs.nonEmpty) *> pushChars
}
}
} yield push
}
}
- final val encodeJsonLinesPipeline: ZPipeline[Any, Throwable, A, Char] =
+ final lazy val encodeJsonLinesPipeline: ZPipeline[Any, Throwable, A, Char] =
encodeJsonDelimitedPipeline(None, Some('\n'), None)
- final val encodeJsonArrayPipeline: ZPipeline[Any, Throwable, A, Char] =
+ final lazy val encodeJsonArrayPipeline: ZPipeline[Any, Throwable, A, Char] =
encodeJsonDelimitedPipeline(Some('['), Some(','), Some(']'))
}
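
Note: the `hasAtLeastOneElement` flag added above exists so that an empty input stream renders as `[]` instead of having a non-existent trailing delimiter chopped off. A minimal usage sketch of the array pipeline, mirroring the new test later in this patch:

```scala
import zio._
import zio.json._
import zio.stream.{ ZSink, ZStream }

object ArrayPipelineSketch extends ZIOAppDefault {
  // Push a stream of values through encodeJsonArrayPipeline and collect the
  // emitted Chars into a String.
  def render(values: List[Int]): ZIO[Any, Throwable, String] =
    ZStream
      .fromIterable(values)
      .via(JsonEncoder[Int].encodeJsonArrayPipeline)
      .run(ZSink.mkString)

  val run =
    for {
      empty <- render(Nil)           // "[]"  (the case fixed above)
      some  <- render(List(1, 2, 3)) // "[1,2,3]"
      _     <- Console.printLine(s"$empty $some")
    } yield ()
}
```
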
diff --git a/zio-json/jvm/src/test/scala-2/zio/json/data/GoogleMaps.scala b/zio-json/jvm/src/test/scala-2/zio/json/data/GoogleMaps.scala
deleted file mode 100644
index 654c879ba..000000000
--- a/zio-json/jvm/src/test/scala-2/zio/json/data/GoogleMaps.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-package testzio.json.data.googlemaps
-
-import com.github.ghik.silencer.silent
-import com.github.plokhotnyuk.jsoniter_scala.macros.named
-import io.circe
-import play.api.libs.{ json => Play }
-import zio.json._
-
-final case class Value(
- text: String,
- @named("value")
- @jsonField("value")
- @circe.generic.extras.JsonKey("value")
- v: Int
-)
-final case class Elements(distance: Value, duration: Value, status: String)
-final case class Rows(elements: List[Elements])
-// @jsonNoExtraFields // entirely mitigates Attack1
-final case class DistanceMatrix(
- destination_addresses: List[String],
- origin_addresses: List[String],
- rows: List[Rows],
- status: String
-)
-
-@silent("Block result was adapted via implicit conversion")
-object Value {
- implicit val zioJsonJsonDecoder: JsonDecoder[Value] = DeriveJsonDecoder.gen[Value]
- implicit val zioJsonEncoder: JsonEncoder[Value] = DeriveJsonEncoder.gen[Value]
-
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[Value] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Value]
- implicit val circeEncoder: circe.Encoder[Value] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Value]
-
- // play macros don't support custom field
- // implicit val playJsonDecoder: Play.Reads[Value] = Play.Json.reads[Value]
-
- implicit val playJsonDecoder: Play.Reads[Value] = {
- import play.api.libs.json._
- import play.api.libs.json.Reads._
- import play.api.libs.functional.syntax._
-
- ((JsPath \ "text").read[String].and((JsPath \ "value").read[Int]))(
- Value.apply _
- )
- }
- implicit val playEncoder: Play.Writes[Value] = {
- import play.api.libs.json._
- import play.api.libs.json.Writes._
- import play.api.libs.functional.syntax._
-
- ((JsPath \ "text").write[String].and((JsPath \ "value").write[Int]))(unlift(Value.unapply))
- }
-
-}
-@silent("Block result was adapted via implicit conversion")
-object Elements {
- implicit val zioJsonJsonDecoder: JsonDecoder[Elements] = DeriveJsonDecoder.gen[Elements]
- implicit val zioJsonEncoder: JsonEncoder[Elements] = DeriveJsonEncoder.gen[Elements]
-
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[Elements] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Elements]
- implicit val circeEncoder: circe.Encoder[Elements] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Elements]
-
- implicit val playJsonDecoder: Play.Reads[Elements] = Play.Json.reads[Elements]
- implicit val playEncoder: Play.Writes[Elements] = Play.Json.writes[Elements]
-
-}
-@silent("Block result was adapted via implicit conversion")
-object Rows {
- implicit val zioJsonJsonDecoder: JsonDecoder[Rows] = DeriveJsonDecoder.gen[Rows]
- implicit val zioJsonEncoder: JsonEncoder[Rows] = DeriveJsonEncoder.gen[Rows]
-
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[Rows] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Rows]
- implicit val circeEncoder: circe.Encoder[Rows] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Rows]
-
- implicit val playJsonDecoder: Play.Reads[Rows] = Play.Json.reads[Rows]
- implicit val playEncoder: Play.Writes[Rows] = Play.Json.writes[Rows]
-
-}
-@silent("Block result was adapted via implicit conversion")
-object DistanceMatrix {
- implicit val zioJsonJsonDecoder: JsonDecoder[DistanceMatrix] =
- DeriveJsonDecoder.gen[DistanceMatrix]
- implicit val zioJsonEncoder: JsonEncoder[DistanceMatrix] =
- DeriveJsonEncoder.gen[DistanceMatrix]
-
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[DistanceMatrix] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[DistanceMatrix]
- implicit val circeEncoder: circe.Encoder[DistanceMatrix] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[DistanceMatrix]
-
- implicit val playJsonDecoder: Play.Reads[DistanceMatrix] =
- Play.Json.reads[DistanceMatrix]
- implicit val playEncoder: Play.Writes[DistanceMatrix] =
- Play.Json.writes[DistanceMatrix]
-
-}
diff --git a/zio-json/jvm/src/test/scala-2/zio/json/DecoderPlatformSpecificSpec.scala b/zio-json/jvm/src/test/scala/zio/json/DecoderPlatformSpecificSpec.scala
similarity index 95%
rename from zio-json/jvm/src/test/scala-2/zio/json/DecoderPlatformSpecificSpec.scala
rename to zio-json/jvm/src/test/scala/zio/json/DecoderPlatformSpecificSpec.scala
index eab496ca8..9e07b5517 100644
--- a/zio-json/jvm/src/test/scala-2/zio/json/DecoderPlatformSpecificSpec.scala
+++ b/zio-json/jvm/src/test/scala/zio/json/DecoderPlatformSpecificSpec.scala
@@ -1,13 +1,12 @@
-package testzio.json
+package zio.json
import io.circe
import org.typelevel.jawn.{ ast => jawn }
-import testzio.json.TestUtils._
-import testzio.json.data.googlemaps._
-import testzio.json.data.twitter._
import zio._
-import zio.json._
+import zio.json.TestUtils._
import zio.json.ast._
+import zio.json.data.googlemaps._
+import zio.json.data.twitter._
import zio.stream.ZStream
import zio.test.Assertion._
import zio.test.TestAspect._
@@ -65,28 +64,28 @@ object DecoderPlatformSpecificSpec extends ZIOSpecDefault {
}
},
test("geojson1") {
- import testzio.json.data.geojson.generated._
+ import zio.json.data.geojson.generated._
getResourceAsStringM("che.geo.json").map { str =>
assert(str.fromJson[GeoJSON])(matchesCirceDecoded[GeoJSON](str))
}
},
test("geojson1 alt") {
- import testzio.json.data.geojson.handrolled._
+ import zio.json.data.geojson.handrolled._
getResourceAsStringM("che.geo.json").map { str =>
assert(str.fromJson[GeoJSON])(matchesCirceDecoded[GeoJSON](str))
}
},
test("geojson2") {
- import testzio.json.data.geojson.generated._
+ import zio.json.data.geojson.generated._
getResourceAsStringM("che-2.geo.json").map { str =>
assert(str.fromJson[GeoJSON])(matchesCirceDecoded[GeoJSON](str))
}
},
test("geojson2 lowlevel") {
- import testzio.json.data.geojson.generated._
+ import zio.json.data.geojson.generated._
// this uses a lower level Reader to ensure that the more general recorder
// impl is covered by the tests
@@ -184,7 +183,7 @@ object DecoderPlatformSpecificSpec extends ZIOSpecDefault {
.map { exit =>
assert(exit)(isInterrupted)
}
- } @@ timeout(2.seconds)
+ } @@ timeout(7.seconds)
),
suite("Array delimited")(
test("decodes single elements") {
@@ -263,11 +262,11 @@ object DecoderPlatformSpecificSpec extends ZIOSpecDefault {
test("test hand-coded alternative in `orElse` comment") {
val decoder: JsonDecoder[AnyVal] = JsonDecoder.peekChar[AnyVal] {
case 't' | 'f' => JsonDecoder[Boolean].widen
- case c => JsonDecoder[Int].widen
+ case _ => JsonDecoder[Int].widen
}
assert(decoder.decodeJson("true"))(equalTo(Right(true.asInstanceOf[AnyVal]))) &&
assert(decoder.decodeJson("42"))(equalTo(Right(42.asInstanceOf[AnyVal]))) &&
- assert(decoder.decodeJson("\"a string\""))(equalTo(Left("(expected a number, got a)")))
+ assert(decoder.decodeJson("\"a string\""))(equalTo(Left("(expected an Int)")))
}
)
)
@@ -338,7 +337,7 @@ object DecoderPlatformSpecificSpec extends ZIOSpecDefault {
// Helper function because Circe and Zio-JSON’s Left differ, making tests unnecessary verbose
def matchesCirceDecoded[A](
expected: String
- )(implicit cDecoder: circe.Decoder[A], eq: Eql[A, A]): Assertion[Either[String, A]] = {
+ )(implicit cDecoder: circe.Decoder[A]): Assertion[Either[String, A]] = {
val cDecoded = circe.parser.decode(expected).left.map(_.toString)
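
Note: the updated test above exercises `JsonDecoder.peekChar`, the hand-coded alternative mentioned in the `orElse` scaladoc: inspect the next character and dispatch to a decoder without backtracking. A small sketch of the same idea, assuming `peekChar` and `mapOrFail` are available as public API as the test suggests; the "lenient Int" decoder is purely illustrative:

```scala
import zio.json._

object PeekCharSketch {
  // Accept an Int that may arrive either bare (42) or quoted ("42"),
  // choosing the decoder from the first character instead of backtracking.
  val lenientInt: JsonDecoder[Int] =
    JsonDecoder.peekChar[Int] {
      case '"' => JsonDecoder[String].mapOrFail(s => s.toIntOption.toRight(s"not an Int: $s"))
      case _   => JsonDecoder[Int]
    }

  def main(args: Array[String]): Unit = {
    println(lenientInt.decodeJson("42"))     // Right(42)
    println(lenientInt.decodeJson("\"42\"")) // Right(42)
    println(lenientInt.decodeJson("\"xx\"")) // Left(...)
  }
}
```
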
diff --git a/zio-json/jvm/src/test/scala-2/zio/json/EncoderPlatformSpecificSpec.scala b/zio-json/jvm/src/test/scala/zio/json/EncoderPlatformSpecificSpec.scala
similarity index 77%
rename from zio-json/jvm/src/test/scala-2/zio/json/EncoderPlatformSpecificSpec.scala
rename to zio-json/jvm/src/test/scala/zio/json/EncoderPlatformSpecificSpec.scala
index ba37179d5..8d50d34e9 100644
--- a/zio-json/jvm/src/test/scala-2/zio/json/EncoderPlatformSpecificSpec.scala
+++ b/zio-json/jvm/src/test/scala/zio/json/EncoderPlatformSpecificSpec.scala
@@ -1,13 +1,13 @@
package zio.json
import io.circe
-import testzio.json.TestUtils._
-import testzio.json.data.geojson.generated._
-import testzio.json.data.googlemaps._
-import testzio.json.data.twitter._
import zio.Chunk
+import zio.json.TestUtils._
import zio.json.ast.Json
-import zio.stream.ZStream
+import zio.json.data.geojson.generated._
+import zio.json.data.googlemaps._
+import zio.json.data.twitter._
+import zio.stream.{ ZSink, ZStream }
import zio.test.Assertion._
import zio.test.{ ZIOSpecDefault, assert, _ }
@@ -15,7 +15,7 @@ import java.io.IOException
import java.nio.file.Files
object EncoderPlatformSpecificSpec extends ZIOSpecDefault {
- import testzio.json.DecoderSpec.logEvent._
+ import zio.json.DecoderSpec.logEvent._
val spec =
suite("Encoder")(
@@ -75,14 +75,30 @@ object EncoderPlatformSpecificSpec extends ZIOSpecDefault {
} yield {
assert(xs.mkString)(equalTo("""[{"id":1},{"id":2},{"id":3}]"""))
}
+ },
+ test("encodeJsonArrayPipeline, empty stream") {
+ val emptyArray = ZStream
+ .from(List())
+ .via(JsonEncoder[String].encodeJsonArrayPipeline)
+ .run(ZSink.mkString)
+
+ assertZIO(emptyArray)(equalTo("[]"))
}
),
suite("helpers in zio.json")(
test("writeJsonLines writes JSON lines") {
- val path = Files.createTempFile("log", "json")
+ val path = Files.createTempFile("log", "json")
val events = Chunk(
- Event(1603669876, "hello"),
- Event(1603669875, "world")
+ Event(1, "hello", priority = 1111.1111111),
+ Event(12, "hello", priority = 11111111.111),
+ Event(123, "world", priority = 1.1111111111),
+ Event(1234, "world"),
+ Event(12345, "world"),
+ Event(123456, "world"),
+ Event(1234567, "world"),
+ Event(12345678, "world"),
+ Event(123456789, "world"),
+ Event(1234567890, "world", true)
)
for {
diff --git a/zio-json/jvm/src/test/scala/zio/json/JsonTestSuiteSpec.scala b/zio-json/jvm/src/test/scala/zio/json/JsonTestSuiteSpec.scala
index a3edd89bc..f3883e797 100644
--- a/zio-json/jvm/src/test/scala/zio/json/JsonTestSuiteSpec.scala
+++ b/zio-json/jvm/src/test/scala/zio/json/JsonTestSuiteSpec.scala
@@ -1,8 +1,7 @@
-package testzio.json
+package zio.json
-import testzio.json.TestUtils._
+import zio.json.TestUtils._
import zio._
-import zio.json._
import zio.json.ast.Json
import zio.test.Assertion._
import zio.test.TestAspect._
@@ -21,7 +20,7 @@ object JsonTestSuiteSpec extends ZIOSpecDefault {
a <- ZIO.foreach(f.sorted) { path =>
for {
input <- getResourceAsStringM(s"json_test_suite/$path")
- exit <- ZIO.succeed {
+ exit <- ZIO.succeed {
// Catch Stack overflow
try {
JsonDecoder[Json]
diff --git a/zio-json/jvm/src/test/scala/zio/json/TestUtils.scala b/zio-json/jvm/src/test/scala/zio/json/TestUtils.scala
index e96cfdae7..191f221dc 100644
--- a/zio-json/jvm/src/test/scala/zio/json/TestUtils.scala
+++ b/zio-json/jvm/src/test/scala/zio/json/TestUtils.scala
@@ -1,4 +1,4 @@
-package testzio.json
+package zio.json
import zio._
import zio.stream._
@@ -15,9 +15,9 @@ object TestUtils {
def getResourceAsString(res: String): String = {
val is = getClass.getClassLoader.getResourceAsStream(res)
try {
- val baos = new java.io.ByteArrayOutputStream()
- val data = Array.ofDim[Byte](2048)
- var len: Int = 0
+ val baos = new java.io.ByteArrayOutputStream()
+ val data = Array.ofDim[Byte](2048)
+ var len: Int = 0
def read(): Int = { len = is.read(data); len }
while (read() != -1)
baos.write(data, 0, len)
diff --git a/zio-json/jvm/src/test/scala-2/zio/json/data/GeoJSON.scala b/zio-json/jvm/src/test/scala/zio/json/data/geojson/GeoJSON.scala
similarity index 53%
rename from zio-json/jvm/src/test/scala-2/zio/json/data/GeoJSON.scala
rename to zio-json/jvm/src/test/scala/zio/json/data/geojson/GeoJSON.scala
index 04f4605e6..9d22edcf4 100644
--- a/zio-json/jvm/src/test/scala-2/zio/json/data/GeoJSON.scala
+++ b/zio-json/jvm/src/test/scala/zio/json/data/geojson/GeoJSON.scala
@@ -1,19 +1,13 @@
-package testzio.json.data.geojson
+package zio.json.data.geojson
-import ai.x.play.json.Encoders.encoder
-import ai.x.play.json.{ Jsonx => Playx }
-import io.circe
-import play.api.libs.{ json => Play }
import zio.json._
import zio.json.ast._
-
-object playtuples extends Play.GeneratedReads with Play.GeneratedWrites
-import playtuples._
+import io.circe.{ Codec, Decoder, Encoder }
+import io.circe.generic.semiauto.deriveCodec
+import io.circe.syntax.EncoderOps
package generated {
- import com.github.ghik.silencer.silent
-
@jsonDiscriminator("type")
sealed abstract class Geometry
final case class Point(coordinates: (Double, Double)) extends Geometry
@@ -33,58 +27,72 @@ package generated {
features: List[GeoJSON] // NOTE: recursive
) extends GeoJSON
- @silent("Block result was adapted via implicit conversion")
object Geometry {
implicit lazy val zioJsonJsonDecoder: JsonDecoder[Geometry] =
DeriveJsonDecoder.gen[Geometry]
implicit lazy val zioJsonEncoder: JsonEncoder[Geometry] =
DeriveJsonEncoder.gen[Geometry]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- .copy(discriminator = Some("type"))
- implicit lazy val circeJsonDecoder: circe.Decoder[Geometry] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Geometry]
- implicit lazy val circeEncoder: circe.Encoder[Geometry] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Geometry]
-
- // it's not clear why this needs the extras package...
- implicit val playPoint: Play.Format[Point] = Playx.formatCaseClass[Point]
- implicit val playMultiPoint: Play.Format[MultiPoint] = Play.Json.format[MultiPoint]
- implicit val playLineString: Play.Format[LineString] = Play.Json.format[LineString]
- implicit val playMultiLineString: Play.Format[MultiLineString] = Play.Json.format[MultiLineString]
- implicit val playPolygon: Play.Format[Polygon] = Play.Json.format[Polygon]
- implicit val playMultiPolygon: Play.Format[MultiPolygon] = Play.Json.format[MultiPolygon]
- implicit lazy val playGeometryCollection: Play.Format[GeometryCollection] = Play.Json.format[GeometryCollection]
- implicit val playFormatter: Play.Format[Geometry] = Playx.formatSealed[Geometry]
-
+ implicit lazy val circeJsonCodec: Codec[Geometry] = {
+ implicit val c1: Codec[Point] = deriveCodec
+ implicit val c2: Codec[MultiPoint] = deriveCodec
+ implicit val c3: Codec[LineString] = deriveCodec
+ implicit val c4: Codec[MultiLineString] = deriveCodec
+ implicit val c5: Codec[Polygon] = deriveCodec
+ implicit val c6: Codec[MultiPolygon] = deriveCodec
+ implicit val c8: Codec[GeometryCollection] = deriveCodec
+ Codec.from(
+ Decoder.instance(c =>
+ c.downField("type").as[String].flatMap {
+ case "Point" => c.as[Point]
+ case "MultiPoint" => c.as[MultiPoint]
+ case "LineString" => c.as[LineString]
+ case "MultiLineString" => c.as[MultiLineString]
+ case "Polygon" => c.as[Polygon]
+ case "MultiPolygon" => c.as[MultiPolygon]
+ case "GeometryCollection" => c.as[GeometryCollection]
+ }
+ ),
+ Encoder.instance {
+ case x: Point => x.asJson.mapObject(_.+:("type" -> "Point".asJson))
+ case x: MultiPoint => x.asJson.mapObject(_.+:("type" -> "MultiPoint".asJson))
+ case x: LineString => x.asJson.mapObject(_.+:("type" -> "LineString".asJson))
+ case x: MultiLineString => x.asJson.mapObject(_.+:("type" -> "MultiLineString".asJson))
+ case x: Polygon => x.asJson.mapObject(_.+:("type" -> "Polygon".asJson))
+ case x: MultiPolygon => x.asJson.mapObject(_.+:("type" -> "MultiPolygon".asJson))
+ case x: GeometryCollection => x.asJson.mapObject(_.+:("type" -> "GeometryCollection".asJson))
+ }
+ )
+ }
}
- @silent("Block result was adapted via implicit conversion")
+
object GeoJSON {
implicit lazy val zioJsonJsonDecoder: JsonDecoder[GeoJSON] =
DeriveJsonDecoder.gen[GeoJSON]
implicit lazy val zioJsonEncoder: JsonEncoder[GeoJSON] =
DeriveJsonEncoder.gen[GeoJSON]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- .copy(discriminator = Some("type"))
- implicit lazy val circeJsonDecoder: circe.Decoder[GeoJSON] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[GeoJSON]
- implicit lazy val circeEncoder: circe.Encoder[GeoJSON] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[GeoJSON]
-
- implicit val playFeature: Play.Format[Feature] = Play.Json.format[Feature]
- implicit lazy val playFeatureCollection: Play.Format[FeatureCollection] = Play.Json.format[FeatureCollection]
- implicit val playFormatter: Play.Format[GeoJSON] = Playx.formatSealed[GeoJSON]
-
+ implicit lazy val circeCodec: Codec[GeoJSON] = {
+ implicit val c1: Codec[Feature] = deriveCodec
+ implicit val c2: Codec[FeatureCollection] = deriveCodec
+ Codec.from(
+ Decoder.instance(c =>
+ c.downField("type").as[String].flatMap {
+ case "Feature" => c.as[Feature]
+ case "FeatureCollection" => c.as[FeatureCollection]
+ }
+ ),
+ Encoder.instance {
+ case x: Feature => x.asJson.mapObject(_.+:("type" -> "Feature".asJson))
+ case x: FeatureCollection => x.asJson.mapObject(_.+:("type" -> "FeatureCollection".asJson))
+ }
+ )
+ }
}
}
package handrolled {
- import com.github.ghik.silencer.silent
-
sealed abstract class Geometry
final case class Point(coordinates: (Double, Double)) extends Geometry
final case class MultiPoint(coordinates: List[(Double, Double)]) extends Geometry
@@ -102,7 +110,6 @@ package handrolled {
features: List[GeoJSON] // NOTE: recursive
) extends GeoJSON
- @silent("Block result was adapted via implicit conversion")
object Geometry {
// this is an example of a handrolled decoder that avoids using the
// backtracking algorithm that is normally used for sealed traits with a
@@ -114,13 +121,16 @@ package handrolled {
// custom decoder (below) which is necessary to avert a DOS attack.
implicit lazy val zioJsonJsonDecoder: JsonDecoder[Geometry] =
new JsonDecoder[Geometry] {
- import zio.json._, internal._, JsonDecoder.{ JsonError, UnsafeJson }
+ import zio.json._
+ import JsonDecoder.JsonError
+ import internal._
+
import scala.annotation._
val names: Array[String] = Array("type", "coordinates", "geometries")
val matrix: StringMatrix = new StringMatrix(names)
val spans: Array[JsonError] = names.map(JsonError.ObjectAccess(_))
- val subtypes: StringMatrix = new StringMatrix(
+ val subtypes: StringMatrix = new StringMatrix(
Array(
"Point",
"MultiPoint",
@@ -141,11 +151,8 @@ package handrolled {
js match {
case Json.Arr(chunk)
if chunk.length == 2 && chunk(0).isInstanceOf[Json.Num] && chunk(1).isInstanceOf[Json.Num] =>
- (chunk(0).asInstanceOf[Json.Num].value.doubleValue(), chunk(1).asInstanceOf[Json.Num].value.doubleValue())
- case _ =>
- throw UnsafeJson(
- JsonError.Message("expected coordinates") :: trace
- )
+ (chunk(0).asInstanceOf[Json.Num].value.doubleValue, chunk(1).asInstanceOf[Json.Num].value.doubleValue)
+ case _ => Lexer.error("expected coordinates", trace)
}
def coordinates1(
trace: List[JsonError],
@@ -153,8 +160,7 @@ package handrolled {
): List[(Double, Double)] =
js.elements.map {
case js1: Json.Arr => coordinates0(trace, js1)
- case _ =>
- throw UnsafeJson(JsonError.Message("expected list") :: trace)
+ case _ => Lexer.error("expected list", trace)
}.toList
def coordinates2(
trace: List[JsonError],
@@ -162,8 +168,7 @@ package handrolled {
): List[List[(Double, Double)]] =
js.elements.map {
case js1: Json.Arr => coordinates1(trace, js1)
- case _ =>
- throw UnsafeJson(JsonError.Message("expected list") :: trace)
+ case _ => Lexer.error("expected list", trace)
}.toList
def coordinates3(
trace: List[JsonError],
@@ -171,8 +176,7 @@ package handrolled {
): List[List[List[(Double, Double)]]] =
js.elements.map {
case js1: Json.Arr => coordinates2(trace, js1)
- case _ =>
- throw UnsafeJson(JsonError.Message("expected list") :: trace)
+ case _ => Lexer.error("expected list", trace)
}.toList
def unsafeDecode(
@@ -186,46 +190,32 @@ package handrolled {
var subtype: Int = -1
if (Lexer.firstField(trace, in))
- do {
+ while ({
val field = Lexer.field(trace, in, matrix)
if (field == -1) Lexer.skipValue(trace, in)
else {
val trace_ = spans(field) :: trace
(field: @switch) match {
case 0 =>
- if (subtype != -1)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
+ if (subtype != -1) Lexer.error("duplicate", trace_)
subtype = Lexer.enumeration(trace_, in, subtypes)
case 1 =>
- if (coordinates != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
+ if (coordinates != null) Lexer.error("duplicate", trace_)
coordinates = coordinatesD.unsafeDecode(trace_, in)
case 2 =>
- if (geometries != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+ if (geometries != null) Lexer.error("duplicate", trace_)
geometries = geometriesD.unsafeDecode(trace_, in)
}
}
- } while (Lexer.nextField(trace, in))
-
- if (subtype == -1)
- throw UnsafeJson(
- JsonError.Message("missing discriminator") :: trace
- )
+ Lexer.nextField(trace, in)
+ }) ()
+ if (subtype == -1) Lexer.error("missing discriminator", trace)
if (subtype == 6) {
- if (geometries == null)
- throw UnsafeJson(
- JsonError.Message("missing 'geometries' field") :: trace
- )
+ if (geometries == null) Lexer.error("missing 'geometries' field", trace)
else GeometryCollection(geometries)
}
-
- if (coordinates == null)
- throw UnsafeJson(
- JsonError.Message("missing 'coordinates' field") :: trace
- )
+ if (coordinates == null) Lexer.error("missing 'coordinates' field", trace)
val trace_ = spans(1) :: trace
(subtype: @switch) match {
case 0 => Point(coordinates0(trace_, coordinates))
@@ -240,25 +230,39 @@ package handrolled {
}
implicit lazy val zioJsonEncoder: JsonEncoder[Geometry] =
DeriveJsonEncoder.gen[Geometry]
-
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- .copy(discriminator = Some("type"))
- implicit lazy val circeJsonDecoder: circe.Decoder[Geometry] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Geometry]
- implicit lazy val circeEncoder: circe.Encoder[Geometry] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Geometry]
- implicit val playPoint: Play.Format[Point] = Playx.formatCaseClass[Point]
- implicit val playMultiPoint: Play.Format[MultiPoint] = Play.Json.format[MultiPoint]
- implicit val playLineString: Play.Format[LineString] = Play.Json.format[LineString]
- implicit val playMultiLineString: Play.Format[MultiLineString] = Play.Json.format[MultiLineString]
- implicit val playPolygon: Play.Format[Polygon] = Play.Json.format[Polygon]
- implicit val playMultiPolygon: Play.Format[MultiPolygon] = Play.Json.format[MultiPolygon]
- implicit lazy val playGeometryCollection: Play.Format[GeometryCollection] = Play.Json.format[GeometryCollection]
- implicit val playFormatter: Play.Format[Geometry] = Playx.formatSealed[Geometry]
-
+ implicit lazy val circeJsonCodec: Codec[Geometry] = {
+ implicit val c1: Codec[Point] = deriveCodec
+ implicit val c2: Codec[MultiPoint] = deriveCodec
+ implicit val c3: Codec[LineString] = deriveCodec
+ implicit val c4: Codec[MultiLineString] = deriveCodec
+ implicit val c5: Codec[Polygon] = deriveCodec
+ implicit val c6: Codec[MultiPolygon] = deriveCodec
+ implicit val c8: Codec[GeometryCollection] = deriveCodec
+ Codec.from(
+ Decoder.instance(c =>
+ c.downField("type").as[String].flatMap {
+ case "Point" => c.as[Point]
+ case "MultiPoint" => c.as[MultiPoint]
+ case "LineString" => c.as[LineString]
+ case "MultiLineString" => c.as[MultiLineString]
+ case "Polygon" => c.as[Polygon]
+ case "MultiPolygon" => c.as[MultiPolygon]
+ case "GeometryCollection" => c.as[GeometryCollection]
+ }
+ ),
+ Encoder.instance {
+ case x: Point => x.asJson.mapObject(_.+:("type" -> "Point".asJson))
+ case x: MultiPoint => x.asJson.mapObject(_.+:("type" -> "MultiPoint".asJson))
+ case x: LineString => x.asJson.mapObject(_.+:("type" -> "LineString".asJson))
+ case x: MultiLineString => x.asJson.mapObject(_.+:("type" -> "MultiLineString".asJson))
+ case x: Polygon => x.asJson.mapObject(_.+:("type" -> "Polygon".asJson))
+ case x: MultiPolygon => x.asJson.mapObject(_.+:("type" -> "MultiPolygon".asJson))
+ case x: GeometryCollection => x.asJson.mapObject(_.+:("type" -> "GeometryCollection".asJson))
+ }
+ )
+ }
}
- @silent("Block result was adapted via implicit conversion")
+
object GeoJSON {
// This uses a hand rolled decoder that guesses the type based on the field
// names to protect against attack vectors that put the hint at the end of
@@ -267,19 +271,22 @@ package handrolled {
// of a corner case.
implicit lazy val zioJsonJsonDecoder: JsonDecoder[GeoJSON] =
new JsonDecoder[GeoJSON] {
- import zio.json._, internal._, JsonDecoder.{ JsonError, UnsafeJson }
+ import zio.json._
+ import JsonDecoder.JsonError
+ import internal._
+
import scala.annotation._
val names: Array[String] =
Array("type", "properties", "geometry", "features")
val matrix: StringMatrix = new StringMatrix(names)
val spans: Array[JsonError] = names.map(JsonError.ObjectAccess(_))
- val subtypes: StringMatrix = new StringMatrix(
+ val subtypes: StringMatrix = new StringMatrix(
Array("Feature", "FeatureCollection")
)
val propertyD: JsonDecoder[Map[String, String]] =
JsonDecoder[Map[String, String]]
- val geometryD: JsonDecoder[Geometry] = JsonDecoder[Geometry]
+ val geometryD: JsonDecoder[Geometry] = JsonDecoder[Geometry]
lazy val featuresD: JsonDecoder[List[GeoJSON]] =
JsonDecoder[List[GeoJSON]] // recursive
@@ -292,58 +299,37 @@ package handrolled {
var subtype: Int = -1
if (Lexer.firstField(trace, in))
- do {
+ while ({
val field = Lexer.field(trace, in, matrix)
if (field == -1) Lexer.skipValue(trace, in)
else {
val trace_ = spans(field) :: trace
(field: @switch) match {
case 0 =>
- if (subtype != -1)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+ if (subtype != -1) Lexer.error("duplicate", trace_)
subtype = Lexer.enumeration(trace_, in, subtypes)
case 1 =>
- if (properties != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+ if (properties != null) Lexer.error("duplicate", trace_)
properties = propertyD.unsafeDecode(trace_, in)
case 2 =>
- if (geometry != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+ if (geometry != null) Lexer.error("duplicate", trace_)
geometry = geometryD.unsafeDecode(trace_, in)
case 3 =>
- if (features != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+ if (features != null) Lexer.error("duplicate", trace_)
features = featuresD.unsafeDecode(trace_, in)
}
}
- } while (Lexer.nextField(trace, in))
-
- if (subtype == -1)
- // we could infer the type but that would mean accepting invalid data
- throw UnsafeJson(
- JsonError.Message("missing required fields") :: trace
- )
+ Lexer.nextField(trace, in)
+ }) ()
+ // we could infer the type but that would mean accepting invalid data
+ if (subtype == -1) Lexer.error("missing required fields", trace)
if (subtype == 0) {
- if (properties == null)
- throw UnsafeJson(
- JsonError.Message("missing 'properties' field") :: trace
- )
- if (geometry == null)
- throw UnsafeJson(
- JsonError.Message("missing 'geometry' field") :: trace
- )
+ if (properties == null) Lexer.error("missing 'properties' field", trace)
+ if (geometry == null) Lexer.error("missing 'geometry' field", trace)
Feature(properties, geometry)
} else {
-
- if (features == null)
- throw UnsafeJson(
- JsonError.Message("missing 'features' field") :: trace
- )
+ if (features == null) Lexer.error("missing 'features' field", trace)
FeatureCollection(features)
}
}
@@ -351,19 +337,21 @@ package handrolled {
}
implicit lazy val zioJsonEncoder: JsonEncoder[GeoJSON] =
DeriveJsonEncoder.gen[GeoJSON]
-
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- .copy(discriminator = Some("type"))
- implicit lazy val circeJsonDecoder: circe.Decoder[GeoJSON] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[GeoJSON]
- implicit lazy val circeEncoder: circe.Encoder[GeoJSON] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[GeoJSON]
-
- implicit val playFeature: Play.Format[Feature] = Play.Json.format[Feature]
- implicit lazy val playFeatureCollection: Play.Format[FeatureCollection] = Play.Json.format[FeatureCollection]
-
- implicit val playFormatter: Play.Format[GeoJSON] = Playx.formatSealed[GeoJSON]
-
+ implicit lazy val circeCodec: Codec[GeoJSON] = {
+ implicit val c1: Codec[Feature] = deriveCodec
+ implicit val c2: Codec[FeatureCollection] = deriveCodec
+ Codec.from(
+ Decoder.instance(c =>
+ c.downField("type").as[String].flatMap {
+ case "Feature" => c.as[Feature]
+ case "FeatureCollection" => c.as[FeatureCollection]
+ }
+ ),
+ Encoder.instance {
+ case x: Feature => x.asJson.mapObject(_.+:("type" -> "Feature".asJson))
+ case x: FeatureCollection => x.asJson.mapObject(_.+:("type" -> "FeatureCollection".asJson))
+ }
+ )
+ }
}
}
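
Note: with circe-generic-extras gone, the file above hand-rolls the discriminator handling via `Codec.from`, reading the `"type"` field on decode and prepending it again on encode. A distilled sketch of the same pattern on a small stand-in ADT (`Shape`/`Circle`/`Square` are illustrative, with an explicit failure for unknown tags added for safety):

```scala
import io.circe.{ Codec, Decoder, DecodingFailure, Encoder }
import io.circe.generic.semiauto.deriveCodec
import io.circe.syntax.EncoderOps

// Stand-in ADT; GeoJSON.scala applies the same pattern to Geometry/GeoJSON.
sealed trait Shape
final case class Circle(radius: Double) extends Shape
final case class Square(side: Double)   extends Shape

object Shape {
  implicit val circleCodec: Codec[Circle] = deriveCodec
  implicit val squareCodec: Codec[Square] = deriveCodec

  // Decode: dispatch on the "type" field. Encode: derive the body, then
  // prepend the discriminator so round-trips preserve it.
  implicit val codec: Codec[Shape] = Codec.from(
    Decoder.instance { c =>
      c.downField("type").as[String].flatMap {
        case "Circle" => c.as[Circle]
        case "Square" => c.as[Square]
        case other    => Left(DecodingFailure(s"unknown type: $other", c.history))
      }
    },
    Encoder.instance {
      case x: Circle => x.asJson.mapObject(_.+:("type" -> "Circle".asJson))
      case x: Square => x.asJson.mapObject(_.+:("type" -> "Square".asJson))
    }
  )
}
```
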
diff --git a/zio-json/jvm/src/test/scala/zio/json/data/googlemaps/GoogleMaps.scala b/zio-json/jvm/src/test/scala/zio/json/data/googlemaps/GoogleMaps.scala
new file mode 100644
index 000000000..c5719d2b8
--- /dev/null
+++ b/zio-json/jvm/src/test/scala/zio/json/data/googlemaps/GoogleMaps.scala
@@ -0,0 +1,52 @@
+package zio.json.data.googlemaps
+
+import com.github.plokhotnyuk.jsoniter_scala.macros.named
+import io.circe.Codec
+import io.circe.generic.semiauto.deriveCodec
+import zio.json._
+
+final case class Value(
+ text: String,
+ @named("value")
+ @jsonField("value")
+ value: Int
+)
+final case class Elements(distance: Value, duration: Value, status: String)
+final case class Rows(elements: List[Elements])
+// @jsonNoExtraFields // entirely mitigates Attack1
+final case class DistanceMatrix(
+ destination_addresses: List[String],
+ origin_addresses: List[String],
+ rows: List[Rows],
+ status: String
+)
+
+object Value {
+ implicit val zioJsonJsonDecoder: JsonDecoder[Value] = DeriveJsonDecoder.gen[Value]
+ implicit val zioJsonEncoder: JsonEncoder[Value] = DeriveJsonEncoder.gen[Value]
+
+ implicit val circeCodec: Codec[Value] = deriveCodec
+}
+
+object Elements {
+ implicit val zioJsonJsonDecoder: JsonDecoder[Elements] = DeriveJsonDecoder.gen[Elements]
+ implicit val zioJsonEncoder: JsonEncoder[Elements] = DeriveJsonEncoder.gen[Elements]
+
+ implicit val circeCodec: Codec[Elements] = deriveCodec
+}
+
+object Rows {
+ implicit val zioJsonJsonDecoder: JsonDecoder[Rows] = DeriveJsonDecoder.gen[Rows]
+ implicit val zioJsonEncoder: JsonEncoder[Rows] = DeriveJsonEncoder.gen[Rows]
+
+ implicit val circeCodec: Codec[Rows] = deriveCodec
+}
+
+object DistanceMatrix {
+ implicit val zioJsonJsonDecoder: JsonDecoder[DistanceMatrix] =
+ DeriveJsonDecoder.gen[DistanceMatrix]
+ implicit val zioJsonEncoder: JsonEncoder[DistanceMatrix] =
+ DeriveJsonEncoder.gen[DistanceMatrix]
+
+ implicit val circeCodec: Codec[DistanceMatrix] = deriveCodec
+}
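
Note: in the new `Value` model above the field is literally named `value`, so `@named("value")`/`@jsonField("value")` are effectively no-ops kept for parity with the deleted scala-2 model, where the Scala field was `v`. A small sketch of what those annotations do when the names actually differ; `Reading` is a hypothetical type:

```scala
import com.github.plokhotnyuk.jsoniter_scala.core._
import com.github.plokhotnyuk.jsoniter_scala.macros._
import zio.json._

// The wire name stays "value" even though the Scala field is called `v`:
// @named steers jsoniter-scala's codec, @jsonField steers zio-json's.
final case class Reading(
  text: String,
  @named("value")
  @jsonField("value")
  v: Int
)

object Reading {
  implicit val jsoniterCodec: JsonValueCodec[Reading] = JsonCodecMaker.make
  implicit val zioDecoder: JsonDecoder[Reading]       = DeriveJsonDecoder.gen[Reading]
  implicit val zioEncoder: JsonEncoder[Reading]       = DeriveJsonEncoder.gen[Reading]
}

object ReadingDemo {
  def main(args: Array[String]): Unit = {
    val json = """{"text":"1 km","value":1000}"""
    println(readFromString[Reading](json))        // Reading(1 km,1000)
    println(json.fromJson[Reading])               // Right(Reading(1 km,1000))
    println(writeToString(Reading("1 km", 1000))) // {"text":"1 km","value":1000}
  }
}
```
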
diff --git a/zio-json/jvm/src/test/scala-2/zio/json/data/Twitter.scala b/zio-json/jvm/src/test/scala/zio/json/data/twitter/Twitter.scala
similarity index 51%
rename from zio-json/jvm/src/test/scala-2/zio/json/data/Twitter.scala
rename to zio-json/jvm/src/test/scala/zio/json/data/twitter/Twitter.scala
index daee6afa5..0b289aaef 100644
--- a/zio-json/jvm/src/test/scala-2/zio/json/data/Twitter.scala
+++ b/zio-json/jvm/src/test/scala/zio/json/data/twitter/Twitter.scala
@@ -1,10 +1,7 @@
-package testzio.json.data.twitter
+package zio.json.data.twitter
-import ai.x.play.json.Encoders.encoder
-import ai.x.play.json.{ Jsonx => Playx }
-import com.github.ghik.silencer.silent
-import io.circe
-import play.api.libs.{ json => Play }
+import io.circe.Codec
+import io.circe.generic.semiauto.deriveCodec
import zio.json._
case class Urls(
@@ -13,45 +10,28 @@ case class Urls(
display_url: String,
indices: List[Int]
)
-@silent("Block result was adapted via implicit conversion")
object Urls {
implicit val jJsonDecoder: JsonDecoder[Urls] = DeriveJsonDecoder.gen[Urls]
implicit val jEncoder: JsonEncoder[Urls] = DeriveJsonEncoder.gen[Urls]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[Urls] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Urls]
- implicit val circeEncoder: circe.Encoder[Urls] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Urls]
- implicit val playFormatter: Play.Format[Urls] = Play.Json.format[Urls]
+
+ implicit val circeCodec: Codec[Urls] = deriveCodec
}
case class Url(urls: List[Urls])
-@silent("Block result was adapted via implicit conversion")
+
object Url {
implicit val jJsonDecoder: JsonDecoder[Url] = DeriveJsonDecoder.gen[Url]
implicit val jEncoder: JsonEncoder[Url] = DeriveJsonEncoder.gen[Url]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[Url] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Url]
- implicit val circeEncoder: circe.Encoder[Url] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Url]
- implicit val playFormatter: Play.Format[Url] = Play.Json.format[Url]
+
+ implicit val circeCodec: Codec[Url] = deriveCodec
}
case class UserEntities(url: Url, description: Url)
-@silent("Block result was adapted via implicit conversion")
+
object UserEntities {
implicit val jJsonDecoder: JsonDecoder[UserEntities] = DeriveJsonDecoder.gen[UserEntities]
implicit val jEncoder: JsonEncoder[UserEntities] = DeriveJsonEncoder.gen[UserEntities]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[UserEntities] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[UserEntities]
- implicit val circeEncoder: circe.Encoder[UserEntities] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[UserEntities]
- implicit val playFormatter: Play.Format[UserEntities] =
- Play.Json.format[UserEntities]
+
+ implicit val circeCodec: Codec[UserEntities] = deriveCodec
}
case class UserMentions(
@@ -61,18 +41,12 @@ case class UserMentions(
id_str: String,
indices: List[Int]
)
-@silent("Block result was adapted via implicit conversion")
+
object UserMentions {
implicit val jJsonDecoder: JsonDecoder[UserMentions] = DeriveJsonDecoder.gen[UserMentions]
implicit val jEncoder: JsonEncoder[UserMentions] = DeriveJsonEncoder.gen[UserMentions]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[UserMentions] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[UserMentions]
- implicit val circeEncoder: circe.Encoder[UserMentions] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[UserMentions]
- implicit val playFormatter: Play.Format[UserMentions] =
- Play.Json.format[UserMentions]
+
+ implicit val circeCodec: Codec[UserMentions] = deriveCodec
}
case class User(
@@ -119,17 +93,12 @@ case class User(
notifications: Boolean,
translator_type: String
)
-@silent("Block result was adapted via implicit conversion")
+
object User {
implicit val jJsonDecoder: JsonDecoder[User] = DeriveJsonDecoder.gen[User]
implicit val jEncoder: JsonEncoder[User] = DeriveJsonEncoder.gen[User]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[User] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[User]
- implicit val circeEncoder: circe.Encoder[User] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[User]
- implicit val playFormatter: Play.Format[User] = Playx.formatCaseClass[User]
+
+ implicit val circeCodec: Codec[User] = deriveCodec
}
case class Entities(
@@ -138,17 +107,12 @@ case class Entities(
user_mentions: List[UserMentions],
urls: List[Urls]
)
-@silent("Block result was adapted via implicit conversion")
+
object Entities {
implicit val jJsonDecoder: JsonDecoder[Entities] = DeriveJsonDecoder.gen[Entities]
implicit val jEncoder: JsonEncoder[Entities] = DeriveJsonEncoder.gen[Entities]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[Entities] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Entities]
- implicit val circeEncoder: circe.Encoder[Entities] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Entities]
- implicit val playFormatter: Play.Format[Entities] = Play.Json.format[Entities]
+
+ implicit val circeCodec: Codec[Entities] = deriveCodec
}
case class RetweetedStatus(
@@ -177,20 +141,14 @@ case class RetweetedStatus(
possibly_sensitive: Boolean,
lang: String
)
-@silent("Block result was adapted via implicit conversion")
+
object RetweetedStatus {
implicit val jJsonDecoder: JsonDecoder[RetweetedStatus] =
DeriveJsonDecoder.gen[RetweetedStatus]
implicit val jEncoder: JsonEncoder[RetweetedStatus] =
DeriveJsonEncoder.gen[RetweetedStatus]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[RetweetedStatus] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[RetweetedStatus]
- implicit val circeEncoder: circe.Encoder[RetweetedStatus] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[RetweetedStatus]
- implicit val playFormatter: Play.Format[RetweetedStatus] =
- Playx.formatCaseClass[RetweetedStatus]
+
+ implicit val circeCodec: Codec[RetweetedStatus] = deriveCodec
}
case class Tweet(
@@ -221,15 +179,9 @@ case class Tweet(
lang: String
)
-@silent("Block result was adapted via implicit conversion")
object Tweet {
implicit val zioJsonJsonDecoder: JsonDecoder[Tweet] = DeriveJsonDecoder.gen[Tweet]
implicit val zioJsonEncoder: JsonEncoder[Tweet] = DeriveJsonEncoder.gen[Tweet]
- implicit val customConfig: circe.generic.extras.Configuration =
- circe.generic.extras.Configuration.default
- implicit val circeJsonDecoder: circe.Decoder[Tweet] =
- circe.generic.extras.semiauto.deriveConfiguredDecoder[Tweet]
- implicit val circeEncoder: circe.Encoder[Tweet] =
- circe.generic.extras.semiauto.deriveConfiguredEncoder[Tweet]
- implicit val playFormatter: Play.Format[Tweet] = Playx.formatCaseClass[Tweet]
+
+ implicit val circeCodec: Codec[Tweet] = deriveCodec
}
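
The hunks above apply the same simplification to every benchmark model: the circe-generic-extras configuration, the semiauto `deriveConfiguredDecoder`/`deriveConfiguredEncoder` pair, and the Play JSON formatter are collapsed into a single plain circe `Codec`, while the zio-json derived decoder/encoder stay as they were. A minimal sketch of the resulting pattern, using a hypothetical `Hashtag` model and assuming the usual circe semiauto imports:

```scala
import io.circe.Codec
import io.circe.generic.semiauto.deriveCodec
import zio.json._

// Hypothetical model following the same pattern as UserMentions/User above.
final case class Hashtag(text: String, indices: List[Int])

object Hashtag {
  // zio-json derivation (kept by the diff)
  implicit val jsonDecoder: JsonDecoder[Hashtag] = DeriveJsonDecoder.gen[Hashtag]
  implicit val jsonEncoder: JsonEncoder[Hashtag] = DeriveJsonEncoder.gen[Hashtag]

  // plain circe semiauto codec replacing the circe-generic-extras + Play formats
  implicit val circeCodec: Codec[Hashtag] = deriveCodec
}
```
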
diff --git a/zio-json/jvm/src/test/scala/zio/json/internal/SafeNumbersSpec.scala b/zio-json/jvm/src/test/scala/zio/json/internal/SafeNumbersSpec.scala
deleted file mode 100644
index 41f184676..000000000
--- a/zio-json/jvm/src/test/scala/zio/json/internal/SafeNumbersSpec.scala
+++ /dev/null
@@ -1,260 +0,0 @@
-package testzio.json.internal
-
-import testzio.json.Gens._
-import zio.json.internal._
-import zio.test.Assertion._
-import zio.test._
-
-object SafeNumbersSpec extends ZIOSpecDefault {
- val spec =
- suite("SafeNumbers")(
- test("valid big decimals") {
- check(genBigDecimal)(i => assert(SafeNumbers.bigDecimal(i.toString, 2048))(isSome(equalTo(i))))
- },
- test("invalid big decimals") {
- val invalidBigDecimalEdgeCases = List(
- "N",
- "Inf",
- "-NaN",
- "+NaN",
- "e1",
- "1.1.1",
- "1 ",
- "NaN",
- "Infinity",
- "+Infinity",
- "-Infinity"
- ).map(s => SafeNumbers.bigDecimal(s))
-
- assert(invalidBigDecimalEdgeCases)(forall(isNone))
- },
- test("valid big decimal edge cases") {
- val invalidBigDecimalEdgeCases = List(
- ".0",
- "-.0",
- "0",
- "0.0",
- "-0.0", // zeroes
- "0000.1",
- "0.00001",
- "000.00001000" // various trailing zeros, should be preserved
- )
-
- check(Gen.fromIterable(invalidBigDecimalEdgeCases)) { s =>
- assert(SafeNumbers.bigDecimal(s).map(_.toString))(
- isSome(
- equalTo((new java.math.BigDecimal(s)).toString)
- )
- )
- }
- },
- test("invalid BigDecimal text") {
- check(genAlphaLowerString)(s => assert(SafeNumbers.bigDecimal(s))(isNone))
- },
- test("valid BigInteger edge cases") {
- val inputs = List(
- "00",
- "01",
- "0000001",
- "-9223372036854775807",
- "9223372036854775806",
- "-9223372036854775809",
- "9223372036854775808"
- )
-
- check(Gen.fromIterable(inputs)) { s =>
- assert(SafeNumbers.bigInteger(s))(
- isSome(
- equalTo((new java.math.BigInteger(s)))
- )
- )
- }
- },
- test("invalid BigInteger edge cases") {
- val inputs = List("0foo", "01foo", "0.1", "", "1 ")
-
- check(Gen.fromIterable(inputs))(s => assert(SafeNumbers.bigInteger(s))(isNone))
- },
- test("valid big Integer") {
- check(genBigInteger)(i => assert(SafeNumbers.bigInteger(i.toString, 2048))(isSome(equalTo(i))))
- },
- test("invalid BigInteger") {
- check(genAlphaLowerString)(s => assert(SafeNumbers.bigInteger(s))(isNone))
- },
- test("valid Byte") {
- check(Gen.byte(Byte.MinValue, Byte.MaxValue)) { b =>
- assert(SafeNumbers.byte(b.toString))(equalTo(ByteSome(b)))
- }
- },
- test("invalid Byte (numbers)") {
- check(Gen.long.filter(i => i < Byte.MinValue || i > Byte.MaxValue)) { b =>
- assert(SafeNumbers.byte(b.toString))(equalTo(ByteNone))
- }
- },
- test("invalid Byte (text)") {
- check(genAlphaLowerString)(b => assert(SafeNumbers.byte(b.toString))(equalTo(ByteNone)))
- },
- suite("Double")(
- test("valid") {
- check(Gen.double.filterNot(_.isNaN)) { d =>
- assert(SafeNumbers.double(d.toString))(equalTo(DoubleSome(d)))
- }
- },
- test("valid (from Int)") {
- check(Gen.int)(i => assert(SafeNumbers.double(i.toString))(equalTo(DoubleSome(i.toDouble))))
- },
- test("valid (from Long)") {
- check(Gen.long)(i => assert(SafeNumbers.double(i.toString))(equalTo(DoubleSome(i.toDouble))))
- },
- test("invalid edge cases") {
- val inputs = List("N", "Inf", "-NaN", "+NaN", "e1", "1.1.1", "1 ")
-
- check(Gen.fromIterable(inputs))(i => assert(SafeNumbers.double(i))(equalTo(DoubleNone)))
- },
- test("valid edge cases") {
- val inputs = List(
- ".0",
- "-.0",
- "0",
- "0.0",
- "-0.0", // zeroes
- "0000.1",
- "0.00001",
- "000.00001000", // trailing zeros
- "NaN",
- "92233720368547758070", // overflows a Long significand
- "Infinity",
- "+Infinity",
- "-Infinity",
- "3.976210887433566E-281" // rounds if a naive scaling is used
- )
-
- check(Gen.fromIterable(inputs)) { s =>
- // better to do the comparison on strings to deal with NaNs
- assert(SafeNumbers.double(s).toString)(
- equalTo(DoubleSome(s.toDouble).toString)
- )
- }
- },
- test("valid magic doubles") {
- assert(SafeNumbers.double("NaN"))(not(equalTo(DoubleNone))) &&
- assert(SafeNumbers.double("Infinity"))(not(equalTo(DoubleNone))) &&
- assert(SafeNumbers.double("+Infinity"))(not(equalTo(DoubleNone))) &&
- assert(SafeNumbers.double("-Infinity"))(not(equalTo(DoubleNone)))
- },
- test("invalid doubles (text)") {
- check(genAlphaLowerString)(s => assert(SafeNumbers.double(s))(equalTo(DoubleNone)))
- }
- ),
- suite("Float")(
- test("valid") {
- check(Gen.float.filterNot(_.isNaN))(d => assert(SafeNumbers.float(d.toString))(equalTo(FloatSome(d))))
- },
- test("large mantissa") {
- // https://github.com/zio/zio-json/issues/221
- assert(SafeNumbers.float("1.199999988079071"))(equalTo(FloatSome(1.1999999f)))
- },
- test("valid (from Int)") {
- check(Gen.int)(i => assert(SafeNumbers.float(i.toString))(equalTo(FloatSome(i.toFloat))))
- },
- test("valid (from Long)") {
- check(Gen.long)(i => assert(SafeNumbers.float(i.toString))(equalTo(FloatSome(i.toFloat))))
- },
- test("invalid edge cases") {
- val inputs = List("N", "Inf", "-NaN", "+NaN", "e1", "1.1.1")
-
- check(Gen.fromIterable(inputs))(i => assert(SafeNumbers.float(i))(equalTo(FloatNone)))
- },
- test("valid edge cases") {
- val inputs = List(
- ".0",
- "-.0",
- "0",
- "0.0",
- "-0.0", // zeroes
- "0000.1",
- "0.00001",
- "000.00001000", // trailing zeros
- "NaN",
- "92233720368547758070", // overflows a Long significand
- "Infinity",
- "+Infinity",
- "-Infinity"
- )
-
- check(Gen.fromIterable(inputs)) { s =>
- // better to do the comparison on strings to deal with NaNs
- assert(SafeNumbers.float(s).toString)(
- equalTo(FloatSome(s.toFloat).toString)
- )
- }
- },
- test("valid (from Double)") {
- check(Gen.double.filterNot(_.isNaN)) { d =>
- assert(SafeNumbers.float(d.toString))(equalTo(FloatSome(d.toFloat)))
- }
- },
- test("invalid float (text)") {
- check(genAlphaLowerString)(s => assert(SafeNumbers.float(s))(equalTo(FloatNone)))
- }
- ),
- suite("Int")(
- test("valid") {
- check(Gen.int)(d => assert(SafeNumbers.int(d.toString))(equalTo(IntSome(d))))
- },
- test("invalid (out of range)") {
- check(Gen.long.filter(i => i < Int.MinValue || i > Int.MaxValue))(d =>
- assert(SafeNumbers.int(d.toString))(equalTo(IntNone))
- )
- },
- test("invalid (text)") {
- check(genAlphaLowerString)(s => assert(SafeNumbers.int(s))(equalTo(IntNone)))
- }
- ),
- suite("Long")(
- test("valid edge cases") {
- val input = List("00", "01", "0000001", "-9223372036854775807", "9223372036854775806")
-
- check(Gen.fromIterable(input))(x => assert(SafeNumbers.long(x))(equalTo(LongSome(x.toLong))))
- },
- test("in valid edge cases") {
- val input = List(
- "0foo",
- "01foo",
- "0.1",
- "",
- "1 ",
- "-9223372036854775809",
- "9223372036854775808"
- )
-
- check(Gen.fromIterable(input))(x => assert(SafeNumbers.long(x))(equalTo(LongNone)))
- },
- test("valid") {
- check(Gen.long)(d => assert(SafeNumbers.long(d.toString))(equalTo(LongSome(d))))
- },
- test("invalid (out of range)") {
- val outOfRange = genBigInteger
- .filter(_.bitLength > 63)
-
- check(outOfRange)(x => assert(SafeNumbers.long(x.toString))(equalTo(LongNone)))
- },
- test("invalid (text)") {
- check(genAlphaLowerString)(s => assert(SafeNumbers.long(s))(equalTo(LongNone)))
- }
- ),
- suite("Short")(
- test("valid") {
- check(Gen.short)(d => assert(SafeNumbers.short(d.toString))(equalTo(ShortSome(d))))
- },
- test("invalid (out of range)") {
- check(Gen.long.filter(i => i < Short.MinValue || i > Short.MaxValue))(d =>
- assert(SafeNumbers.short(d.toString))(equalTo(ShortNone))
- )
- },
- test("invalid (text)") {
- check(genAlphaLowerString)(s => assert(SafeNumbers.short(s))(equalTo(ShortNone)))
- }
- )
- )
-}
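
The deleted spec exercised the internal `SafeNumbers` parsers, which report failure through allocation-free `*None` results (`DoubleNone`, `LongNone`, ...) or a plain `Option` for the big-number variants instead of throwing. A small usage sketch reconstructed from the removed test cases (internal API, not part of the public surface):

```scala
import zio.json.internal._

// Malformed input yields the *None results instead of throwing;
// the big-number parsers return a plain Option.
val d1 = SafeNumbers.double("3.14")        // DoubleSome(3.14)
val d2 = SafeNumbers.double("1.1.1")       // DoubleNone
val b1 = SafeNumbers.bigDecimal("0.00001") // Some(0.00001): trailing zeros preserved
val b2 = SafeNumbers.bigDecimal("1 ")      // None: trailing space is rejected
```
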
diff --git a/zio-json/shared/src/main/scala-2.x/zio/json/JsonCodecVersionSpecific.scala b/zio-json/shared/src/main/scala-2.12/zio/json/JsonCodecVersionSpecific.scala
similarity index 100%
rename from zio-json/shared/src/main/scala-2.x/zio/json/JsonCodecVersionSpecific.scala
rename to zio-json/shared/src/main/scala-2.12/zio/json/JsonCodecVersionSpecific.scala
diff --git a/zio-json/shared/src/main/scala-2.12/zio/json/JsonDecoderVersionSpecific.scala b/zio-json/shared/src/main/scala-2.12/zio/json/JsonDecoderVersionSpecific.scala
new file mode 100644
index 000000000..5e8d10030
--- /dev/null
+++ b/zio-json/shared/src/main/scala-2.12/zio/json/JsonDecoderVersionSpecific.scala
@@ -0,0 +1,5 @@
+package zio.json
+
+private[json] trait JsonDecoderVersionSpecific
+
+private[json] trait DecoderLowPriorityVersionSpecific
diff --git a/zio-json/shared/src/main/scala-2.12/zio/json/JsonEncoderVersionSpecific.scala b/zio-json/shared/src/main/scala-2.12/zio/json/JsonEncoderVersionSpecific.scala
new file mode 100644
index 000000000..8b360d95d
--- /dev/null
+++ b/zio-json/shared/src/main/scala-2.12/zio/json/JsonEncoderVersionSpecific.scala
@@ -0,0 +1,5 @@
+package zio.json
+
+private[json] trait JsonEncoderVersionSpecific
+
+private[json] trait EncoderLowPriorityVersionSpecific
diff --git a/zio-json/shared/src/main/scala-2.13/zio/json/JsonCodecVersionSpecific.scala b/zio-json/shared/src/main/scala-2.13/zio/json/JsonCodecVersionSpecific.scala
new file mode 100644
index 000000000..e6d8f0f9b
--- /dev/null
+++ b/zio-json/shared/src/main/scala-2.13/zio/json/JsonCodecVersionSpecific.scala
@@ -0,0 +1,8 @@
+package zio.json
+
+import scala.collection.immutable
+
+trait JsonCodecVersionSpecific {
+ implicit def arraySeq[A: JsonEncoder: JsonDecoder: reflect.ClassTag]: JsonCodec[immutable.ArraySeq[A]] =
+ JsonCodec(JsonEncoder.arraySeq[A], JsonDecoder.arraySeq[A])
+}
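
This 2.13-only trait combines the version-specific `ArraySeq` encoder and decoder (added in the files below) into a `JsonCodec`, so `immutable.ArraySeq` round-trips like any other collection. A short sketch, assuming the standard `zio.json._` syntax import:

```scala
import scala.collection.immutable.ArraySeq
import zio.json._

val xs: ArraySeq[Int] = ArraySeq(1, 2, 3)

val json: String = xs.toJson                    // "[1,2,3]"
val back: Either[String, ArraySeq[Int]] =
  json.fromJson[ArraySeq[Int]]                  // Right(ArraySeq(1, 2, 3))
```
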
diff --git a/zio-json/shared/src/main/scala-2.13/zio/json/JsonDecoderVersionSpecific.scala b/zio-json/shared/src/main/scala-2.13/zio/json/JsonDecoderVersionSpecific.scala
new file mode 100644
index 000000000..eb0ffc9e0
--- /dev/null
+++ b/zio-json/shared/src/main/scala-2.13/zio/json/JsonDecoderVersionSpecific.scala
@@ -0,0 +1,20 @@
+package zio.json
+
+import zio.json.JsonDecoder.JsonError
+import zio.json.internal.RetractReader
+
+import scala.collection.immutable
+
+private[json] trait JsonDecoderVersionSpecific {
+ implicit def arraySeq[A: JsonDecoder: reflect.ClassTag]: JsonDecoder[immutable.ArraySeq[A]] =
+ new CollectionJsonDecoder[immutable.ArraySeq[A]] {
+ private[this] val arrayDecoder = JsonDecoder.array[A]
+
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.ArraySeq[A] = immutable.ArraySeq.empty
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): immutable.ArraySeq[A] =
+ immutable.ArraySeq.unsafeWrapArray(arrayDecoder.unsafeDecode(trace, in))
+ }
+}
+
+private[json] trait DecoderLowPriorityVersionSpecific
diff --git a/zio-json/shared/src/main/scala-2.13/zio/json/JsonEncoderVersionSpecific.scala b/zio-json/shared/src/main/scala-2.13/zio/json/JsonEncoderVersionSpecific.scala
new file mode 100644
index 000000000..693472327
--- /dev/null
+++ b/zio-json/shared/src/main/scala-2.13/zio/json/JsonEncoderVersionSpecific.scala
@@ -0,0 +1,23 @@
+package zio.json
+
+import zio.json.ast.Json
+import zio.json.internal.Write
+
+import scala.collection.immutable
+
+private[json] trait JsonEncoderVersionSpecific {
+ implicit def arraySeq[A: JsonEncoder: scala.reflect.ClassTag]: JsonEncoder[immutable.ArraySeq[A]] =
+ new JsonEncoder[immutable.ArraySeq[A]] {
+ private[this] val arrayEnc = JsonEncoder.array[A]
+
+ override def isEmpty(as: immutable.ArraySeq[A]): Boolean = as.isEmpty
+
+ def unsafeEncode(as: immutable.ArraySeq[A], indent: Option[Int], out: Write): Unit =
+ arrayEnc.unsafeEncode(as.unsafeArray.asInstanceOf[Array[A]], indent, out)
+
+ override final def toJsonAST(as: immutable.ArraySeq[A]): Either[String, Json] =
+ arrayEnc.toJsonAST(as.unsafeArray.asInstanceOf[Array[A]])
+ }
+}
+
+private[json] trait EncoderLowPriorityVersionSpecific
diff --git a/zio-json/shared/src/main/scala-2.x/zio/json/JsonCodecConfiguration.scala b/zio-json/shared/src/main/scala-2.x/zio/json/JsonCodecConfiguration.scala
new file mode 100644
index 000000000..e59274966
--- /dev/null
+++ b/zio-json/shared/src/main/scala-2.x/zio/json/JsonCodecConfiguration.scala
@@ -0,0 +1,188 @@
+package zio.json
+
+import zio.json.JsonCodecConfiguration.SumTypeHandling
+import zio.json.JsonCodecConfiguration.SumTypeHandling.WrapperWithClassNameField
+
+/**
+ * When disabled for encoding, keys with empty collections will be omitted from the JSON. When disabled for decoding,
+ * missing keys will default to empty collections.
+ */
+case class ExplicitEmptyCollections(encoding: Boolean = true, decoding: Boolean = true)
+
+/**
+ * Implicit codec derivation configuration.
+ *
+ * @param sumTypeHandling
+ * see [[jsonDiscriminator]]
+ * @param fieldNameMapping
+ * see [[jsonMemberNames]]
+ * @param allowExtraFields
+ * see [[jsonNoExtraFields]]
+ * @param sumTypeMapping
+ * see [[jsonHintNames]]
+ * @param explicitNulls
+ * turns on explicit serialization of optional fields with None values
+ * @param explicitEmptyCollections
+ * turns on explicit serialization of fields with empty collections
+ * @param enumValuesAsStrings
+ * turns on serialization of enum values and sealed trait's case objects as strings
+ */
+final case class JsonCodecConfiguration(
+ sumTypeHandling: SumTypeHandling = WrapperWithClassNameField,
+ fieldNameMapping: JsonMemberFormat = IdentityFormat,
+ allowExtraFields: Boolean = true,
+ sumTypeMapping: JsonMemberFormat = IdentityFormat,
+ explicitNulls: Boolean = false,
+ explicitEmptyCollections: ExplicitEmptyCollections = ExplicitEmptyCollections(),
+ enumValuesAsStrings: Boolean = false
+) {
+ def this(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean,
+ explicitEmptyCollections: ExplicitEmptyCollections
+ ) = this(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ false
+ )
+
+ def this(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean
+ ) = this(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ ExplicitEmptyCollections(),
+ false
+ )
+
+ def copy(
+ sumTypeHandling: SumTypeHandling = WrapperWithClassNameField.asInstanceOf[SumTypeHandling],
+ fieldNameMapping: JsonMemberFormat = IdentityFormat.asInstanceOf[JsonMemberFormat],
+ allowExtraFields: Boolean = true,
+ sumTypeMapping: JsonMemberFormat = IdentityFormat.asInstanceOf[JsonMemberFormat],
+ explicitNulls: Boolean = false,
+ explicitEmptyCollections: ExplicitEmptyCollections = ExplicitEmptyCollections(),
+ enumValuesAsStrings: Boolean = false
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ enumValuesAsStrings
+ )
+
+ def copy(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean,
+ explicitEmptyCollections: ExplicitEmptyCollections
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ this.enumValuesAsStrings
+ )
+
+ def copy(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ this.explicitEmptyCollections,
+ this.enumValuesAsStrings
+ )
+}
+
+object JsonCodecConfiguration {
+ def apply(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean,
+ explicitEmptyCollections: ExplicitEmptyCollections
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ false
+ )
+
+ def apply(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ ExplicitEmptyCollections(),
+ false
+ )
+
+ implicit val default: JsonCodecConfiguration = JsonCodecConfiguration()
+
+ sealed trait SumTypeHandling {
+ def discriminatorField: Option[String]
+ }
+
+ object SumTypeHandling {
+
+ /**
+ * Use an object with a single key that is the class name.
+ */
+ case object WrapperWithClassNameField extends SumTypeHandling {
+ override def discriminatorField: Option[String] = None
+ }
+
+ /**
+ * For sealed classes, will determine the name of the field for disambiguating classes.
+ *
+ * The default is to not use a typehint field and instead have an object with a single key that is the class name.
+ * See [[WrapperWithClassNameField]].
+ *
+ * Note that using a discriminator is less performant, uses more memory, and may be prone to DOS attacks that are
+ * impossible with the default encoding. In addition, there is slightly less type safety when using custom product
+ * encoders (which must write an unenforced object type). Only use this option if you must model an externally
+ * defined schema.
+ */
+ final case class DiscriminatorField(name: String) extends SumTypeHandling {
+ override def discriminatorField: Option[String] = Some(name)
+ }
+ }
+}
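
The extra constructors, `copy` overloads, and `apply` overloads appear to exist only to keep binary compatibility for the previous arities; user code is expected to go through the primary case-class `apply` with named arguments. The configuration is consumed implicitly by the Magnolia derivation in macros.scala below, so customizing derivation means putting a `JsonCodecConfiguration` in scope before calling `gen`. A hedged sketch with a hypothetical ADT (`SnakeCase` and `DiscriminatorField` are existing zio-json options; the type and field names are made up):

```scala
import zio.json._
import zio.json.JsonCodecConfiguration.SumTypeHandling.DiscriminatorField

sealed trait Shape
final case class Circle(radiusMm: Double)                extends Shape
final case class Rect(widthMm: Double, heightMm: Double) extends Shape

object Shape {
  // snake_case member names and a "type" discriminator instead of the wrapper-object encoding
  implicit val config: JsonCodecConfiguration =
    JsonCodecConfiguration(
      sumTypeHandling  = DiscriminatorField("type"),
      fieldNameMapping = SnakeCase
    )

  implicit val decoder: JsonDecoder[Shape] = DeriveJsonDecoder.gen[Shape]
  implicit val encoder: JsonEncoder[Shape] = DeriveJsonEncoder.gen[Shape]
}
// (Circle(2.5): Shape).toJson would then render along the lines of {"type":"Circle","radius_mm":2.5}.
```
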
diff --git a/zio-json/shared/src/main/scala-2.x/zio/json/JsonDecoderVersionSpecific.scala b/zio-json/shared/src/main/scala-2.x/zio/json/JsonDecoderVersionSpecific.scala
deleted file mode 100644
index 728b65002..000000000
--- a/zio-json/shared/src/main/scala-2.x/zio/json/JsonDecoderVersionSpecific.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package zio.json
-
-trait JsonDecoderVersionSpecific
diff --git a/zio-json/shared/src/main/scala-2.x/zio/json/JsonEncoderVersionSpecific.scala b/zio-json/shared/src/main/scala-2.x/zio/json/JsonEncoderVersionSpecific.scala
deleted file mode 100644
index 5b912efe2..000000000
--- a/zio-json/shared/src/main/scala-2.x/zio/json/JsonEncoderVersionSpecific.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package zio.json
-
-trait JsonEncoderVersionSpecific
diff --git a/zio-json/shared/src/main/scala-2.x/zio/json/JsonFieldDecoder.scala b/zio-json/shared/src/main/scala-2.x/zio/json/JsonFieldDecoder.scala
new file mode 100644
index 000000000..5febd6f29
--- /dev/null
+++ b/zio-json/shared/src/main/scala-2.x/zio/json/JsonFieldDecoder.scala
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2019-2022 John A. De Goes and the ZIO Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package zio.json
+
+import zio.json.internal.Lexer
+import zio.json.uuid.UUIDParser
+
+/** When decoding a JSON Object, we only allow the keys that implement this interface. */
+trait JsonFieldDecoder[+A] {
+ self =>
+
+ final def map[B](f: A => B): JsonFieldDecoder[B] =
+ new JsonFieldDecoder[B] {
+
+ def unsafeDecodeField(trace: List[JsonError], in: String): B =
+ f(self.unsafeDecodeField(trace, in))
+ }
+
+ final def mapOrFail[B](f: A => Either[String, B]): JsonFieldDecoder[B] =
+ new JsonFieldDecoder[B] {
+
+ def unsafeDecodeField(trace: List[JsonError], in: String): B =
+ f(self.unsafeDecodeField(trace, in)) match {
+ case Left(err) => Lexer.error(err, trace)
+ case Right(b) => b
+ }
+ }
+
+ def unsafeDecodeField(trace: List[JsonError], in: String): A
+}
+
+object JsonFieldDecoder extends LowPriorityJsonFieldDecoder {
+ def apply[A](implicit a: JsonFieldDecoder[A]): JsonFieldDecoder[A] = a
+
+ implicit val string: JsonFieldDecoder[String] = new JsonFieldDecoder[String] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): String = in
+ }
+
+ implicit val int: JsonFieldDecoder[Int] = new JsonFieldDecoder[Int] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): Int =
+ try in.toInt
+ catch {
+ case _: NumberFormatException => Lexer.error(s"Invalid Int: ${strip(in)}", trace)
+ }
+ }
+
+ implicit val long: JsonFieldDecoder[Long] = new JsonFieldDecoder[Long] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): Long =
+ try in.toLong
+ catch {
+ case _: NumberFormatException => Lexer.error(s"Invalid Long: ${strip(in)}", trace)
+ }
+ }
+
+ implicit val uuid: JsonFieldDecoder[java.util.UUID] = new JsonFieldDecoder[java.util.UUID] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): java.util.UUID =
+ try UUIDParser.unsafeParse(in)
+ catch {
+ case _: IllegalArgumentException => Lexer.error("expected a UUID", trace)
+ }
+ }
+
+ // FIXME: remove from the next major version
+ private[json] def mapStringOrFail[A](f: String => Either[String, A]): JsonFieldDecoder[A] =
+ new JsonFieldDecoder[A] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): A =
+ f(string.unsafeDecodeField(trace, in)) match {
+ case Left(err) => Lexer.error(err, trace)
+ case Right(value) => value
+ }
+ }
+
+ private[json] def strip(s: String, len: Int = 50): String =
+ if (s.length <= len) s
+ else s.substring(0, len) + "..."
+}
+
+private[json] trait LowPriorityJsonFieldDecoder
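
`JsonFieldDecoder` is what lets JSON object keys decode into something other than `String`; `map` and `mapOrFail` build key decoders for wrapper types on top of the provided `string`, `int`, `long`, and `uuid` instances. A small sketch with a hypothetical `UserId` key type:

```scala
import zio.json._

// Hypothetical wrapper used as a map key.
final case class UserId(value: Long)

object UserId {
  implicit val fieldDecoder: JsonFieldDecoder[UserId] =
    JsonFieldDecoder[Long].map(UserId(_))
}

// With the key decoder in scope, maps keyed by UserId decode directly:
val decoded: Either[String, Map[UserId, String]] =
  """{"42":"alice","7":"bob"}""".fromJson[Map[UserId, String]]
```
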
diff --git a/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala b/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala
index 3810b8c58..6a13b1b50 100644
--- a/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala
+++ b/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala
@@ -2,19 +2,15 @@ package zio.json
import magnolia1._
import zio.Chunk
-import zio.json.JsonCodecConfiguration.SumTypeHandling
-import zio.json.JsonCodecConfiguration.SumTypeHandling.WrapperWithClassNameField
-import zio.json.JsonDecoder.{ JsonError, UnsafeJson }
+import zio.json.JsonDecoder.JsonError
import zio.json.ast.Json
-import zio.json.internal.{ Lexer, RetractReader, StringMatrix, Write }
-
+import zio.json.internal.{ FieldEncoder, Lexer, RecordingReader, RetractReader, StringMatrix, Write }
import scala.annotation._
-import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
import scala.language.experimental.macros
/**
- * If used on a case class field, determines the name of the JSON field.
- * Defaults to the case class field name.
+ * If used on a case class field, determines the name of the JSON field. Defaults to the case class field name.
*/
final case class jsonField(name: String) extends Annotation
@@ -24,17 +20,24 @@ final case class jsonField(name: String) extends Annotation
final case class jsonAliases(alias: String, aliases: String*) extends Annotation
/**
- * If used on a sealed class, will determine the name of the field for
- * disambiguating classes.
+ * Empty option fields will be encoded as `null`.
+ */
+final class jsonExplicitNull extends Annotation
+
+/**
+ * When disabled for encoding, keys with empty collections will be omitted from the JSON; when disabled for decoding,
+ * missing keys default to empty collections.
+ */
+final case class jsonExplicitEmptyCollections(encoding: Boolean = true, decoding: Boolean = true) extends Annotation
+
+/**
+ * If used on a sealed class, will determine the name of the field for disambiguating classes.
*
- * The default is to not use a typehint field and instead
- * have an object with a single key that is the class name.
+ * The default is to not use a typehint field and instead have an object with a single key that is the class name.
*
- * Note that using a discriminator is less performant, uses more memory, and may
- * be prone to DOS attacks that are impossible with the default encoding. In
- * addition, there is slightly less type safety when using custom product
- * encoders (which must write an unenforced object type). Only use this option
- * if you must model an externally defined schema.
+ * Note that using a discriminator is less performant, uses more memory, and may be prone to DOS attacks that are
+ * impossible with the default encoding. In addition, there is slightly less type safety when using custom product
+ * encoders (which must write an unenforced object type). Only use this option if you must model an externally defined
+ * schema.
*/
final case class jsonDiscriminator(name: String) extends Annotation
// TODO a strategy where the constructor is inferred from the field names, only
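
The two new annotations mirror the `explicitNulls` and `explicitEmptyCollections` configuration flags at the class and field level, so individual types can opt in or out without touching the implicit configuration. A hedged sketch with a hypothetical payload:

```scala
import zio.json._

// Hypothetical payload: `nickname` always serializes (null when None), while
// empty `tags` are dropped from the output and tolerated when missing on decode.
@jsonExplicitEmptyCollections(encoding = false, decoding = false)
final case class Profile(
  @jsonExplicitNull nickname: Option[String],
  tags: List[String]
)

object Profile {
  implicit val encoder: JsonEncoder[Profile] = DeriveJsonEncoder.gen[Profile]
  implicit val decoder: JsonDecoder[Profile] = DeriveJsonDecoder.gen[Profile]
}
```
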
@@ -79,25 +82,19 @@ object ziojson_03 {
}
/**
- * If used on a case class, determines the strategy of member names
- * transformation during serialization and deserialization. Four common
- * strategies are provided above and a custom one to support specific use cases.
+ * If used on a case class, determines the strategy of member names transformation during serialization and
+ * deserialization. Four common strategies are provided above and a custom one to support specific use cases.
*/
final case class jsonMemberNames(format: JsonMemberFormat) extends Annotation
private[json] object jsonMemberNames {
- /**
- * ~~Stolen~~ Borrowed from jsoniter-scala by Andriy Plokhotnyuk
- * (he even granted permission for this, imagine that!)
- */
-
import java.lang.Character._
def enforceCamelOrPascalCase(s: String, toPascal: Boolean): String =
if (s.indexOf('_') == -1 && s.indexOf('-') == -1) {
if (s.isEmpty) s
else {
- val ch = s.charAt(0)
+ val ch = s.charAt(0)
val fixedCh =
if (toPascal) toUpperCase(ch)
else toLowerCase(ch)
@@ -171,21 +168,26 @@ private[json] object jsonMemberNames {
}
/**
- * If used on a case class will determine the type hint value for disambiguating
- * sealed traits. Defaults to the short type name.
+ * If used on a case class will determine the type hint value for disambiguating sealed traits. Defaults to the short
+ * type name.
*/
final case class jsonHint(name: String) extends Annotation
/**
- * If used on a case class, will exit early if any fields are in the JSON that
- * do not correspond to field names in the case class.
+ * If used on a sealed class will determine the strategy of type hint value transformation for disambiguating classes
+ * during serialization and deserialization. Same strategies are provided as for [[jsonMemberNames]].
+ */
+final case class jsonHintNames(format: JsonMemberFormat) extends Annotation
+
+/**
+ * If used on a case class, will exit early if any fields are in the JSON that do not correspond to field names in the
+ * case class.
*
- * This adds extra protections against a DOS attacks but means that changes in
- * the schema will result in a hard error rather than silently ignoring those
- * fields.
+ * This adds extra protections against DOS attacks but means that changes in the schema will result in a hard error
+ * rather than silently ignoring those fields.
*
- * Cannot be combined with `@jsonDiscriminator` since it is considered an extra
- * field from the perspective of the case class.
+ * Cannot be combined with `@jsonDiscriminator` since it is considered an extra field from the perspective of the case
+ * class.
*/
final class jsonNoExtraFields extends Annotation
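
`jsonHintNames` applies a `JsonMemberFormat` to the type-hint values of a sealed hierarchy, just as `jsonMemberNames` does for field names, and a per-case `jsonHint` override still takes precedence. A hedged sketch with a made-up ADT:

```scala
import zio.json._

// Hypothetical ADT: hints are snake_cased, except where overridden by @jsonHint.
@jsonHintNames(SnakeCase)
sealed trait Event
final case class UserCreated(id: String) extends Event
@jsonHint("user-gone")
final case class UserDeleted(id: String) extends Event

object Event {
  implicit val codec: JsonCodec[Event] = DeriveJsonCodec.gen[Event]
}
// UserCreated is wrapped as {"user_created": {...}}, UserDeleted as {"user-gone": {...}}.
```
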
@@ -194,465 +196,698 @@ final class jsonNoExtraFields extends Annotation
*/
final class jsonExclude extends Annotation
-// TODO: implement same configuration for Scala 3 once this issue is resolved: https://github.com/softwaremill/magnolia/issues/296
-/**
- * Implicit codec derivation configuration.
- *
- * @param sumTypeHandling see [[jsonDiscriminator]]
- * @param fieldNameMapping see [[jsonMemberNames]]
- * @param allowExtraFields see [[jsonNoExtraFields]]
- */
-final case class JsonCodecConfiguration(
- sumTypeHandling: SumTypeHandling = WrapperWithClassNameField,
- fieldNameMapping: JsonMemberFormat = IdentityFormat,
- allowExtraFields: Boolean = true
-)
-
-object JsonCodecConfiguration {
- implicit val default: JsonCodecConfiguration = JsonCodecConfiguration()
-
- sealed trait SumTypeHandling {
- def discriminatorField: Option[String]
+private class CaseObjectDecoder[Typeclass[_], A](val ctx: CaseClass[Typeclass, A], no_extra: Boolean)
+ extends CollectionJsonDecoder[A] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ if (no_extra) {
+ Lexer.char(trace, in, '{')
+ Lexer.char(trace, in, '}')
+ } else Lexer.skipValue(trace, in)
+ ctx.rawConstruct(Nil)
}
- object SumTypeHandling {
+ override def unsafeDecodeMissing(trace: List[JsonError]): A = ctx.rawConstruct(Nil)
- /**
- * Use an object with a single key that is the class name.
- */
- case object WrapperWithClassNameField extends SumTypeHandling {
- override def discriminatorField: Option[String] = None
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case _: Json.Obj | Json.Null => ctx.rawConstruct(Nil)
+ case _ => Lexer.error("expected object", trace)
}
-
- /**
- * For sealed classes, will determine the name of the field for
- * disambiguating classes.
- *
- * The default is to not use a typehint field and instead
- * have an object with a single key that is the class name.
- * See [[WrapperWithClassNameField]].
- *
- * Note that using a discriminator is less performant, uses more memory, and may
- * be prone to DOS attacks that are impossible with the default encoding. In
- * addition, there is slightly less type safety when using custom product
- * encoders (which must write an unenforced object type). Only use this option
- * if you must model an externally defined schema.
- */
- final case class DiscriminatorField(name: String) extends SumTypeHandling {
- override def discriminatorField: Option[String] = Some(name)
- }
- }
}
object DeriveJsonDecoder {
type Typeclass[A] = JsonDecoder[A]
def join[A](ctx: CaseClass[JsonDecoder, A])(implicit config: JsonCodecConfiguration): JsonDecoder[A] = {
- val (transformNames, nameTransform): (Boolean, String => String) =
- ctx.annotations.collectFirst { case jsonMemberNames(format) => format }
- .orElse(Some(config.fieldNameMapping))
- .filter(_ != IdentityFormat)
- .map(true -> _)
- .getOrElse(false -> identity _)
-
+ val nameTransform =
+ ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping)
val no_extra = ctx.annotations.collectFirst { case _: jsonNoExtraFields =>
()
}.isDefined || !config.allowExtraFields
-
- if (ctx.parameters.isEmpty)
- new JsonDecoder[A] {
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- if (no_extra) {
- Lexer.char(trace, in, '{')
- Lexer.char(trace, in, '}')
- } else {
- Lexer.skipValue(trace, in)
- }
- ctx.rawConstruct(Nil)
- }
-
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
- json match {
- case Json.Obj(_) => ctx.rawConstruct(Nil)
- case Json.Null => ctx.rawConstruct(Nil)
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
- }
- }
- else
- new JsonDecoder[A] {
- val (names, aliases): (Array[String], Array[(String, Int)]) = {
- val names = Array.ofDim[String](ctx.parameters.size)
- val aliasesBuilder = Array.newBuilder[(String, Int)]
- ctx.parameters.zipWithIndex.foreach { case (p, i) =>
- names(i) = p.annotations.collectFirst { case jsonField(name) => name }
- .getOrElse(if (transformNames) nameTransform(p.label) else p.label)
+ if (ctx.parameters.isEmpty) new CaseObjectDecoder(ctx, no_extra)
+ else {
+ var splitIndex = -1
+ val (names, aliases): (Array[String], Array[(String, Int)]) = {
+ val names = new Array[String](ctx.parameters.size)
+ val aliasesBuilder = new ArrayBuffer[(String, Int)]
+ ctx.parameters.foreach {
+ var idx = 0
+ p =>
+ names(idx) = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label))
aliasesBuilder ++= p.annotations.flatMap {
- case jsonAliases(alias, aliases @ _*) => (alias +: aliases).map(_ -> i)
+ case jsonAliases(alias, aliases @ _*) => (alias +: aliases).map(_ -> idx)
case _ => Seq.empty
}
- }
- val aliases = aliasesBuilder.result()
-
- val allFieldNames = names ++ aliases.map(_._1)
- if (allFieldNames.length != allFieldNames.distinct.length) {
- val aliasNames = aliases.map(_._1)
- val collisions = aliasNames
- .filter(alias => names.contains(alias) || aliases.count { case (a, _) => a == alias } > 1)
- .distinct
- val msg = s"Field names and aliases in case class ${ctx.typeName.full} must be distinct, " +
- s"alias(es) ${collisions.mkString(",")} collide with a field or another alias"
- throw new AssertionError(msg)
- }
-
- (names, aliases)
+ idx += 1
+ if (splitIndex < 0 && idx + aliasesBuilder.length > 64) splitIndex = idx - 1
+ }
+ val aliases = aliasesBuilder.toArray
+ val allFieldNames = names ++ aliases.map(_._1)
+ if (allFieldNames.length != allFieldNames.distinct.length) {
+ val typeName = ctx.typeName.full
+ val collisions = aliases
+ .map(_._1)
+ .distinct
+ .filter(alias => names.contains(alias) || aliases.count(_._1 == alias) > 1)
+ .mkString(",")
+ throw new AssertionError(
+ s"Field names and aliases in case class $typeName must be distinct, alias(es) $collisions collide with a field or another alias"
+ )
}
+ (names, aliases)
+ }
+ if (splitIndex < 0) {
+ new CollectionJsonDecoder[A] {
+ private[this] val len = names.length
+ private[this] val matrix = new StringMatrix(names, aliases)
+ private[this] val spans = names.map(JsonError.ObjectAccess)
+ private[this] val defaults = ctx.parameters.map(_.evaluateDefault.orNull).toArray
+ private[this] lazy val tcs = ctx.parameters.map(_.typeclass).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
+ private[this] lazy val namesMap = (names.zipWithIndex ++ aliases).toMap
+ private[this] val explicitEmptyCollections =
+ ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections =>
+ a.decoding
+ }.getOrElse(config.explicitEmptyCollections.decoding)
+ private[this] val missingValueDecoder =
+ if (explicitEmptyCollections) {
+ lazy val missingValueDecoders = tcs.map { d =>
+ if (allowMissingValueDecoder(d)) d
+ else null
+ }
+ (idx: Int, trace: List[JsonError]) => {
+ val trace_ = spans(idx) :: trace
+ val decoder = missingValueDecoders(idx)
+ if (decoder eq null) Lexer.error("missing", trace_)
+ decoder.unsafeDecodeMissing(trace_)
+ }
+ } else { (idx: Int, trace: List[JsonError]) =>
+ tcs(idx).unsafeDecodeMissing(spans(idx) :: trace)
+ }
+
+ @tailrec
+ private[this] def allowMissingValueDecoder(d: JsonDecoder[_]): Boolean = d match {
+ case _: OptionJsonDecoder[_] => true
+ case _: CollectionJsonDecoder[_] => !explicitEmptyCollections
+ case d: MappedJsonDecoder[_] => allowMissingValueDecoder(d.underlying)
+ case _ => true
+ }
- val len: Int = names.length
- val matrix: StringMatrix = new StringMatrix(names, aliases)
- val spans: Array[JsonError] = names.map(JsonError.ObjectAccess(_))
- lazy val tcs: Array[JsonDecoder[Any]] =
- ctx.parameters.map(_.typeclass).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
- lazy val defaults: Array[Option[Any]] =
- ctx.parameters.map(_.default).toArray
- lazy val namesMap: Map[String, Int] =
- (names.zipWithIndex ++ aliases).toMap
-
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- Lexer.char(trace, in, '{')
-
- // TODO it would be more efficient to have a solution that didn't box
- // primitives, but Magnolia does not expose an API for that. Adding
- // such a feature to Magnolia is the only way to avoid this, e.g. a
- // ctx.createMutableCons that specialises on the types (with some way
- // of noting that things have been initialised), which can be called
- // to instantiate the case class. Would also require JsonDecoder to be
- // specialised.
- val ps: Array[Any] = Array.ofDim(len)
-
- if (Lexer.firstField(trace, in))
- do {
- var trace_ = trace
- val field = Lexer.field(trace, in, matrix)
- if (field != -1) {
- trace_ = spans(field) :: trace
- if (ps(field) != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace)
- if (defaults(field).isDefined) {
- val opt = JsonDecoder.option(tcs(field)).unsafeDecode(trace_, in)
- ps(field) = opt.getOrElse(defaults(field).get)
- } else
- ps(field) = tcs(field).unsafeDecode(trace_, in)
- } else if (no_extra) {
- throw UnsafeJson(
- JsonError.Message(s"invalid extra field") :: trace
- )
- } else
- Lexer.skipValue(trace_, in)
- } while (Lexer.nextField(trace, in))
-
- var i = 0
- while (i < len) {
- if (ps(i) == null) {
- if (defaults(i).isDefined)
- ps(i) = defaults(i).get
- else
- ps(i) = tcs(i).unsafeDecodeMissing(spans(i) :: trace)
+ override def unsafeDecodeMissing(trace: List[JsonError]): A = {
+ val ps = new Array[Any](len)
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
+ }
+ idx += 1
}
- i += 1
+ ctx.rawConstruct(new ArraySeq(ps))
}
- ctx.rawConstruct(new ArraySeq(ps))
- }
+ override def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
- json match {
- case Json.Obj(fields) =>
- val ps: Array[Any] = Array.ofDim(len)
-
- if (aliases.nonEmpty) {
- val present = fields.map { case (key, _) => namesMap(key) }
- if (present.distinct.size != present.size) {
- throw UnsafeJson(
- JsonError.Message("duplicate") :: trace
- )
- }
+ // TODO it would be more efficient to have a solution that didn't box
+ // primitives, but Magnolia does not expose an API for that. Adding
+ // such a feature to Magnolia is the only way to avoid this, e.g. a
+ // ctx.createMutableCons that specialises on the types (with some way
+ // of noting that things have been initialised), which can be called
+ // to instantiate the case class. Would also require JsonDecoder to be
+ // specialised.
+ val ps = new Array[Any](len)
+ if (Lexer.firstField(trace, in)) {
+ do {
+ val idx = Lexer.field(trace, in, matrix)
+ if (idx >= 0) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (
+ (default eq null) || in.nextNonWhitespace() != 'n' && {
+ in.retract()
+ true
+ }
+ ) tcs(idx).unsafeDecode(spans(idx) :: trace, in)
+ else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default()
+ else Lexer.error("expected 'null'", spans(idx) :: trace)
+ } else Lexer.error("duplicate", trace)
+ } else if (no_extra) Lexer.error("invalid extra field", trace)
+ else Lexer.skipValue(trace, in)
+ } while (Lexer.nextField(trace, in))
+ }
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
}
+ idx += 1
+ }
+ ctx.rawConstruct(new ArraySeq(ps))
+ }
- for ((key, value) <- fields) {
- namesMap.get(key) match {
- case Some(field) =>
- val trace_ = JsonError.ObjectAccess(key) :: trace
- if (defaults(field).isDefined) {
- val opt = JsonDecoder.option(tcs(field)).unsafeFromJsonAST(trace_, value)
- ps(field) = opt.getOrElse(defaults(field).get)
- } else {
- ps(field) = tcs(field).unsafeFromJsonAST(trace_, value)
- }
- case None =>
- if (no_extra) {
- throw UnsafeJson(
- JsonError.Message(s"invalid extra field") :: trace
- )
- }
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ val ps = new Array[Any](len)
+ o.fields.foreach { kv =>
+ namesMap.get(kv._1) match {
+ case Some(idx) =>
+ if (ps(idx) != null) Lexer.error("duplicate", trace)
+ val default = defaults(idx)
+ ps(idx) =
+ if ((default ne null) && (kv._2 eq Json.Null)) default()
+ else tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2)
+ case _ =>
+ if (no_extra) Lexer.error("invalid extra field", trace)
+ }
}
- }
-
- var i = 0
- while (i < len) {
- if (ps(i) == null) {
- if (defaults(i).isDefined) {
- ps(i) = defaults(i).get
- } else {
- ps(i) = tcs(i).unsafeDecodeMissing(JsonError.ObjectAccess(names(i)) :: trace)
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
}
+ idx += 1
}
- i += 1
+ ctx.rawConstruct(new ArraySeq(ps))
+ case _ => Lexer.error("expected object", trace)
+ }
+ }
+ } else {
+ val (names1, names2) = names.splitAt(splitIndex)
+ val aliases1 = aliases.filter(kv => kv._2 <= splitIndex)
+ val aliases2 = aliases.collect {
+ case (k, v) if v > splitIndex =>
+ (k, v - splitIndex)
+ }
+ new CollectionJsonDecoder[A] {
+ private[this] val len = names.length
+ private[this] val matrix1 = new StringMatrix(names1, aliases1)
+ private[this] val matrix2 = new StringMatrix(names2, aliases2)
+ private[this] val spans = names.map(JsonError.ObjectAccess)
+ private[this] val defaults = ctx.parameters.map(_.evaluateDefault.orNull).toArray
+ private[this] lazy val tcs = ctx.parameters.map(_.typeclass).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
+ private[this] lazy val namesMap = (names.zipWithIndex ++ aliases).toMap
+ private[this] val explicitEmptyCollections =
+ ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections =>
+ a.decoding
+ }.getOrElse(config.explicitEmptyCollections.decoding)
+ private[this] val missingValueDecoder =
+ if (explicitEmptyCollections) {
+ lazy val missingValueDecoders = tcs.map { d =>
+ if (allowMissingValueDecoder(d)) d
+ else null
+ }
+ (idx: Int, trace: List[JsonError]) => {
+ val trace_ = spans(idx) :: trace
+ val decoder = missingValueDecoders(idx)
+ if (decoder eq null) Lexer.error("missing", trace_)
+ decoder.unsafeDecodeMissing(trace_)
}
+ } else { (idx: Int, trace: List[JsonError]) =>
+ tcs(idx).unsafeDecodeMissing(spans(idx) :: trace)
+ }
- ctx.rawConstruct(new ArraySeq(ps))
+ @tailrec
+ private[this] def allowMissingValueDecoder(d: JsonDecoder[_]): Boolean = d match {
+ case _: OptionJsonDecoder[_] => true
+ case _: CollectionJsonDecoder[_] => !explicitEmptyCollections
+ case d: MappedJsonDecoder[_] => allowMissingValueDecoder(d.underlying)
+ case _ => true
+ }
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+ override def unsafeDecodeMissing(trace: List[JsonError]): A = {
+ val ps = new Array[Any](len)
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
+ }
+ idx += 1
+ }
+ ctx.rawConstruct(new ArraySeq(ps))
}
+
+ override def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
+
+ // TODO it would be more efficient to have a solution that didn't box
+ // primitives, but Magnolia does not expose an API for that. Adding
+ // such a feature to Magnolia is the only way to avoid this, e.g. a
+ // ctx.createMutableCons that specialises on the types (with some way
+ // of noting that things have been initialised), which can be called
+ // to instantiate the case class. Would also require JsonDecoder to be
+ // specialised.
+ val ps = new Array[Any](len)
+ if (Lexer.firstField(trace, in)) {
+ do {
+ val idx = Lexer.field128(trace, in, matrix1, matrix2)
+ if (idx >= 0) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (
+ (default eq null) || in.nextNonWhitespace() != 'n' && {
+ in.retract()
+ true
+ }
+ ) tcs(idx).unsafeDecode(spans(idx) :: trace, in)
+ else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default()
+ else Lexer.error("expected 'null'", spans(idx) :: trace)
+ } else Lexer.error("duplicate", trace)
+ } else if (no_extra) Lexer.error("invalid extra field", trace)
+ else Lexer.skipValue(trace, in)
+ } while (Lexer.nextField(trace, in))
+ }
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
+ }
+ idx += 1
+ }
+ ctx.rawConstruct(new ArraySeq(ps))
+ }
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ val ps = new Array[Any](len)
+ o.fields.foreach { kv =>
+ namesMap.get(kv._1) match {
+ case Some(idx) =>
+ if (ps(idx) != null) Lexer.error("duplicate", trace)
+ val default = defaults(idx)
+ ps(idx) =
+ if ((default ne null) && (kv._2 eq Json.Null)) default()
+ else tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2)
+ case _ =>
+ if (no_extra) Lexer.error("invalid extra field", trace)
+ }
+ }
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
+ }
+ idx += 1
+ }
+ ctx.rawConstruct(new ArraySeq(ps))
+ case _ => Lexer.error("expected object", trace)
+ }
+ }
}
+ }
}
def split[A](ctx: SealedTrait[JsonDecoder, A])(implicit config: JsonCodecConfiguration): JsonDecoder[A] = {
- val names: Array[String] = ctx.subtypes.map { p =>
- p.annotations.collectFirst { case jsonHint(name) =>
- name
- }.getOrElse(p.typeName.short)
+ val jsonHintFormat =
+ ctx.annotations.collectFirst { case jsonHintNames(format) => format }.getOrElse(config.sumTypeMapping)
+ val names = ctx.subtypes.map { p =>
+ p.annotations.collectFirst { case jsonHint(name) => name }.getOrElse(jsonHintFormat(p.typeName.short))
}.toArray
- val matrix: StringMatrix = new StringMatrix(names)
- lazy val tcs: Array[JsonDecoder[Any]] =
- ctx.subtypes.map(_.typeclass).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
- lazy val namesMap: Map[String, Int] = names.zipWithIndex.toMap
-
- def discrim =
+ if (names.distinct.length != names.length) {
+ val typeName = ctx.typeName.full
+ val collisions = names.groupBy(identity).collect { case (n, ns) if ns.lengthCompare(1) > 0 => n }.mkString(",")
+ throw new AssertionError(s"Case names in ADT $typeName must be distinct, name(s) $collisions are duplicated")
+ }
+ val (names1, names2) = names.splitAt(64)
+ val matrix1 = new StringMatrix(names1)
+ val matrix2 =
+ if (names2.isEmpty) null
+ else new StringMatrix(names2)
+ lazy val tcs = ctx.subtypes.map(_.typeclass).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
+ lazy val namesMap = names.zipWithIndex.toMap
+ val discrim =
ctx.annotations.collectFirst { case jsonDiscriminator(n) => n }.orElse(config.sumTypeHandling.discriminatorField)
- if (discrim.isEmpty)
- new JsonDecoder[A] {
- val spans: Array[JsonError] = names.map(JsonError.ObjectAccess(_))
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- Lexer.char(trace, in, '{')
- // we're not allowing extra fields in this encoding
- if (Lexer.firstField(trace, in)) {
- val field = Lexer.field(trace, in, matrix)
- if (field != -1) {
- val trace_ = spans(field) :: trace
- val a = tcs(field).unsafeDecode(trace_, in).asInstanceOf[A]
- Lexer.char(trace, in, '}')
- a
- } else
- throw UnsafeJson(
- JsonError.Message("invalid disambiguator") :: trace
- )
- } else
- throw UnsafeJson(
- JsonError.Message("expected non-empty object") :: trace
- )
- }
+ lazy val isEnumeration = config.enumValuesAsStrings &&
+ ctx.subtypes.forall(_.typeclass.isInstanceOf[CaseObjectDecoder[JsonDecoder, _]])
+ if (discrim.isEmpty && isEnumeration) {
+ if (names.length <= 64) {
+ new JsonDecoder[A] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val idx = Lexer.enumeration(trace, in, matrix1)
+ if (idx >= 0) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ else Lexer.error("invalid enumeration value", trace)
+ }
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
- json match {
- case Json.Obj(chunk) if chunk.size == 1 =>
- val (key, inner) = chunk.head
- namesMap.get(key) match {
- case Some(idx) =>
- tcs(idx).unsafeFromJsonAST(JsonError.ObjectAccess(key) :: trace, inner).asInstanceOf[A]
- case None => throw UnsafeJson(JsonError.Message("Invalid disambiguator") :: trace)
- }
- case Json.Obj(_) => throw UnsafeJson(JsonError.Message("Not an object with a single field") :: trace)
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case s: Json.Str =>
+ namesMap.get(s.value) match {
+ case Some(idx) => tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ case _ => Lexer.error("invalid enumeration value", trace)
+ }
+ case _ => Lexer.error("expected string", trace)
+ }
+ }
+ } else {
+ new JsonDecoder[A] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val idx = Lexer.enumeration128(trace, in, matrix1, matrix2)
+ if (idx >= 0) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ else Lexer.error("invalid enumeration value", trace)
}
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case s: Json.Str =>
+ namesMap.get(s.value) match {
+ case Some(idx) => tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ case _ => Lexer.error("invalid enumeration value", trace)
+ }
+ case _ => Lexer.error("expected string", trace)
+ }
+ }
}
- else
- new JsonDecoder[A] {
- val hintfield = discrim.get
- val hintmatrix = new StringMatrix(Array(hintfield))
- val spans: Array[JsonError] = names.map(JsonError.Message(_))
-
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- val in_ = internal.RecordingReader(in)
- Lexer.char(trace, in_, '{')
- if (Lexer.firstField(trace, in_))
- do {
- if (Lexer.field(trace, in_, hintmatrix) != -1) {
- val field = Lexer.enumeration(trace, in_, matrix)
- if (field == -1)
- throw UnsafeJson(
- JsonError.Message(s"invalid disambiguator") :: trace
- )
- in_.rewind()
- val trace_ = spans(field) :: trace
- return tcs(field).unsafeDecode(trace_, in_).asInstanceOf[A]
- } else
- Lexer.skipValue(trace, in_)
- } while (Lexer.nextField(trace, in_))
-
- throw UnsafeJson(
- JsonError.Message(s"missing hint '$hintfield'") :: trace
- )
+ } else if (discrim.isEmpty) {
+ // We're not allowing extra fields in this encoding
+ if (names.length <= 64) {
+ new JsonDecoder[A] {
+ private[this] val spans = names.map(JsonError.ObjectAccess)
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
+ if (Lexer.firstField(trace, in)) {
+ val idx = Lexer.field(trace, in, matrix1)
+ if (idx >= 0) {
+ val a = tcs(idx).unsafeDecode(spans(idx) :: trace, in).asInstanceOf[A]
+ Lexer.char(trace, in, '}')
+ a
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.error("expected non-empty object", trace)
+ }
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj if o.fields.length == 1 =>
+ val kv = o.fields(0)
+ namesMap.get(kv._1) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
+ }
+ case _ => Lexer.error("expected single field object", trace)
+ }
}
+ } else {
+ new JsonDecoder[A] {
+ private[this] val spans = names.map(JsonError.ObjectAccess)
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
- json match {
- case Json.Obj(fields) =>
- fields.find { case (k, _) => k == hintfield } match {
- case Some((_, Json.Str(name))) =>
- namesMap.get(name) match {
- case Some(idx) => tcs(idx).unsafeFromJsonAST(trace, json).asInstanceOf[A]
- case None => throw UnsafeJson(JsonError.Message("Invalid disambiguator") :: trace)
- }
- case Some(_) =>
- throw UnsafeJson(JsonError.Message(s"Non-string hint '$hintfield'") :: trace)
- case None =>
- throw UnsafeJson(JsonError.Message(s"Missing hint '$hintfield'") :: trace)
- }
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
+ if (Lexer.firstField(trace, in)) {
+ val idx = Lexer.field128(trace, in, matrix1, matrix2)
+ if (idx >= 0) {
+ val a = tcs(idx).unsafeDecode(spans(idx) :: trace, in).asInstanceOf[A]
+ Lexer.char(trace, in, '}')
+ a
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.error("expected non-empty object", trace)
}
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj if o.fields.length == 1 =>
+ val kv = o.fields(0)
+ namesMap.get(kv._1) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
+ }
+ case _ => Lexer.error("expected single field object", trace)
+ }
+ }
}
+ } else {
+ if (names.length <= 64) {
+ new JsonDecoder[A] {
+ private[this] val hintfield = discrim.get
+ private[this] val hintmatrix = new StringMatrix(Array(hintfield))
+ private[this] val spans = names.map(JsonError.Message)
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val in_ = RecordingReader(in)
+ Lexer.char(trace, in_, '{')
+ if (Lexer.firstField(trace, in_)) {
+ do {
+ if (Lexer.field(trace, in_, hintmatrix) >= 0) {
+ val idx = Lexer.enumeration(trace, in_, matrix1)
+ if (idx >= 0) {
+ in_.rewind()
+ return tcs(idx).unsafeDecode(spans(idx) :: trace, in_).asInstanceOf[A]
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.skipValue(trace, in_)
+ } while (Lexer.nextField(trace, in_))
+ }
+ Lexer.error(s"missing hint '$hintfield'", trace)
+ }
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ o.fields.collectFirst {
+ case kv if kv._1 == hintfield && kv._2.isInstanceOf[Json.Str] =>
+ kv._2.asInstanceOf[Json.Str].value
+ } match {
+ case Some(name) =>
+ namesMap.get(name) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, json).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
+ }
+ case _ => Lexer.error(s"missing hint '$hintfield'", trace)
+ }
+ case _ => Lexer.error("expected object", trace)
+ }
+ }
+ } else {
+ new JsonDecoder[A] {
+ private[this] val hintfield = discrim.get
+ private[this] val hintmatrix = new StringMatrix(Array(hintfield))
+ private[this] val spans = names.map(JsonError.Message)
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val in_ = RecordingReader(in)
+ Lexer.char(trace, in_, '{')
+ if (Lexer.firstField(trace, in_)) {
+ do {
+ if (Lexer.field(trace, in_, hintmatrix) >= 0) {
+ val idx = Lexer.enumeration128(trace, in_, matrix1, matrix2)
+ if (idx >= 0) {
+ in_.rewind()
+ return tcs(idx).unsafeDecode(spans(idx) :: trace, in_).asInstanceOf[A]
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.skipValue(trace, in_)
+ } while (Lexer.nextField(trace, in_))
+ }
+ Lexer.error(s"missing hint '$hintfield'", trace)
+ }
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ o.fields.collectFirst {
+ case kv if kv._1 == hintfield && kv._2.isInstanceOf[Json.Str] =>
+ kv._2.asInstanceOf[Json.Str].value
+ } match {
+ case Some(name) =>
+ namesMap.get(name) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, json).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
+ }
+ case _ => Lexer.error(s"missing hint '$hintfield'", trace)
+ }
+ case _ => Lexer.error("expected object", trace)
+ }
+ }
+ }
+ }
}
def gen[A]: JsonDecoder[A] = macro Magnolia.gen[A]
}
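
When `enumValuesAsStrings` is enabled and every subtype of the sealed trait derives to a `CaseObjectDecoder`, the decoder above takes the enumeration branch and accepts bare JSON strings instead of wrapper objects. A hedged, decode-only sketch with a hypothetical enumeration:

```scala
import zio.json._

// Hypothetical enumeration-style ADT (all case objects).
sealed trait Color
case object Red   extends Color
case object Green extends Color
case object Blue  extends Color

object Color {
  implicit val config: JsonCodecConfiguration =
    JsonCodecConfiguration(enumValuesAsStrings = true)

  implicit val decoder: JsonDecoder[Color] = DeriveJsonDecoder.gen[Color]
}

// With the flag on, the derived decoder reads bare strings:
val c: Either[String, Color] = "\"Green\"".fromJson[Color] // Right(Green)
```
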
object DeriveJsonEncoder {
+ private lazy val caseObjectEncoder = new JsonEncoder[Any] {
+ override def isEmpty(a: Any): Boolean = true
+
+ def unsafeEncode(a: Any, indent: Option[Int], out: Write): Unit = out.write("{}")
+
+ override final def toJsonAST(a: Any): Either[String, Json] = new Right(Json.Obj.empty)
+ }
+
type Typeclass[A] = JsonEncoder[A]
def join[A](ctx: CaseClass[JsonEncoder, A])(implicit config: JsonCodecConfiguration): JsonEncoder[A] =
- if (ctx.parameters.isEmpty)
- new JsonEncoder[A] {
- def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = out.write("{}")
-
- override final def toJsonAST(a: A): Either[String, Json] =
- Right(Json.Obj(Chunk.empty))
- }
- else
+ if (ctx.parameters.isEmpty) caseObjectEncoder.narrow[A]
+ else {
+ val nameTransform =
+ ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping)
+ val explicitNulls = config.explicitNulls || ctx.annotations.exists(_.isInstanceOf[jsonExplicitNull])
+ val explicitEmptyCollections = ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections => a.encoding }
+ .getOrElse(config.explicitEmptyCollections.encoding)
+ val params = ctx.parameters.filter(p => p.annotations.collectFirst { case _: jsonExclude => () }.isEmpty).toArray
new JsonEncoder[A] {
- val (transformNames, nameTransform): (Boolean, String => String) =
- ctx.annotations.collectFirst { case jsonMemberNames(format) => format }
- .orElse(Some(config.fieldNameMapping))
- .filter(_ != IdentityFormat)
- .map(true -> _)
- .getOrElse(false -> identity)
-
- val params = ctx.parameters
- .filter(p => p.annotations.collectFirst { case _: jsonExclude => () }.isEmpty)
- .toArray
-
- val names: Array[String] = params.map { p =>
- p.annotations.collectFirst { case jsonField(name) =>
- name
- }.getOrElse(if (transformNames) nameTransform(p.label) else p.label)
+ private[this] lazy val fields = params.map { p =>
+ FieldEncoder(
+ p = p,
+ name = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label)),
+ encoder = p.typeclass.asInstanceOf[JsonEncoder[Any]],
+ withExplicitNulls = explicitNulls || p.annotations.exists(_.isInstanceOf[jsonExplicitNull]),
+ withExplicitEmptyCollections = p.annotations.collectFirst { case a: jsonExplicitEmptyCollections =>
+ a.encoding
+ }.getOrElse(explicitEmptyCollections)
+ )
}
- lazy val tcs: Array[JsonEncoder[Any]] = params.map(p => p.typeclass.asInstanceOf[JsonEncoder[Any]])
- val len: Int = params.length
+
def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
- var i = 0
- out.write("{")
+ out.write('{')
val indent_ = JsonEncoder.bump(indent)
- JsonEncoder.pad(indent_, out)
-
- var prevFields = false // whether any fields have been written
- while (i < len) {
- val tc = tcs(i)
- val p = params(i).dereference(a)
- if (!tc.isNothing(p)) {
- // if we have at least one field already, we need a comma
- if (prevFields) {
- if (indent.isEmpty) out.write(",")
- else {
- out.write(",")
- JsonEncoder.pad(indent_, out)
- }
- }
- JsonEncoder.string.unsafeEncode(names(i), indent_, out)
- if (indent.isEmpty) out.write(":")
- else out.write(" : ")
- tc.unsafeEncode(p, indent_, out)
- prevFields = true // record that we have at least one field so far
+ val fields = this.fields
+ var idx = 0
+ var comma = false
+ while (idx < fields.length) {
+ val field = fields(idx)
+ idx += 1
+ val p = field.p.dereference(a)
+ if (field.skip(p)) ()
+ else {
+ if (comma) out.write(',')
+ else comma = true
+ JsonEncoder.pad(indent_, out)
+ out.write(if (indent eq None) field.encodedName else field.prettyEncodedName)
+ field.encoder.unsafeEncode(p, indent_, out)
}
- i += 1
}
JsonEncoder.pad(indent, out)
- out.write("}")
+ out.write('}')
}
- override final def toJsonAST(a: A): Either[String, Json] =
- ctx.parameters
- .foldLeft[Either[String, Chunk[(String, Json)]]](Right(Chunk.empty)) { case (c, param) =>
- val name = param.annotations.collectFirst { case jsonField(name) =>
- name
- }.getOrElse(nameTransform(param.label))
- c.flatMap { chunk =>
- param.typeclass.toJsonAST(param.dereference(a)).map { value =>
- if (value == Json.Null) chunk
- else chunk :+ name -> value
- }
+ override final def toJsonAST(a: A): Either[String, Json] = {
+ val fields = this.fields
+ var buf = new Array[(String, Json)](fields.length)
+ var i, idx = 0
+ while (idx < fields.length) {
+ val field = fields(idx)
+ idx += 1
+ val p = field.p.dereference(a)
+ if (field.skip(p)) ()
+ else {
+ field.encoder.toJsonAST(p) match {
+ case Right(value) =>
+ buf(i) = (field.name, value)
+ i += 1
+ case left =>
+ return left
}
}
- .map(Json.Obj.apply)
+ }
+ if (i != buf.length) buf = java.util.Arrays.copyOf(buf, i)
+ new Right(Json.Obj(Chunk.fromArray(buf)))
+ }
}
+ }
def split[A](ctx: SealedTrait[JsonEncoder, A])(implicit config: JsonCodecConfiguration): JsonEncoder[A] = {
+ val jsonHintFormat: JsonMemberFormat =
+ ctx.annotations.collectFirst { case jsonHintNames(format) => format }.getOrElse(config.sumTypeMapping)
val names: Array[String] = ctx.subtypes.map { p =>
- p.annotations.collectFirst { case jsonHint(name) =>
- name
- }.getOrElse(p.typeName.short)
+ p.annotations.collectFirst { case jsonHint(name) => name }.getOrElse(jsonHintFormat(p.typeName.short))
}.toArray
- def discrim =
+ val encodedNames: Array[String] = names.map(name => JsonEncoder.string.encodeJson(name, None).toString)
+ lazy val tcs = ctx.subtypes.map(_.typeclass).toArray.asInstanceOf[Array[JsonEncoder[Any]]]
+ val discrim =
ctx.annotations.collectFirst { case jsonDiscriminator(n) => n }.orElse(config.sumTypeHandling.discriminatorField)
- if (discrim.isEmpty)
+ lazy val isEnumeration = config.enumValuesAsStrings &&
+ ctx.subtypes.forall(_.typeclass == caseObjectEncoder)
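+ // When every subtype's derived encoder is the shared caseObjectEncoder (i.e. all subtypes are
+ // parameterless) and enumValuesAsStrings is enabled, the whole ADT is encoded as a bare JSON
+ // string holding the (possibly hint-transformed) subtype name.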
+ if (discrim.isEmpty && isEnumeration) {
+ new JsonEncoder[A] {
+ private[this] val casts = ctx.subtypes.map(_.cast).toArray
+
+ def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ out.write(encodedNames(idx))
+ }
+
+ override final def toJsonAST(a: A): Either[String, Json] = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ new Right(new Json.Str(names(idx)))
+ }
+ }
+ } else if (discrim.isEmpty) {
new JsonEncoder[A] {
- def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = ctx.split(a) { sub =>
- out.write("{")
+ private[this] val casts = ctx.subtypes.map(_.cast).toArray
+
+ def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ out.write('{')
val indent_ = JsonEncoder.bump(indent)
JsonEncoder.pad(indent_, out)
- JsonEncoder.string.unsafeEncode(names(sub.index), indent_, out)
- if (indent.isEmpty) out.write(":")
+ out.write(encodedNames(idx))
+ if (indent eq None) out.write(':')
else out.write(" : ")
- sub.typeclass.unsafeEncode(sub.cast(a), indent_, out)
+ tcs(idx).unsafeEncode(a, indent_, out)
JsonEncoder.pad(indent, out)
- out.write("}")
+ out.write('}')
}
- override def toJsonAST(a: A): Either[String, Json] =
- ctx.split(a) { sub =>
- sub.typeclass.toJsonAST(sub.cast(a)).map { inner =>
- Json.Obj(
- Chunk(
- names(sub.index) -> inner
- )
- )
- }
- }
+ override def toJsonAST(a: A): Either[String, Json] = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ tcs(idx).toJsonAST(a).map(inner => new Json.Obj(Chunk(names(idx) -> inner)))
+ }
}
- else
+ } else {
new JsonEncoder[A] {
- val hintfield = discrim.get
- def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = ctx.split(a) { sub =>
- out.write("{")
+ private[this] val casts = ctx.subtypes.map(_.cast).toArray
+ private[this] val hintFieldName = discrim.get
+ private[this] val encodedHintFieldName = JsonEncoder.string.encodeJson(hintFieldName, None).toString
+
+ def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ out.write('{')
val indent_ = JsonEncoder.bump(indent)
JsonEncoder.pad(indent_, out)
- JsonEncoder.string.unsafeEncode(hintfield, indent_, out)
- if (indent.isEmpty) out.write(":")
+ out.write(encodedHintFieldName)
+ if (indent eq None) out.write(':')
else out.write(" : ")
- JsonEncoder.string.unsafeEncode(names(sub.index), indent_, out)
-
+ out.write(encodedNames(idx))
// whitespace is always off by 2 spaces at the end, probably not worth fixing
- val intermediate = new NestedWriter(out, indent_)
- sub.typeclass.unsafeEncode(sub.cast(a), indent, intermediate)
+ tcs(idx).unsafeEncode(a, indent, new NestedWriter(out, indent_))
}
- override def toJsonAST(a: A): Either[String, Json] =
- ctx.split(a) { sub =>
- sub.typeclass.toJsonAST(sub.cast(a)).flatMap {
- case Json.Obj(fields) => Right(Json.Obj(fields :+ hintfield -> Json.Str(names(sub.index))))
- case _ => Left("Subtype is not encoded as an object")
- }
+ override final def toJsonAST(a: A): Either[String, Json] = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ tcs(idx).toJsonAST(a).flatMap {
+ case o: Json.Obj =>
+ val hintField = hintFieldName -> new Json.Str(names(idx))
+ new Right(new Json.Obj(hintField +: o.fields)) // hint field is always first
+ case _ =>
+ new Left("expected object")
}
+ }
}
-
+ }
}
def gen[A]: JsonEncoder[A] = macro Magnolia.gen[A]
@@ -660,35 +895,159 @@ object DeriveJsonEncoder {
// backcompat for 2.12, otherwise we'd use ArraySeq.unsafeWrapArray
private final class ArraySeq(p: Array[Any]) extends IndexedSeq[Any] {
- def apply(i: Int): Any = p(i)
- def length: Int = p.length
+ @inline def apply(i: Int): Any = p(i)
+ @inline def length: Int = p.length
}
// intercepts the first `{` of a nested writer and discards it. We also need to
// inject a `,` unless an empty object `{}` has been written.
private[this] final class NestedWriter(out: Write, indent: Option[Int]) extends Write {
- private[this] var first, second = true
-
- def write(c: Char): Unit = write(c.toString) // could be optimised
-
- def write(s: String): Unit =
- if (first || second) {
- var i = 0
- while (i < s.length) {
- val c = s.charAt(i)
- if (c == ' ' || c == '\n') {} else if (first && c == '{') {
- first = false
- } else if (second) {
- second = false
+ private[this] var state = 2
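+ // state 2: the nested object's opening '{' has not been seen yet and is dropped when it arrives;
+ // state 1: '{' was dropped, the next significant character decides whether to inject a ','
+ //          (no comma when the nested object is empty, i.e. the next character is '}');
+ // state 0: pass everything through to the underlying writer unchanged.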
+
+ @inline def write(c: Char): Unit =
+ if (state == 0) out.write(c)
+ else nonZeroStateWrite(c)
+
+ @noinline private[this] def nonZeroStateWrite(c: Char): Unit =
+ if (c != ' ' && c != '\n') {
+ if (state == 2) {
+ if (c == '{') state = 1
+ } else {
+ state = 0
+ if (c != '}') {
+ out.write(',')
+ JsonEncoder.pad(indent, out)
+ }
+ out.write(c)
+ }
+ }
+
+ @inline def write(s: String): Unit =
+ if (state == 0) out.write(s)
+ else nonZeroStateWrite(s)
+
+ @noinline private[this] def nonZeroStateWrite(s: String): Unit = {
+ var i = 0
+ while (i < s.length) {
+ val c = s.charAt(i)
+ if (c != ' ' && c != '\n') {
+ if (state == 2) {
+ if (c == '{') state = 1
+ } else {
+ state = 0
if (c != '}') {
out.write(',')
JsonEncoder.pad(indent, out)
}
- return out.write(s.substring(i))
+ while (i < s.length) {
+ out.write(s.charAt(i))
+ i += 1
+ }
+ return
+ }
+ }
+ i += 1
+ }
+ }
+
+ @inline override def write(cs: Array[Char], from: Int, to: Int): Unit =
+ if (state == 0) out.write(cs, from, to)
+ else nonZeroStateWrite(cs, from, to)
+
+ @noinline def nonZeroStateWrite(cs: Array[Char], from: Int, to: Int): Unit = {
+ var i = from
+ while (i < to) {
+ val c = cs(i)
+ if (c != ' ' && c != '\n') {
+ if (state == 2) {
+ if (c == '{') state = 1
+ } else {
+ state = 0
+ if (c != '}') {
+ out.write(',')
+ JsonEncoder.pad(indent, out)
+ }
+ out.write(cs, i, to)
+ return
}
- i += 1
}
- } else out.write(s)
+ i += 1
+ }
+ }
+
+ @inline override def write(c1: Char, c2: Char): Unit =
+ if (state == 0) out.write(c1, c2)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char): Unit =
+ if (state == 0) out.write(c1, c2, c3)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ nonZeroStateWrite(c3)
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char, c4: Char): Unit =
+ if (state == 0) out.write(c1, c2, c3, c4)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ nonZeroStateWrite(c3)
+ nonZeroStateWrite(c4)
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char, c4: Char, c5: Char): Unit =
+ if (state == 0) out.write(c1, c2, c3, c4, c5)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ nonZeroStateWrite(c3)
+ nonZeroStateWrite(c4)
+ nonZeroStateWrite(c5)
+ }
+
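+ // Each Short below is treated as two packed characters (low byte first): in the
+ // non-pass-through states it is unpacked and routed through the same character-level state machine.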
+ @inline override def write(s: Short): Unit =
+ if (state == 0) out.write(s)
+ else {
+ nonZeroStateWrite((s & 0xff).toChar)
+ nonZeroStateWrite((s >> 8).toChar)
+ }
+
+ @inline override def write(s1: Short, s2: Short): Unit =
+ if (state == 0) out.write(s1, s2)
+ else {
+ nonZeroStateWrite((s1 & 0xff).toChar)
+ nonZeroStateWrite((s1 >> 8).toChar)
+ nonZeroStateWrite((s2 & 0xff).toChar)
+ nonZeroStateWrite((s2 >> 8).toChar)
+ }
+
+ @inline override def write(s1: Short, s2: Short, s3: Short): Unit =
+ if (state == 0) out.write(s1, s2, s3)
+ else {
+ nonZeroStateWrite((s1 & 0xff).toChar)
+ nonZeroStateWrite((s1 >> 8).toChar)
+ nonZeroStateWrite((s2 & 0xff).toChar)
+ nonZeroStateWrite((s2 >> 8).toChar)
+ nonZeroStateWrite((s3 & 0xff).toChar)
+ nonZeroStateWrite((s3 >> 8).toChar)
+ }
+
+ @inline override def write(s1: Short, s2: Short, s3: Short, s4: Short): Unit =
+ if (state == 0) out.write(s1, s2, s3, s4)
+ else {
+ nonZeroStateWrite((s1 & 0xff).toChar)
+ nonZeroStateWrite((s1 >> 8).toChar)
+ nonZeroStateWrite((s2 & 0xff).toChar)
+ nonZeroStateWrite((s2 >> 8).toChar)
+ nonZeroStateWrite((s3 & 0xff).toChar)
+ nonZeroStateWrite((s3 >> 8).toChar)
+ nonZeroStateWrite((s4 & 0xff).toChar)
+ nonZeroStateWrite((s4 >> 8).toChar)
+ }
}
object DeriveJsonCodec {
diff --git a/zio-json/shared/src/main/scala-3/zio/json/JsonCodecConfiguration.scala b/zio-json/shared/src/main/scala-3/zio/json/JsonCodecConfiguration.scala
new file mode 100644
index 000000000..4765f3a1c
--- /dev/null
+++ b/zio-json/shared/src/main/scala-3/zio/json/JsonCodecConfiguration.scala
@@ -0,0 +1,188 @@
+package zio.json
+
+import zio.json.JsonCodecConfiguration.SumTypeHandling
+import zio.json.JsonCodecConfiguration.SumTypeHandling.WrapperWithClassNameField
+
+/**
+ * When disabled for encoding, keys with empty collections will be omitted from the JSON. When disabled for decoding,
+ * missing keys will default to empty collections.
+ */
+case class ExplicitEmptyCollections(encoding: Boolean = true, decoding: Boolean = true)
+
+/**
+ * Implicit codec derivation configuration.
+ *
+ * @param sumTypeHandling
+ * see [[jsonDiscriminator]]
+ * @param fieldNameMapping
+ * see [[jsonMemberNames]]
+ * @param allowExtraFields
+ * see [[jsonNoExtraFields]]
+ * @param sumTypeMapping
+ * see [[jsonHintNames]]
+ * @param explicitNulls
+ * turns on explicit serialization of optional fields with None values
+ * @param explicitEmptyCollections
+ * turns on explicit serialization of fields with empty collections
+ * @param enumValuesAsStrings
+ * turns on serialization of enum values and sealed trait's case objects as strings
+ */
+final case class JsonCodecConfiguration(
+ sumTypeHandling: SumTypeHandling = WrapperWithClassNameField,
+ fieldNameMapping: JsonMemberFormat = IdentityFormat,
+ allowExtraFields: Boolean = true,
+ sumTypeMapping: JsonMemberFormat = IdentityFormat,
+ explicitNulls: Boolean = false,
+ explicitEmptyCollections: ExplicitEmptyCollections = ExplicitEmptyCollections(),
+ enumValuesAsStrings: Boolean = true
+) {
+ def this(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean,
+ explicitEmptyCollections: ExplicitEmptyCollections
+ ) = this(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ true
+ )
+
+ def this(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean
+ ) = this(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ ExplicitEmptyCollections(),
+ true
+ )
+
+ def copy(
+ sumTypeHandling: SumTypeHandling = WrapperWithClassNameField.asInstanceOf[SumTypeHandling],
+ fieldNameMapping: JsonMemberFormat = IdentityFormat.asInstanceOf[JsonMemberFormat],
+ allowExtraFields: Boolean = true,
+ sumTypeMapping: JsonMemberFormat = IdentityFormat.asInstanceOf[JsonMemberFormat],
+ explicitNulls: Boolean = false,
+ explicitEmptyCollections: ExplicitEmptyCollections = ExplicitEmptyCollections(),
+ enumValuesAsStrings: Boolean = true
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ enumValuesAsStrings
+ )
+
+ def copy(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean,
+ explicitEmptyCollections: ExplicitEmptyCollections
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ this.enumValuesAsStrings
+ )
+
+ def copy(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ this.explicitEmptyCollections,
+ this.enumValuesAsStrings
+ )
+}
+
+object JsonCodecConfiguration {
+ def apply(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean,
+ explicitEmptyCollections: ExplicitEmptyCollections
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ explicitEmptyCollections,
+ true
+ )
+
+ def apply(
+ sumTypeHandling: SumTypeHandling,
+ fieldNameMapping: JsonMemberFormat,
+ allowExtraFields: Boolean,
+ sumTypeMapping: JsonMemberFormat,
+ explicitNulls: Boolean
+ ) = new JsonCodecConfiguration(
+ sumTypeHandling,
+ fieldNameMapping,
+ allowExtraFields,
+ sumTypeMapping,
+ explicitNulls,
+ ExplicitEmptyCollections(),
+ true
+ )
+
+ implicit val default: JsonCodecConfiguration = JsonCodecConfiguration()
+
+ sealed trait SumTypeHandling {
+ def discriminatorField: Option[String]
+ }
+
+ object SumTypeHandling {
+
+ /**
+ * Use an object with a single key that is the class name.
+ */
+ case object WrapperWithClassNameField extends SumTypeHandling {
+ override def discriminatorField: Option[String] = None
+ }
+
+ /**
+ * For sealed classes, will determine the name of the field for disambiguating classes.
+ *
+ * The default is to not use a typehint field and instead have an object with a single key that is the class name.
+ * See [[WrapperWithClassNameField]].
+ *
+ * Note that using a discriminator is less performant, uses more memory, and may be prone to DOS attacks that are
+ * impossible with the default encoding. In addition, there is slightly less type safety when using custom product
+ * encoders (which must write an unenforced object type). Only use this option if you must model an externally
+ * defined schema.
+ */
+ final case class DiscriminatorField(name: String) extends SumTypeHandling {
+ override def discriminatorField: Option[String] = Some(name)
+ }
+ }
+}
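+
+// Usage sketch (illustrative only; `UserProfile` and its fields are hypothetical names): a custom
+// configuration in implicit scope is picked up by `derived`/`DeriveJsonCodec.gen` instead of the
+// default defined above.
+//
+//   given JsonCodecConfiguration = JsonCodecConfiguration(
+//     sumTypeHandling = SumTypeHandling.DiscriminatorField("type"),
+//     fieldNameMapping = KebabCase
+//   )
+//
+//   final case class UserProfile(firstName: String, lastName: String) derives JsonCodec
+//   // encodes as {"first-name": ..., "last-name": ...}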
diff --git a/zio-json/shared/src/main/scala-3/zio/json/JsonCodecVersionSpecific.scala b/zio-json/shared/src/main/scala-3/zio/json/JsonCodecVersionSpecific.scala
index 44bd2673d..acfead58b 100644
--- a/zio-json/shared/src/main/scala-3/zio/json/JsonCodecVersionSpecific.scala
+++ b/zio-json/shared/src/main/scala-3/zio/json/JsonCodecVersionSpecific.scala
@@ -1,5 +1,14 @@
package zio.json
-trait JsonCodecVersionSpecific {
- inline def derived[A: deriving.Mirror.Of]: JsonCodec[A] = DeriveJsonCodec.gen[A]
+import scala.collection.immutable
+
+private[json] trait JsonCodecVersionSpecific {
+ implicit def arraySeq[A: JsonEncoder: JsonDecoder: reflect.ClassTag]: JsonCodec[immutable.ArraySeq[A]] =
+ JsonCodec(JsonEncoder.arraySeq[A], JsonDecoder.arraySeq[A])
+
+ inline def derived[A: deriving.Mirror.Of](using config: JsonCodecConfiguration): JsonCodec[A] = DeriveJsonCodec.gen[A]
+
+ implicit def iArray[A: JsonEncoder: JsonDecoder: reflect.ClassTag]: JsonCodec[IArray[A]] =
+ JsonCodec(JsonEncoder.iArray[A], JsonDecoder.iArray[A])
+
}
diff --git a/zio-json/shared/src/main/scala-3/zio/json/JsonDecoderVersionSpecific.scala b/zio-json/shared/src/main/scala-3/zio/json/JsonDecoderVersionSpecific.scala
index 233e6ac9a..0d92dae89 100644
--- a/zio-json/shared/src/main/scala-3/zio/json/JsonDecoderVersionSpecific.scala
+++ b/zio-json/shared/src/main/scala-3/zio/json/JsonDecoderVersionSpecific.scala
@@ -1,5 +1,36 @@
package zio.json
-trait JsonDecoderVersionSpecific {
- inline def derived[A: deriving.Mirror.Of]: JsonDecoder[A] = DeriveJsonDecoder.gen[A]
+import zio.json.JsonDecoder.JsonError
+import zio.json.internal.RetractReader
+
+import scala.collection.immutable
+import scala.compiletime.*
+import scala.compiletime.ops.any.IsConst
+
+private[json] trait JsonDecoderVersionSpecific {
+ implicit def arraySeq[A: JsonDecoder: reflect.ClassTag]: JsonDecoder[immutable.ArraySeq[A]] =
+ new CollectionJsonDecoder[immutable.ArraySeq[A]] {
+ private[this] val arrayDecoder = JsonDecoder.array[A]
+
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.ArraySeq[A] = immutable.ArraySeq.empty
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): immutable.ArraySeq[A] =
+ immutable.ArraySeq.unsafeWrapArray(arrayDecoder.unsafeDecode(trace, in))
+ }
+
+ inline def derived[A: deriving.Mirror.Of](using config: JsonCodecConfiguration): JsonDecoder[A] =
+ DeriveJsonDecoder.gen[A]
+
+ implicit def iArray[A](implicit A: JsonDecoder[A], classTag: reflect.ClassTag[A]): JsonDecoder[IArray[A]] =
+ JsonDecoder.array[A].map(IArray.unsafeFromArray)
+
+}
+
+trait DecoderLowPriorityVersionSpecific {
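+ // Illustrative example: for a union of string literal types such as `type Answer = "yes" | "no"`,
+ // this given decodes the matching literal and rejects any other string, listing the expected values.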
+ inline given unionOfStringEnumeration[T](using IsUnionOf[String, T]): JsonDecoder[T] =
+ val values = UnionDerivation.constValueUnionTuple[String, T]
+ JsonDecoder.string.mapOrFail {
+ case raw if values.toList.contains(raw) => Right(raw.asInstanceOf[T])
+ case _ => Left("expected one of: " + values.toList.mkString(", "))
+ }
}
diff --git a/zio-json/shared/src/main/scala-3/zio/json/JsonEncoderVersionSpecific.scala b/zio-json/shared/src/main/scala-3/zio/json/JsonEncoderVersionSpecific.scala
index 7cd37a803..67de4b045 100644
--- a/zio-json/shared/src/main/scala-3/zio/json/JsonEncoderVersionSpecific.scala
+++ b/zio-json/shared/src/main/scala-3/zio/json/JsonEncoderVersionSpecific.scala
@@ -1,5 +1,34 @@
package zio.json
-trait JsonEncoderVersionSpecific {
- inline def derived[A: deriving.Mirror.Of]: JsonEncoder[A] = DeriveJsonEncoder.gen[A]
+import zio.json.ast.Json
+import zio.json.internal.Write
+
+import scala.collection.immutable
+import scala.compiletime.ops.any.IsConst
+
+private[json] trait JsonEncoderVersionSpecific {
+ implicit def arraySeq[A: JsonEncoder: scala.reflect.ClassTag]: JsonEncoder[immutable.ArraySeq[A]] =
+ new JsonEncoder[immutable.ArraySeq[A]] {
+ private[this] val arrayEnc = JsonEncoder.array[A]
+
+ override def isEmpty(as: immutable.ArraySeq[A]): Boolean = as.isEmpty
+
+ def unsafeEncode(as: immutable.ArraySeq[A], indent: Option[Int], out: Write): Unit =
+ arrayEnc.unsafeEncode(as.unsafeArray.asInstanceOf[Array[A]], indent, out)
+
+ override final def toJsonAST(as: immutable.ArraySeq[A]): Either[String, Json] =
+ arrayEnc.toJsonAST(as.unsafeArray.asInstanceOf[Array[A]])
+ }
+
+ inline def derived[A: deriving.Mirror.Of](using config: JsonCodecConfiguration): JsonEncoder[A] =
+ DeriveJsonEncoder.gen[A]
+
+ implicit def iArray[A](implicit A: JsonEncoder[A], classTag: scala.reflect.ClassTag[A]): JsonEncoder[IArray[A]] =
+ JsonEncoder.array[A].contramap[IArray[A]](arr => IArray.genericWrapArray(arr).toArray)
+
+}
+
+private[json] trait EncoderLowPriorityVersionSpecific {
+ inline given unionOfStringEnumeration[T](using IsUnionOf[String, T]): JsonEncoder[T] =
+ JsonEncoder.string.asInstanceOf[JsonEncoder[T]]
}
diff --git a/zio-json/shared/src/main/scala-3/zio/json/JsonFieldDecoder.scala b/zio-json/shared/src/main/scala-3/zio/json/JsonFieldDecoder.scala
new file mode 100644
index 000000000..ac2d2e2dc
--- /dev/null
+++ b/zio-json/shared/src/main/scala-3/zio/json/JsonFieldDecoder.scala
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2019-2022 John A. De Goes and the ZIO Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package zio.json
+
+import zio.json.internal.{ FastStringReader, Lexer }
+import zio.json.uuid.UUIDParser
+
+/** When decoding a JSON Object, only keys whose type provides an instance of this interface are allowed. */
+trait JsonFieldDecoder[+A] {
+ self =>
+
+ final def map[B](f: A => B): JsonFieldDecoder[B] =
+ new JsonFieldDecoder[B] {
+
+ def unsafeDecodeField(trace: List[JsonError], in: String): B =
+ f(self.unsafeDecodeField(trace, in))
+ }
+
+ final def mapOrFail[B](f: A => Either[String, B]): JsonFieldDecoder[B] =
+ new JsonFieldDecoder[B] {
+
+ def unsafeDecodeField(trace: List[JsonError], in: String): B =
+ f(self.unsafeDecodeField(trace, in)) match {
+ case Left(err) => Lexer.error(err, trace)
+ case Right(b) => b
+ }
+ }
+
+ def unsafeDecodeField(trace: List[JsonError], in: String): A
+}
+
+object JsonFieldDecoder extends LowPriorityJsonFieldDecoder {
+ def apply[A](implicit a: JsonFieldDecoder[A]): JsonFieldDecoder[A] = a
+
+ implicit val string: JsonFieldDecoder[String] = new JsonFieldDecoder[String] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): String = in
+ }
+
+ implicit val int: JsonFieldDecoder[Int] = new JsonFieldDecoder[Int] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): Int =
+ try in.toInt
+ catch {
+ case _: NumberFormatException => Lexer.error(s"Invalid Int: ${strip(in)}", trace)
+ }
+ }
+
+ implicit val long: JsonFieldDecoder[Long] = new JsonFieldDecoder[Long] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): Long =
+ try in.toLong
+ catch {
+ case _: NumberFormatException => Lexer.error(s"Invalid Long: ${strip(in)}", trace)
+ }
+ }
+
+ implicit val uuid: JsonFieldDecoder[java.util.UUID] = new JsonFieldDecoder[java.util.UUID] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): java.util.UUID =
+ try UUIDParser.unsafeParse(in)
+ catch {
+ case _: IllegalArgumentException => Lexer.error("expected a UUID", trace)
+ }
+ }
+
+ // FIXME: remove in the next major version
+ private[json] def mapStringOrFail[A](f: String => Either[String, A]): JsonFieldDecoder[A] =
+ new JsonFieldDecoder[A] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): A =
+ f(string.unsafeDecodeField(trace, in)) match {
+ case Left(err) => Lexer.error(err, trace)
+ case Right(value) => value
+ }
+ }
+
+ private[json] def strip(s: String, len: Int = 50): String =
+ if (s.length <= len) s
+ else s.substring(0, len) + "..."
+}
+
+private[json] trait LowPriorityJsonFieldDecoder {
+
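+ // Object keys whose type is a String subtype reuse that type's JsonDecoder by re-parsing
+ // the raw key wrapped in quotes as a JSON string.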
+ implicit def stringLike[T <: String](implicit decoder: JsonDecoder[T]): JsonFieldDecoder[T] =
+ new JsonFieldDecoder[T] {
+ def unsafeDecodeField(trace: List[JsonError], in: String): T =
+ decoder.unsafeDecode(trace, new FastStringReader(s""""$in""""))
+ }
+}
diff --git a/zio-json/shared/src/main/scala-3/zio/json/macros.scala b/zio-json/shared/src/main/scala-3/zio/json/macros.scala
index 31012a6ba..26e17fc9d 100644
--- a/zio-json/shared/src/main/scala-3/zio/json/macros.scala
+++ b/zio-json/shared/src/main/scala-3/zio/json/macros.scala
@@ -1,18 +1,17 @@
package zio.json
-import zio.json.ast.Json
import scala.annotation.*
import magnolia1.*
import scala.deriving.Mirror
import scala.compiletime.*
import scala.reflect.*
import zio.Chunk
-
-import zio.json.JsonDecoder.{ JsonError, UnsafeJson }
+import zio.json.JsonDecoder.JsonError
import zio.json.ast.Json
-import zio.json.internal.{ Lexer, RetractReader, StringMatrix, Write }
+import zio.json.internal.{ FieldEncoder, Lexer, RecordingReader, RetractReader, StringMatrix, Write }
import scala.annotation._
+import scala.collection.Factory
import scala.collection.mutable
import scala.language.experimental.macros
@@ -27,6 +26,16 @@ final case class jsonField(name: String) extends Annotation
*/
final case class jsonAliases(alias: String, aliases: String*) extends Annotation
+/**
+ * Empty option fields will be encoded as `null`.
+ */
+final class jsonExplicitNull extends Annotation
+
+/**
+ * When disabled for encoding, keys with empty collections will be omitted from the JSON. When disabled for decoding,
+ * missing keys will default to empty collections.
+ */
+final case class jsonExplicitEmptyCollections(encoding: Boolean = true, decoding: Boolean = true) extends Annotation
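+// Usage sketch (illustrative field names): both annotations can also be applied per field, e.g.
+//
+//   final case class Cart(
+//     @jsonExplicitNull coupon: Option[String],
+//     @jsonExplicitEmptyCollections(encoding = false) items: List[String]
+//   )
+//
+// encodes `coupon = None` as `"coupon": null` and omits `items` when the list is empty.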
+
/**
* If used on a sealed class, will determine the name of the field for
* disambiguating classes.
@@ -48,6 +57,7 @@ final case class jsonDiscriminator(name: String) extends Annotation
// Subtype.
sealed trait JsonMemberFormat extends (String => String)
+
case class CustomCase(f: String => String) extends JsonMemberFormat {
override def apply(memberName: String): String = f(memberName)
}
@@ -58,12 +68,16 @@ case object CamelCase extends JsonMemberFormat {
override def apply(memberName: String): String =
jsonMemberNames.enforceCamelOrPascalCase(memberName, toPascal = false)
}
+
case object PascalCase extends JsonMemberFormat {
override def apply(memberName: String): String = jsonMemberNames.enforceCamelOrPascalCase(memberName, toPascal = true)
}
case object KebabCase extends JsonMemberFormat {
override def apply(memberName: String): String = jsonMemberNames.enforceSnakeOrKebabCase(memberName, '-')
}
+case object IdentityFormat extends JsonMemberFormat {
+ override def apply(memberName: String): String = memberName
+}
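+// e.g. KebabCase("fooBar") yields "foo-bar", while IdentityFormat leaves member names untouched.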
/** zio-json version 0.3.0 formats. abc123Def -> abc_123_def */
object ziojson_03 {
@@ -85,11 +99,6 @@ object ziojson_03 {
final case class jsonMemberNames(format: JsonMemberFormat) extends Annotation
private[json] object jsonMemberNames {
- /**
- * ~~Stolen~~ Borrowed from jsoniter-scala by Andriy Plokhotnyuk
- * (he even granted permission for this, imagine that!)
- */
-
import java.lang.Character._
def enforceCamelOrPascalCase(s: String, toPascal: Boolean): String =
@@ -122,9 +131,9 @@ private[json] object jsonMemberNames {
}
def enforceSnakeOrKebabCase(s: String, separator: Char): String = {
- val len = s.length
- val sb = new StringBuilder(len << 1)
- var i = 0
+ val len = s.length
+ val sb = new StringBuilder(len << 1)
+ var i = 0
var isPrecedingNotUpperCased = false
while (i < len) isPrecedingNotUpperCased = {
val ch = s.charAt(i)
@@ -145,9 +154,9 @@ private[json] object jsonMemberNames {
}
def enforceSnakeOrKebabCaseSeparateNumbers(s: String, separator: Char): String = {
- val len = s.length
- val sb = new StringBuilder(len << 1)
- var i = 0
+ val len = s.length
+ val sb = new StringBuilder(len << 1)
+ var i = 0
var isPrecedingLowerCased = false
while (i < len) isPrecedingLowerCased = {
val ch = s.charAt(i)
@@ -175,6 +184,12 @@ private[json] object jsonMemberNames {
*/
final case class jsonHint(name: String) extends Annotation
+/**
+ * If used on a sealed class, will determine the strategy used to transform type hint values for disambiguating
+ * classes during serialization and deserialization. The same strategies are provided as for [[jsonMemberNames]].
+ */
+final case class jsonHintNames(format: JsonMemberFormat) extends Annotation
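+// e.g. `@jsonHintNames(KebabCase)` on a sealed trait turns a subtype hint like "MyEvent" into "my-event".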
+
/**
* If used on a case class, will exit early if any fields are in the JSON that
* do not correspond to field names in the case class.
@@ -193,289 +208,503 @@ final class jsonNoExtraFields extends Annotation
*/
final class jsonExclude extends Annotation
-// TODO: implement same configuration as for Scala 2 once this issue is resolved: https://github.com/softwaremill/magnolia/issues/296
-object DeriveJsonDecoder extends Derivation[JsonDecoder] { self =>
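+// Shared decoder for parameterless case classes and case objects: when extra fields are
+// disallowed it accepts exactly `{}`, otherwise it skips whatever value is present, and in
+// both cases (as well as for a missing or null value) builds the instance via `rawConstruct(Nil)`.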
+private class CaseObjectDecoder[Typeclass[*], A](val ctx: CaseClass[Typeclass, A], no_extra: Boolean)
+ extends CollectionJsonDecoder[A] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ if (no_extra) {
+ Lexer.char(trace, in, '{')
+ Lexer.char(trace, in, '}')
+ } else Lexer.skipValue(trace, in)
+ ctx.rawConstruct(Nil)
+ }
+
+ override def unsafeDecodeMissing(trace: List[JsonError]): A = ctx.rawConstruct(Nil)
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case _: Json.Obj | Json.Null => ctx.rawConstruct(Nil)
+ case _ => Lexer.error("expected object", trace)
+ }
+}
+
+sealed class JsonDecoderDerivation(config: JsonCodecConfiguration) extends Derivation[JsonDecoder] { self =>
def join[A](ctx: CaseClass[Typeclass, A]): JsonDecoder[A] = {
- val (transformNames, nameTransform): (Boolean, String => String) =
- ctx.annotations.collectFirst { case jsonMemberNames(format) => format }
- .map(true -> _)
- .getOrElse(false -> identity)
-
- val no_extra = ctx
- .annotations
- .collectFirst { case _: jsonNoExtraFields => () }
- .isDefined
-
- if (ctx.params.isEmpty) {
- new JsonDecoder[A] {
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- if (no_extra) {
- Lexer.char(trace, in, '{')
- Lexer.char(trace, in, '}')
- } else {
- Lexer.skipValue(trace, in)
- }
- ctx.rawConstruct(Nil)
+ val nameTransform: String => String =
+ ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping)
+ val no_extra = ctx.annotations.collectFirst {
+ case _: jsonNoExtraFields => ()
+ }.isDefined || !config.allowExtraFields
+ if (ctx.params.isEmpty) new CaseObjectDecoder(ctx, no_extra)
+ else {
+ var splitIndex = -1
+ val (names, aliases): (Array[String], Array[(String, Int)]) = {
+ val names = new Array[String](ctx.params.size)
+ val aliasesBuilder = Array.newBuilder[(String, Int)]
+ ctx.params.foreach {
+ var idx = 0
+ p =>
+ names(idx) = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label))
+ aliasesBuilder ++= p.annotations.flatMap {
+ case jsonAliases(alias, aliases*) => (alias +: aliases).map(_ -> idx)
+ case _ => Seq.empty
+ }
+ idx += 1
+ if (splitIndex < 0 && idx + aliasesBuilder.length > 64) splitIndex = idx - 1
}
-
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
- json match {
- case Json.Obj(_) => ctx.rawConstruct(Nil)
- case Json.Null => ctx.rawConstruct(Nil)
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
- }
+ val aliases = aliasesBuilder.result()
+ val allFieldNames = names ++ aliases.map(_._1)
+ if (allFieldNames.length != allFieldNames.distinct.length) {
+ val typeName = ctx.typeInfo.full
+ val collisions = aliases
+ .map(_._1)
+ .distinct
+ .filter(alias => names.contains(alias) || aliases.count(_._1 == alias) > 1)
+ .mkString(",")
+ throw new AssertionError(
+ s"Field names and aliases in case class $typeName must be distinct, alias(es) $collisions collide with a field or another alias"
+ )
+ }
+ (names, aliases)
}
- } else {
- new JsonDecoder[A] {
- val (names, aliases): (Array[String], Array[(String, Int)]) = {
- val names = Array.ofDim[String](ctx.params.size)
- val aliasesBuilder = Array.newBuilder[(String, Int)]
- ctx.params.zipWithIndex.foreach { (p, i) =>
- names(i) = p
- .annotations
- .collectFirst { case jsonField(name) => name }
- .getOrElse(if (transformNames) nameTransform(p.label) else p.label)
- aliasesBuilder ++= p
- .annotations
- .flatMap {
- case jsonAliases(alias, aliases*) => (alias +: aliases).map(_ -> i)
- case _ => Seq.empty
+ if (splitIndex < 0) {
+ new CollectionJsonDecoder[A] {
+ private val len = names.length
+ private val matrix = new StringMatrix(names, aliases)
+ private val spans = names.map(JsonError.ObjectAccess(_))
+ private val defaults = IArray.genericWrapArray(ctx.params.map(_.evaluateDefault.orNull)).toArray
+ private lazy val tcs =
+ IArray.genericWrapArray(ctx.params.map(_.typeclass)).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
+ private lazy val namesMap = (names.zipWithIndex ++ aliases).toMap
+ private val explicitEmptyCollections =
+ ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections =>
+ a.decoding
+ }.getOrElse(config.explicitEmptyCollections.decoding)
+ private val missingValueDecoder =
+ if (explicitEmptyCollections) {
+ lazy val missingValueDecoders = tcs.map { d =>
+ if (allowMissingValueDecoder(d)) d
+ else null
}
- }
- val aliases = aliasesBuilder.result()
-
- val allFieldNames = names ++ aliases.map(_._1)
- if (allFieldNames.length != allFieldNames.distinct.length) {
- val aliasNames = aliases.map(_._1)
- val collisions = aliasNames
- .filter(alias => names.contains(alias) || aliases.count { case (a, _) => a == alias } > 1)
- .distinct
- val msg = s"Field names and aliases in case class ${ctx.typeInfo.full} must be distinct, " +
- s"alias(es) ${collisions.mkString(",")} collide with a field or another alias"
- throw new AssertionError(msg)
- }
+ (idx: Int, trace: List[JsonError]) => {
+ val trace_ = spans(idx) :: trace
+ val decoder = missingValueDecoders(idx)
+ if (decoder eq null) Lexer.error("missing", trace_)
+ decoder.unsafeDecodeMissing(trace_)
+ }
+ } else {
+ (idx: Int, trace: List[JsonError]) => tcs(idx).unsafeDecodeMissing(spans(idx) :: trace)
+ }
- (names, aliases)
- }
+ @tailrec
+ private def allowMissingValueDecoder(d: JsonDecoder[_]): Boolean = d match {
+ case _: OptionJsonDecoder[_] => true
+ case _: CollectionJsonDecoder[_] => !explicitEmptyCollections
+ case d: MappedJsonDecoder[_] => allowMissingValueDecoder(d.underlying)
+ case _ => true
+ }
- val len: Int = names.length
- val matrix: StringMatrix = new StringMatrix(names, aliases)
- val spans: Array[JsonError] = names.map(JsonError.ObjectAccess(_))
-
- lazy val tcs: Array[JsonDecoder[Any]] =
- IArray.genericWrapArray(ctx.params.map(_.typeclass)).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
-
- lazy val defaults: Array[Option[Any]] =
- IArray.genericWrapArray(ctx.params.map(_.default)).toArray
-
- lazy val namesMap: Map[String, Int] =
- (names.zipWithIndex ++ aliases).toMap
-
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- Lexer.char(trace, in, '{')
-
- val ps: Array[Any] = Array.ofDim(len)
-
- if (Lexer.firstField(trace, in))
- while({
- var trace_ = trace
- val field = Lexer.field(trace, in, matrix)
- if (field != -1) {
- trace_ = spans(field) :: trace
- if (ps(field) != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace)
- if (defaults(field).isDefined) {
- val opt = JsonDecoder.option(tcs(field)).unsafeDecode(trace_, in)
- ps(field) = opt.getOrElse(defaults(field).get)
- } else
- ps(field) = tcs(field).unsafeDecode(trace_, in)
- } else if (no_extra) {
- throw UnsafeJson(
- JsonError.Message(s"invalid extra field") :: trace
- )
- } else
- Lexer.skipValue(trace_, in)
-
- Lexer.nextField(trace, in)
- }) ()
-
- var i = 0
-
- while (i < len) {
- if (ps(i) == null) {
- if (defaults(i).isDefined) {
- ps(i) = defaults(i).get
- } else {
- ps(i) = tcs(i).unsafeDecodeMissing(spans(i) :: trace)
+ override def unsafeDecodeMissing(trace: List[JsonError]): A = {
+ val ps = new Array[Any](len)
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
}
+ idx += 1
}
- i += 1
+ ctx.rawConstruct(ps)
}
- ctx.rawConstruct(new ArraySeq(ps))
- }
-
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A = {
- json match {
- case Json.Obj(fields) =>
- val ps: Array[Any] = Array.ofDim(len)
-
- if (aliases.nonEmpty) {
- val present = fields.map { case (key, _) => namesMap(key) }
- if (present.distinct.size != present.size) {
- throw UnsafeJson(
- JsonError.Message("duplicate") :: trace
- )
- }
+ override def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
+ val ps = new Array[Any](len)
+ if (Lexer.firstField(trace, in))
+ while({
+ val idx = Lexer.field(trace, in, matrix)
+ if (idx >= 0) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
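+ // A literal JSON null for a field that has a default falls back to that default;
+ // otherwise the reader is retracted and the field's decoder consumes the value as usual.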
+ ps(idx) = if ((default eq null) || in.nextNonWhitespace() != 'n' && {
+ in.retract()
+ true
+ }) tcs(idx).unsafeDecode(spans(idx) :: trace, in)
+ else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default()
+ else Lexer.error("expected 'null'", spans(idx) :: trace)
+ } else Lexer.error("duplicate", trace)
+ } else if (no_extra) Lexer.error("invalid extra field", trace)
+ else Lexer.skipValue(trace, in)
+ Lexer.nextField(trace, in)
+ }) ()
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
}
+ idx += 1
+ }
+ ctx.rawConstruct(ps)
+ }
- for ((key, value) <- fields) {
- namesMap.get(key) match {
- case Some(field) =>
- val trace_ = JsonError.ObjectAccess(key) :: trace
- if (defaults(field).isDefined) {
- val opt = JsonDecoder.option(tcs(field)).unsafeFromJsonAST(trace_, value)
- ps(field) = opt.getOrElse(defaults(field).get)
- } else {
- ps(field) = tcs(field).unsafeFromJsonAST(trace_, value)
- }
- case None =>
- if (no_extra) {
- throw UnsafeJson(
- JsonError.Message(s"invalid extra field") :: trace
- )
- }
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ val ps = new Array[Any](len)
+ o.fields.foreach { kv =>
+ namesMap.get(kv._1) match {
+ case Some(idx) =>
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if ((default ne null) && (kv._2 eq Json.Null)) default()
+ else tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2)
+ } else Lexer.error("duplicate", trace)
+ case _ =>
+ if (no_extra) Lexer.error("invalid extra field", trace)
+ }
}
- }
-
- var i = 0
- while (i < len) {
- if (ps(i) == null) {
- if (defaults(i).isDefined) {
- ps(i) = defaults(i).get
- } else {
- ps(i) = tcs(i).unsafeDecodeMissing(JsonError.ObjectAccess(names(i)) :: trace)
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
}
+ idx += 1
}
- i += 1
+ ctx.rawConstruct(ps)
+ case _ => Lexer.error("expected object", trace)
+ }
+ }
+ } else {
+ val (names1, names2) = names.splitAt(splitIndex)
+ val aliases1 = aliases.filter(kv => kv._2 < splitIndex)
+ val aliases2 = aliases.collect { case (k, v) if v >= splitIndex =>
+ (k, v - splitIndex)
+ }
+ new CollectionJsonDecoder[A] {
+ private val len = names.length
+ private val matrix1 = new StringMatrix(names1, aliases1)
+ private val matrix2 = new StringMatrix(names2, aliases2)
+ private val spans = names.map(JsonError.ObjectAccess(_))
+ private val defaults = IArray.genericWrapArray(ctx.params.map(_.evaluateDefault.orNull)).toArray
+ private lazy val tcs =
+ IArray.genericWrapArray(ctx.params.map(_.typeclass)).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
+ private lazy val namesMap = (names.zipWithIndex ++ aliases).toMap
+ private val explicitEmptyCollections =
+ ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections =>
+ a.decoding
+ }.getOrElse(config.explicitEmptyCollections.decoding)
+ private val missingValueDecoder =
+ if (explicitEmptyCollections) {
+ lazy val missingValueDecoders = tcs.map { d =>
+ if (allowMissingValueDecoder(d)) d
+ else null
+ }
+ (idx: Int, trace: List[JsonError]) => {
+ val trace_ = spans(idx) :: trace
+ val decoder = missingValueDecoders(idx)
+ if (decoder eq null) Lexer.error("missing", trace_)
+ decoder.unsafeDecodeMissing(trace_)
}
+ } else {
+ (idx: Int, trace: List[JsonError]) => tcs(idx).unsafeDecodeMissing(spans(idx) :: trace)
+ }
- ctx.rawConstruct(new ArraySeq(ps))
+ @tailrec
+ private def allowMissingValueDecoder(d: JsonDecoder[_]): Boolean = d match {
+ case _: OptionJsonDecoder[_] => true
+ case _: CollectionJsonDecoder[_] => !explicitEmptyCollections
+ case d: MappedJsonDecoder[_] => allowMissingValueDecoder(d.underlying)
+ case _ => true
+ }
+
+ override def unsafeDecodeMissing(trace: List[JsonError]): A = {
+ val ps = new Array[Any](len)
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
+ }
+ idx += 1
+ }
+ ctx.rawConstruct(ps)
+ }
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+ override def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
+ val ps = new Array[Any](len)
+ if (Lexer.firstField(trace, in))
+ while({
+ val idx = Lexer.field128(trace, in, matrix1, matrix2)
+ if (idx >= 0) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) = if ((default eq null) || in.nextNonWhitespace() != 'n' && {
+ in.retract()
+ true
+ }) tcs(idx).unsafeDecode(spans(idx) :: trace, in)
+ else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default()
+ else Lexer.error("expected 'null'", spans(idx) :: trace)
+ } else Lexer.error("duplicate", trace)
+ } else if (no_extra) Lexer.error("invalid extra field", trace)
+ else Lexer.skipValue(trace, in)
+ Lexer.nextField(trace, in)
+ }) ()
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
+ }
+ idx += 1
+ }
+ ctx.rawConstruct(ps)
}
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ val ps = new Array[Any](len)
+ o.fields.foreach { kv =>
+ namesMap.get(kv._1) match {
+ case Some(idx) =>
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if ((default ne null) && (kv._2 eq Json.Null)) default()
+ else tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2)
+ } else Lexer.error("duplicate", trace)
+ case _ =>
+ if (no_extra) Lexer.error("invalid extra field", trace)
+ }
+ }
+ var idx = 0
+ while (idx < len) {
+ if (ps(idx) == null) {
+ val default = defaults(idx)
+ ps(idx) =
+ if (default ne null) default()
+ else missingValueDecoder(idx, trace)
+ }
+ idx += 1
+ }
+ ctx.rawConstruct(ps)
+ case _ => Lexer.error("expected object", trace)
+ }
}
}
}
}
def split[A](ctx: SealedTrait[JsonDecoder, A]): JsonDecoder[A] = {
+ val jsonHintFormat: JsonMemberFormat =
+ ctx.annotations.collectFirst { case jsonHintNames(format) => format }.getOrElse(config.sumTypeMapping)
val names: Array[String] = IArray.genericWrapArray(ctx.subtypes.map { p =>
- p.annotations.collectFirst { case jsonHint(name) =>
- name
- }.getOrElse(p.typeInfo.short)
+ p.annotations.collectFirst { case jsonHint(name) => name }.getOrElse(jsonHintFormat(p.typeInfo.short))
}).toArray
-
- val matrix: StringMatrix = new StringMatrix(names)
-
+ if (names.distinct.length != names.length) {
+ val typeName = ctx.typeInfo.full
+ val collisions = names.groupBy(identity).collect { case (n, ns) if ns.lengthCompare(1) > 0 => n }.mkString(",")
+ throw new AssertionError(s"Case names in ADT $typeName must be distinct, name(s) $collisions are duplicated")
+ }
+ val (names1, names2) = names.splitAt(64)
+ val matrix1 = new StringMatrix(names1)
+ val matrix2 =
+ if (names2.isEmpty) null
+ else new StringMatrix(names2)
lazy val tcs: Array[JsonDecoder[Any]] =
IArray.genericWrapArray(ctx.subtypes.map(_.typeclass)).toArray.asInstanceOf[Array[JsonDecoder[Any]]]
+ lazy val namesMap: Map[String, Int] = names.zipWithIndex.toMap
+ val discrim =
+ ctx.annotations.collectFirst { case jsonDiscriminator(n) => n }.orElse(config.sumTypeHandling.discriminatorField)
+ lazy val isEnumeration = config.enumValuesAsStrings &&
+ (ctx.isEnum && ctx.subtypes.forall(_.typeclass.isInstanceOf[CaseObjectDecoder[?, ?]]) ||
+ !ctx.isEnum && ctx.subtypes.forall(_.isObject))
+ if (discrim.isEmpty && isEnumeration) {
+ if (names.length <= 64) {
+ new JsonDecoder[A] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val idx = Lexer.enumeration(trace, in, matrix1)
+ if (idx >= 0) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ else Lexer.error("invalid enumeration value", trace)
+ }
- lazy val namesMap: Map[String, Int] =
- names.zipWithIndex.toMap
-
- def discrim = ctx.annotations.collectFirst { case jsonDiscriminator(n) => n }
-
- if (discrim.isEmpty) {
- // We're not allowing extra fields in this encoding
- new JsonDecoder[A] {
- val spans: Array[JsonError] = names.map(JsonError.ObjectAccess(_))
-
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- Lexer.char(trace, in, '{')
-
- if (Lexer.firstField(trace, in)) {
- val field = Lexer.field(trace, in, matrix)
-
- if (field != -1) {
- val trace_ = spans(field) :: trace
- val a = tcs(field).unsafeDecode(trace_, in).asInstanceOf[A]
- Lexer.char(trace, in, '}')
- a
- } else
- throw UnsafeJson(
- JsonError.Message("invalid disambiguator") :: trace
- )
- } else
- throw UnsafeJson(
- JsonError.Message("expected non-empty object") :: trace
- )
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case s: Json.Str => namesMap.get(s.value) match {
+ case Some(idx) => tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ case _ => Lexer.error("invalid enumeration value", trace)
+ }
+ case _ => Lexer.error("expected string", trace)
+ }
}
+ } else {
+ new JsonDecoder[A] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val idx = Lexer.enumeration128(trace, in, matrix1, matrix2)
+ if (idx >= 0) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ else Lexer.error("invalid enumeration value", trace)
+ }
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A = {
- json match {
- case Json.Obj(chunk) if chunk.size == 1 =>
- val (key, inner) = chunk.head
- namesMap.get(key) match {
- case Some(idx) => tcs(idx).unsafeFromJsonAST(JsonError.ObjectAccess(key) :: trace, inner).asInstanceOf[A]
- case None => throw UnsafeJson(JsonError.Message("Invalid disambiguator") :: trace)
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case s: Json.Str => namesMap.get(s.value) match {
+ case Some(idx) => tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil)
+ case _ => Lexer.error("invalid enumeration value", trace)
}
- case Json.Obj(_) => throw UnsafeJson(JsonError.Message("Not an object with a single field") :: trace)
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
- }
+ case _ => Lexer.error("expected string", trace)
+ }
}
}
- } else {
- new JsonDecoder[A] {
- val hintfield = discrim.get
- val hintmatrix = new StringMatrix(Array(hintfield))
- val spans: Array[JsonError] = names.map(JsonError.Message(_))
-
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
- val in_ = zio.json.internal.RecordingReader(in)
-
- Lexer.char(trace, in_, '{')
+ } else if (discrim.isEmpty) {
+ // We're not allowing extra fields in this encoding
+ if (names.length <= 64) {
+ new JsonDecoder[A] {
+ private val spans = names.map(JsonError.ObjectAccess(_))
- if (Lexer.firstField(trace, in_)) {
- while({
- if (Lexer.field(trace, in_, hintmatrix) != -1) {
- val field = Lexer.enumeration(trace, in_, matrix)
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
+ if (Lexer.firstField(trace, in)) {
+ val idx = Lexer.field(trace, in, matrix1)
+ if (idx >= 0) {
+ val a = tcs(idx).unsafeDecode(spans(idx) :: trace, in).asInstanceOf[A]
+ Lexer.char(trace, in, '}')
+ a
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.error("expected non-empty object", trace)
+ }
- if (field == -1) {
- throw UnsafeJson(JsonError.Message(s"invalid disambiguator") :: trace)
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj if o.fields.length == 1 =>
+ val keyValue = o.fields(0)
+ namesMap.get(keyValue._1) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, keyValue._2).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
}
+ case _ => Lexer.error("expected single field object", trace)
+ }
+ }
+ } else {
+ new JsonDecoder[A] {
+ private val spans = names.map(JsonError.ObjectAccess(_))
- in_.rewind()
- val trace_ = spans(field) :: trace
-
- return tcs(field).unsafeDecode(trace_, in_).asInstanceOf[A]
- } else {
- Lexer.skipValue(trace, in_)
- }
-
- Lexer.nextField(trace, in_)
- }) ()
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ Lexer.char(trace, in, '{')
+ if (Lexer.firstField(trace, in)) {
+ val idx = Lexer.field128(trace, in, matrix1, matrix2)
+ if (idx >= 0) {
+ val a = tcs(idx).unsafeDecode(spans(idx) :: trace, in).asInstanceOf[A]
+ Lexer.char(trace, in, '}')
+ a
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.error("expected non-empty object", trace)
}
- throw UnsafeJson(JsonError.Message(s"missing hint '$hintfield'") :: trace)
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj if o.fields.length == 1 =>
+ val keyValue = o.fields(0)
+ namesMap.get(keyValue._1) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, keyValue._2).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
+ }
+ case _ => Lexer.error("expected single field object", trace)
+ }
}
+ }
+ } else {
+ if (names.length <= 64) {
+ new JsonDecoder[A] {
+ private val hintfield = discrim.get
+ private val hintmatrix = new StringMatrix(Array(hintfield))
+ private val spans = names.map(JsonError.Message(_))
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val in_ = RecordingReader(in)
+ Lexer.char(trace, in_, '{')
+ if (Lexer.firstField(trace, in_)) {
+ while ({
+ if (Lexer.field(trace, in_, hintmatrix) >= 0) {
+ val idx = Lexer.enumeration(trace, in_, matrix1)
+ if (idx >= 0) {
+ in_.rewind()
+ return tcs(idx).unsafeDecode(spans(idx) :: trace, in_).asInstanceOf[A]
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.skipValue(trace, in_)
+ Lexer.nextField(trace, in_)
+ }) ()
+ }
+ Lexer.error(s"missing hint '$hintfield'", trace)
+ }
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A = {
- json match {
- case Json.Obj(fields) =>
- fields.find { case (k, _) => k == hintfield } match {
- case Some((_, Json.Str(name))) =>
- namesMap.get(name) match {
- case Some(idx) => tcs(idx).unsafeFromJsonAST(JsonError.ObjectAccess(name) :: trace, json).asInstanceOf[A]
- case None => throw UnsafeJson(JsonError.Message("Invalid disambiguator") :: trace)
- }
- case Some(_) =>
- throw UnsafeJson(JsonError.Message(s"Non-string hint '$hintfield'") :: trace)
- case None =>
- throw UnsafeJson(JsonError.Message(s"Missing hint '$hintfield'") :: trace)
- }
- case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ o.fields.collectFirst { case kv if kv._1 == hintfield && kv._2.isInstanceOf[Json.Str] =>
+ kv._2.asInstanceOf[Json.Str].value
+ } match {
+ case Some(name) =>
+ namesMap.get(name) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, json).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
+ }
+ case _ => Lexer.error(s"missing hint '$hintfield'", trace)
+ }
+ case _ => Lexer.error("expected object", trace)
+ }
+ }
+ } else {
+ new JsonDecoder[A] {
+ private val hintfield = discrim.get
+ private val hintmatrix = new StringMatrix(Array(hintfield))
+ private val spans = names.map(JsonError.Message(_))
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
+ val in_ = RecordingReader(in)
+ Lexer.char(trace, in_, '{')
+ if (Lexer.firstField(trace, in_)) {
+ while ({
+ if (Lexer.field(trace, in_, hintmatrix) >= 0) {
+ val idx = Lexer.enumeration128(trace, in_, matrix1, matrix2)
+ if (idx >= 0) {
+ in_.rewind()
+ return tcs(idx).unsafeDecode(spans(idx) :: trace, in_).asInstanceOf[A]
+ } else Lexer.error("invalid disambiguator", trace)
+ } else Lexer.skipValue(trace, in_)
+ Lexer.nextField(trace, in_)
+ }) ()
+ }
+ Lexer.error(s"missing hint '$hintfield'", trace)
}
+
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
+ json match {
+ case o: Json.Obj =>
+ o.fields.collectFirst { case kv if kv._1 == hintfield && kv._2.isInstanceOf[Json.Str] =>
+ kv._2.asInstanceOf[Json.Str].value
+ } match {
+ case Some(name) =>
+ namesMap.get(name) match {
+ case Some(idx) => tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, json).asInstanceOf[A]
+ case _ => Lexer.error("invalid disambiguator", trace)
+ }
+ case _ => Lexer.error(s"missing hint '$hintfield'", trace)
+ }
+ case _ => Lexer.error("expected object", trace)
+ }
}
}
}
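+
+ // A hedged illustration (`Animal` and `Dog` are hypothetical, not part of this PR):
+ // with `@jsonDiscriminator("type")` on `sealed trait Animal`, the decoders built above
+ // select the subtype decoder from the hint field of a single flat object, e.g.
+ //
+ //   """{"type":"Dog","name":"Rex"}""".fromJson[Animal] // Right(Dog("Rex"))
+ //
+ // whereas without a discriminator the wrapper form {"Dog":{"name":"Rex"}} is expected.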
@@ -483,239 +712,359 @@ object DeriveJsonDecoder extends Derivation[JsonDecoder] { self =>
inline def gen[A](using mirror: Mirror.Of[A]) = self.derived[A]
- // Backcompat for 2.12, otherwise we'd use ArraySeq.unsafeWrapArray
+ // FIXME: remove in the next major version
private final class ArraySeq(p: Array[Any]) extends IndexedSeq[Any] {
def apply(i: Int): Any = p(i)
def length: Int = p.length
}
}
-object DeriveJsonEncoder extends Derivation[JsonEncoder] { self =>
- def join[A](ctx: CaseClass[Typeclass, A]): JsonEncoder[A] =
- if (ctx.params.isEmpty) {
- new JsonEncoder[A] {
- def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit =
- out.write("{}")
-
- override final def toJsonAST(a: A): Either[String, Json] =
- Right(Json.Obj(Chunk.empty))
- }
- } else {
- new JsonEncoder[A] {
- val (transformNames, nameTransform): (Boolean, String => String) =
- ctx.annotations.collectFirst { case jsonMemberNames(format) => format }
- .map(true -> _)
- .getOrElse(false -> identity)
-
- val params = ctx
- .params
- .filterNot { param =>
- param
- .annotations
- .collectFirst {
- case _: jsonExclude => ()
- }
- .isDefined
- }
-
- val len = params.length
-
- val names =
- IArray.genericWrapArray(ctx
- .params
- .map { p =>
- p.annotations.collectFirst {
- case jsonField(name) => name
- }.getOrElse(if (transformNames) nameTransform(p.label) else p.label)
- })
- .toArray
+private lazy val caseObjectEncoder = new JsonEncoder[Any] {
+ override def isEmpty(a: Any): Boolean = true
- lazy val tcs: Array[JsonEncoder[Any]] =
- IArray.genericWrapArray(params.map(_.typeclass.asInstanceOf[JsonEncoder[Any]])).toArray
-
- def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
- out.write("{")
+ def unsafeEncode(a: Any, indent: Option[Int], out: Write): Unit = out.write("{}")
- var indent_ = JsonEncoder.bump(indent)
- JsonEncoder.pad(indent_, out)
+ override final def toJsonAST(a: Any): Either[String, Json] = new Right(Json.Obj.empty)
+}
- var i = 0
- var prevFields = false
-
- while (i < len) {
- val tc = tcs(i)
- val p = params(i).deref(a)
-
- if (! tc.isNothing(p)) {
- // if we have at least one field already, we need a comma
- if (prevFields) {
- if (indent.isEmpty) {
- out.write(",")
- } else {
- out.write(",")
- JsonEncoder.pad(indent_, out)
- }
- }
+object DeriveJsonDecoder extends JsonDecoderDerivation(JsonCodecConfiguration.default) { self =>
+ inline def gen[A](using config: JsonCodecConfiguration, mirror: Mirror.Of[A]) = {
+ val derivation = new JsonDecoderDerivation(config)
+ derivation.derived[A]
+ }
- JsonEncoder.string.unsafeEncode(names(i), indent_, out)
+ // FIXME: remove in the next major version
+ private final class ArraySeq(p: Array[Any]) extends IndexedSeq[Any] {
+ def apply(i: Int): Any = p(i)
+ def length: Int = p.length
+ }
+}
- if (indent.isEmpty) {
- out.write(":")
- } else {
- out.write(" : ")
- }
+sealed class JsonEncoderDerivation(config: JsonCodecConfiguration) extends Derivation[JsonEncoder] { self =>
+ def join[A](ctx: CaseClass[Typeclass, A]): JsonEncoder[A] =
+ if (ctx.params.isEmpty) caseObjectEncoder.narrow[A]
+ else {
+ val nameTransform =
+ ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping)
+ val explicitNulls = config.explicitNulls || ctx.annotations.exists(_.isInstanceOf[jsonExplicitNull])
+ val explicitEmptyCollections = ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections =>
+ a.encoding
+ }.getOrElse(config.explicitEmptyCollections.encoding)
+ val params = IArray.genericWrapArray(ctx.params.filterNot { param =>
+ param.annotations.collectFirst { case _: jsonExclude => () }.isDefined
+ }).toArray
+ new JsonEncoder[A] {
+ private lazy val fields = params.map { p =>
+ FieldEncoder(
+ p = p,
+ name = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label)),
+ encoder = p.typeclass.asInstanceOf[JsonEncoder[Any]],
+ withExplicitNulls = explicitNulls || p.annotations.exists(_.isInstanceOf[jsonExplicitNull]),
+ withExplicitEmptyCollections = p.annotations.collectFirst { case a: jsonExplicitEmptyCollections =>
+ a.encoding
+ }.getOrElse(explicitEmptyCollections)
+ )
+ }
- tc.unsafeEncode(p, indent_, out)
- prevFields = true // at least one field so far
+ def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
+ out.write('{')
+ val indent_ = JsonEncoder.bump(indent)
+ val fields = this.fields
+ var idx = 0
+ var comma = false
+ while (idx < fields.length) {
+ val field = fields(idx)
+ idx += 1
+ val p = field.p.deref(a)
+ if (field.skip(p)) ()
+ else {
+ if (comma) out.write(',')
+ else comma = true
+ JsonEncoder.pad(indent_, out)
+ out.write(if (indent eq None) field.encodedName else field.prettyEncodedName)
+ field.encoder.unsafeEncode(p, indent_, out)
}
-
- i += 1
}
-
JsonEncoder.pad(indent, out)
- out.write("}")
+ out.write('}')
}
override final def toJsonAST(a: A): Either[String, Json] = {
- ctx.params
- .foldLeft[Either[String, Chunk[(String, Json)]]](Right(Chunk.empty)) { case (c, param) =>
- val name = param.annotations.collectFirst { case jsonField(name) =>
- name
- }.getOrElse(nameTransform(param.label))
- c.flatMap { chunk =>
- param.typeclass.toJsonAST(param.deref(a)).map { value =>
- if (value == Json.Null) chunk
- else chunk :+ name -> value
- }
+ val fields = this.fields
+ var buf = new Array[(String, Json)](fields.length)
+ var i, idx = 0
+ while (idx < fields.length) {
+ val field = fields(idx)
+ idx += 1
+ val p = field.p.deref(a)
+ if (field.skip(p)) ()
+ else {
+ field.encoder.toJsonAST(p) match {
+ case Right(value) =>
+ buf(i) = (field.name, value)
+ i += 1
+ case left =>
+ return left
}
}
- .map(Json.Obj.apply)
+ }
+ if (i != buf.length) buf = java.util.Arrays.copyOf(buf, i)
+ new Right(Json.Obj(Chunk.fromArray(buf)))
}
}
}
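+
+ // Hedged illustration (`Foo` is hypothetical): a field annotated with @jsonExclude is
+ // never written, and an empty Option field is emitted as `"x":null` only when explicit
+ // nulls are enabled by annotation or by the configuration read above, e.g.
+ //
+ //   case class Foo(@jsonExclude secret: String, x: Option[Int])
+ //   DeriveJsonEncoder.gen[Foo].encodeJson(Foo("s", None), None) // {} by default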
def split[A](ctx: SealedTrait[JsonEncoder, A]): JsonEncoder[A] = {
- val discrim = ctx
- .annotations
- .collectFirst {
- case jsonDiscriminator(n) => n
- }
-
- if (discrim.isEmpty) {
+ val jsonHintFormat: JsonMemberFormat =
+ ctx.annotations.collectFirst { case jsonHintNames(format) => format }.getOrElse(config.sumTypeMapping)
+ val names: Array[String] = IArray.genericWrapArray(ctx.subtypes.map { p =>
+ p.annotations.collectFirst { case jsonHint(name) => name }.getOrElse(jsonHintFormat(p.typeInfo.short))
+ }).toArray
+ val encodedNames: Array[String] = names.map(name => JsonEncoder.string.encodeJson(name, None).toString)
+ lazy val tcs =
+ IArray.genericWrapArray(ctx.subtypes.map(_.typeclass)).toArray.asInstanceOf[Array[JsonEncoder[Any]]]
+ val discrim =
+ ctx.annotations.collectFirst { case jsonDiscriminator(n) => n }.orElse(config.sumTypeHandling.discriminatorField)
+ lazy val isEnumeration = config.enumValuesAsStrings &&
+ (ctx.isEnum && ctx.subtypes.forall(_.typeclass == caseObjectEncoder) ||
+ !ctx.isEnum && ctx.subtypes.forall(_.isObject))
+ if (discrim.isEmpty && isEnumeration) {
new JsonEncoder[A] {
+ private val casts = IArray.genericWrapArray(ctx.subtypes.map(_.cast)).toArray
+
def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
- ctx.choose(a) { sub =>
- val name = sub
- .annotations
- .collectFirst {
- case jsonHint(name) => name
- }.getOrElse(sub.typeInfo.short)
-
- out.write("{")
- val indent_ = JsonEncoder.bump(indent)
- JsonEncoder.pad(indent_, out)
- JsonEncoder.string.unsafeEncode(name, indent_, out)
-
- if (indent.isEmpty) {
- out.write(":")
- } else {
- out.write(" : ")
- }
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ out.write(encodedNames(idx))
+ }
- sub.typeclass.unsafeEncode(sub.cast(a), indent_, out)
- JsonEncoder.pad(indent, out)
+ override final def toJsonAST(a: A): Either[String, Json] = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ new Right(new Json.Str(names(idx)))
+ }
+ }
+ } else if (discrim.isEmpty) {
+ new JsonEncoder[A] {
+ private val casts = IArray.genericWrapArray(ctx.subtypes.map(_.cast)).toArray
- out.write("}")
- }
+ def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ out.write('{')
+ val indent_ = JsonEncoder.bump(indent)
+ JsonEncoder.pad(indent_, out)
+ out.write(encodedNames(idx))
+ if (indent eq None) out.write(':')
+ else out.write(" : ")
+ tcs(idx).unsafeEncode(a, indent_, out)
+ JsonEncoder.pad(indent, out)
+ out.write('}')
}
- final override def toJsonAST(a: A): Either[String, Json] = {
- ctx.choose(a) { sub =>
- sub.typeclass.toJsonAST(sub.cast(a)).map { inner =>
- val name = sub
- .annotations
- .collectFirst {
- case jsonHint(name) => name
- }.getOrElse(sub.typeInfo.short)
-
- Json.Obj(
- Chunk(
- name -> inner
- )
- )
- }
- }
+ override def toJsonAST(a: A): Either[String, Json] = {
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ tcs(idx).toJsonAST(a).map(inner => new Json.Obj(Chunk(names(idx) -> inner)))
}
}
} else {
- val hintField = discrim.get
-
- def getName(annotations: Iterable[_], default: => String): String =
- annotations
- .collectFirst { case jsonHint(name) => name }
- .getOrElse(default)
-
new JsonEncoder[A] {
+ private val casts = IArray.genericWrapArray(ctx.subtypes.map(_.cast)).toArray
+ private val hintFieldName = discrim.get
+ private val encodedHintFieldName = JsonEncoder.string.encodeJson(hintFieldName, None).toString
+
def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
- ctx.choose(a) { sub =>
- out.write("{")
- val indent_ = JsonEncoder.bump(indent)
- JsonEncoder.pad(indent_, out)
- JsonEncoder.string.unsafeEncode(hintField, indent_, out)
- if (indent.isEmpty) out.write(":")
- else out.write(" : ")
- JsonEncoder.string.unsafeEncode(getName(sub.annotations, sub.typeInfo.short), indent_, out)
-
- // whitespace is always off by 2 spaces at the end, probably not worth fixing
- val intermediate = new NestedWriter(out, indent_)
- sub.typeclass.unsafeEncode(sub.cast(a), indent, intermediate)
- }
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ out.write('{')
+ val indent_ = JsonEncoder.bump(indent)
+ JsonEncoder.pad(indent_, out)
+ out.write(encodedHintFieldName)
+ if (indent eq None) out.write(':')
+ else out.write(" : ")
+ out.write(encodedNames(idx))
+ // whitespace is always off by 2 spaces at the end, probably not worth fixing
+ tcs(idx).unsafeEncode(a, indent, new DeriveJsonEncoder.NestedWriter(out, indent_))
}
override final def toJsonAST(a: A): Either[String, Json] = {
- ctx.choose(a) { sub =>
- sub.typeclass.toJsonAST(sub.cast(a)).flatMap {
- case Json.Obj(fields) => Right(Json.Obj(fields :+ hintField -> Json.Str(getName(sub.annotations, sub.typeInfo.short))))
- case _ => Left("Subtype is not encoded as an object")
- }
+ var idx = 0
+ while (!casts(idx).isDefinedAt(a)) idx += 1
+ tcs(idx).toJsonAST(a).flatMap {
+ case o: Json.Obj =>
+ val hintField = hintFieldName -> new Json.Str(names(idx))
+ new Right(new Json.Obj(hintField +: o.fields)) // hint field is always first
+ case _ =>
+ new Left("expected object")
}
}
}
}
}
+}
- inline def gen[A](using mirror: Mirror.Of[A]) = self.derived[A]
+object DeriveJsonEncoder extends JsonEncoderDerivation(JsonCodecConfiguration.default) { self =>
+ inline def gen[A](using config: JsonCodecConfiguration, mirror: Mirror.Of[A]) = {
+ val derivation = new JsonEncoderDerivation(config)
+ derivation.derived[A]
+ }
// intercepts the first `{` of a nested writer and discards it. We also need to
// inject a `,` unless an empty object `{}` has been written.
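+ // For example (hedged; `Dog` is a hypothetical subtype), the discriminator encoding
+ // produces a single flat object,
+ //   {"type":"Dog","name":"Rex"}
+ // rather than the nested {"type":"Dog"{"name":"Rex"}} that would result if the
+ // subtype encoder's leading '{' were written verbatim.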
- private[this] final class NestedWriter(out: Write, indent: Option[Int]) extends Write {
- private[this] var first, second = true
-
- def write(c: Char): Unit = write(c.toString) // could be optimised
-
- def write(s: String): Unit =
- if (first || second) {
- var i = 0
- while (i < s.length) {
- val c = s.charAt(i)
- if (c == ' ' || c == '\n') {} else if (first && c == '{') {
- first = false
- } else if (second) {
- second = false
+ private[json] final class NestedWriter(out: Write, indent: Option[Int]) extends Write {
+ private var state = 2
+
+ @inline def write(c: Char): Unit =
+ if (state == 0) out.write(c)
+ else nonZeroStateWrite(c)
+
+ @noinline private def nonZeroStateWrite(c: Char): Unit = {
+ if (c != ' ' && c != '\n') {
+ if (state == 2) {
+ if (c == '{') state = 1
+ } else {
+ state = 0
+ if (c != '}') {
+ out.write(',')
+ JsonEncoder.pad(indent, out)
+ }
+ out.write(c)
+ }
+ }
+ }
+
+ @inline def write(s: String): Unit =
+ if (state == 0) out.write(s)
+ else nonZeroStateWrite(s)
+
+ @noinline private def nonZeroStateWrite(s: String): Unit = {
+ var i = 0
+ while (i < s.length) {
+ val c = s.charAt(i)
+ if (c != ' ' && c != '\n') {
+ if (state == 2) {
+ if (c == '{') state = 1
+ } else {
+ state = 0
if (c != '}') {
out.write(',')
JsonEncoder.pad(indent, out)
}
- return out.write(s.substring(i))
+ while (i < s.length) {
+ out.write(s.charAt(i))
+ i += 1
+ }
+ return
}
- i += 1
}
- } else out.write(s)
+ i += 1
+ }
}
+
+ @inline override def write(cs: Array[Char], from: Int, to: Int): Unit =
+ if (state == 0) out.write(cs, from, to)
+ else nonZeroStateWrite(cs, from, to)
+
+ @noinline private def nonZeroStateWrite(cs: Array[Char], from: Int, to: Int): Unit = {
+ var i = from
+ while (i < to) {
+ val c = cs(i)
+ if (c != ' ' && c != '\n') {
+ if (state == 2) {
+ if (c == '{') state = 1
+ } else {
+ state = 0
+ if (c != '}') {
+ out.write(',')
+ JsonEncoder.pad(indent, out)
+ }
+ out.write(cs, i, to)
+ return
+ }
+ }
+ i += 1
+ }
+ }
+
+ @inline override def write(c1: Char, c2: Char): Unit =
+ if (state == 0) out.write(c1, c2)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char): Unit =
+ if (state == 0) out.write(c1, c2, c3)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ nonZeroStateWrite(c3)
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char, c4: Char): Unit =
+ if (state == 0) out.write(c1, c2, c3, c4)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ nonZeroStateWrite(c3)
+ nonZeroStateWrite(c4)
+ }
+
+ @inline override def write(c1: Char, c2: Char, c3: Char, c4: Char, c5: Char): Unit =
+ if (state == 0) out.write(c1, c2, c3, c4, c5)
+ else {
+ nonZeroStateWrite(c1)
+ nonZeroStateWrite(c2)
+ nonZeroStateWrite(c3)
+ nonZeroStateWrite(c4)
+ nonZeroStateWrite(c5)
+ }
+
+ @inline override def write(s: Short): Unit =
+ if (state == 0) out.write(s)
+ else {
+ nonZeroStateWrite((s & 0xff).toChar)
+ nonZeroStateWrite((s >> 8).toChar)
+ }
+
+ @inline override def write(s1: Short, s2: Short): Unit =
+ if (state == 0) out.write(s1, s2)
+ else {
+ nonZeroStateWrite((s1 & 0xff).toChar)
+ nonZeroStateWrite((s1 >> 8).toChar)
+ nonZeroStateWrite((s2 & 0xff).toChar)
+ nonZeroStateWrite((s2 >> 8).toChar)
+ }
+
+ @inline override def write(s1: Short, s2: Short, s3: Short): Unit =
+ if (state == 0) out.write(s1, s2, s3)
+ else {
+ nonZeroStateWrite((s1 & 0xff).toChar)
+ nonZeroStateWrite((s1 >> 8).toChar)
+ nonZeroStateWrite((s2 & 0xff).toChar)
+ nonZeroStateWrite((s2 >> 8).toChar)
+ nonZeroStateWrite((s3 & 0xff).toChar)
+ nonZeroStateWrite((s3 >> 8).toChar)
+ }
+
+ @inline override def write(s1: Short, s2: Short, s3: Short, s4: Short): Unit =
+ if (state == 0) out.write(s1, s2, s3, s4)
+ else {
+ nonZeroStateWrite((s1 & 0xff).toChar)
+ nonZeroStateWrite((s1 >> 8).toChar)
+ nonZeroStateWrite((s2 & 0xff).toChar)
+ nonZeroStateWrite((s2 >> 8).toChar)
+ nonZeroStateWrite((s3 & 0xff).toChar)
+ nonZeroStateWrite((s3 >> 8).toChar)
+ nonZeroStateWrite((s4 & 0xff).toChar)
+ nonZeroStateWrite((s4 >> 8).toChar)
+ }
+ }
}
object DeriveJsonCodec {
- inline def gen[A](using mirror: Mirror.Of[A]) = {
+ inline def gen[A](using mirror: Mirror.Of[A], config: JsonCodecConfiguration) = {
val encoder = DeriveJsonEncoder.gen[A]
val decoder = DeriveJsonDecoder.gen[A]
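+ // A hedged usage sketch (the constructor arguments of JsonCodecConfiguration are
+ // assumed from the fields read by the derivation above; `UserProfile` is hypothetical):
+ //
+ //   given JsonCodecConfiguration = JsonCodecConfiguration(fieldNameMapping = SnakeCase)
+ //   case class UserProfile(firstName: String)
+ //   val codec: JsonCodec[UserProfile] = DeriveJsonCodec.gen[UserProfile]
+ //   codec.encoder.encodeJson(UserProfile("Ann"), None) // {"first_name":"Ann"}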
diff --git a/zio-json/shared/src/main/scala-3/zio/json/union_derivation.scala b/zio-json/shared/src/main/scala-3/zio/json/union_derivation.scala
new file mode 100644
index 000000000..21b05e991
--- /dev/null
+++ b/zio-json/shared/src/main/scala-3/zio/json/union_derivation.scala
@@ -0,0 +1,59 @@
+package zio.json
+
+import scala.compiletime.*
+import scala.deriving.*
+import scala.quoted.*
+
+@scala.annotation.implicitNotFound("${A} is not a union of ${T}")
+private[json] sealed trait IsUnionOf[T, A]
+
+private[json] object IsUnionOf:
+
+ private val singleton: IsUnionOf[Any, Any] = new IsUnionOf[Any, Any] {}
+
+ transparent inline given derived[T, A]: IsUnionOf[T, A] = ${ deriveImpl[T, A] }
+
+ private def deriveImpl[T, A](using quotes: Quotes, t: Type[T], a: Type[A]): Expr[IsUnionOf[T, A]] =
+ import quotes.reflect.*
+ val tpe: TypeRepr = TypeRepr.of[A]
+ val bound: TypeRepr = TypeRepr.of[T]
+
+ def validateTypes(tpe: TypeRepr): Unit =
+ tpe.dealias match
+ case o: OrType =>
+ validateTypes(o.left)
+ validateTypes(o.right)
+ case o =>
+ if o <:< bound then ()
+ else report.errorAndAbort(s"${o.show} is not a subtype of ${bound.show}")
+
+ tpe.dealias match
+ case o: OrType =>
+ validateTypes(o)
+ ('{ IsUnionOf.singleton.asInstanceOf[IsUnionOf[T, A]] }).asExprOf[IsUnionOf[T, A]]
+ case o =>
+ if o <:< bound then ('{ IsUnionOf.singleton.asInstanceOf[IsUnionOf[T, A]] }).asExprOf[IsUnionOf[T, A]]
+ else report.errorAndAbort(s"${tpe.show} is not a Union")
+
+private[json] object UnionDerivation:
+ transparent inline def constValueUnionTuple[T, A](using IsUnionOf[T, A]): Tuple = ${ constValueUnionTupleImpl[T, A] }
+
+ private def constValueUnionTupleImpl[T: Type, A: Type](using Quotes): Expr[Tuple] =
+ Expr.ofTupleFromSeq(constTypes[T, A])
+
+ private def constTypes[T: Type, A: Type](using Quotes): List[Expr[Any]] =
+ import quotes.reflect.*
+ val tpe: TypeRepr = TypeRepr.of[A]
+ val bound: TypeRepr = TypeRepr.of[T]
+
+ def transformTypes(tpe: TypeRepr): List[TypeRepr] =
+ tpe.dealias match
+ case o: OrType =>
+ transformTypes(o.left) ::: transformTypes(o.right)
+ case o: Constant if o <:< bound && o.isSingleton =>
+ o :: Nil
+ case o =>
+ report.errorAndAbort(s"${o.show} is not a subtype of ${bound.show}")
+
+ transformTypes(tpe).distinct.map(_.asType match
+ case '[t] => '{ constValue[t] })
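+
+// A hedged sketch of what these helpers enable. Both types are private[json], so this
+// would only compile inside the zio.json package; `Size` is a hypothetical alias:
+//
+//   type Size = "small" | "large"
+//   summon[IsUnionOf[String, Size]]                    // compiles: every member is a String
+//   UnionDerivation.constValueUnionTuple[String, Size] // intended to yield ("small", "large")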
diff --git a/zio-json/shared/src/main/scala/zio/json/JsonCodec.scala b/zio-json/shared/src/main/scala/zio/json/JsonCodec.scala
index 9de0ac4bb..77ffccb2a 100644
--- a/zio-json/shared/src/main/scala/zio/json/JsonCodec.scala
+++ b/zio-json/shared/src/main/scala/zio/json/JsonCodec.scala
@@ -20,20 +20,17 @@ import zio.{ Chunk, NonEmptyChunk }
import scala.collection.immutable
/**
- * A `JsonCodec[A]` instance has the ability to encode values of type `A` into JSON, together with
- * the ability to decode such JSON into values of type `A`.
+ * A `JsonCodec[A]` instance has the ability to encode values of type `A` into JSON, together with the ability to decode
+ * such JSON into values of type `A`.
*
- * Instances of this trait should satisfy round-tripping laws: that is, for every value, instances
- * must be able to successfully encode the value into JSON, and then successfully decode the same
- * value from such JSON.
+ * Instances of this trait should satisfy round-tripping laws: that is, for every value, instances must be able to
+ * successfully encode the value into JSON, and then successfully decode the same value from such JSON.
*
* For more information, see [[JsonDecoder]] and [[JsonEncoder]].
*
- * {{
- * val intCodec: JsonCodec[Int] = JsonCodec[Int]
+ * {{{
+ * val intCodec: JsonCodec[Int] = JsonCodec[Int]
*
- * intCodec.encodeJson(intCodec.encodeJson(42)) == Right(42)
- * }}
+ * intCodec.decodeJson(intCodec.encodeJson(42)) == Right(42)
+ * }}}
*/
final case class JsonCodec[A](encoder: JsonEncoder[A], decoder: JsonDecoder[A]) { self =>
@@ -87,7 +84,8 @@ final case class JsonCodec[A](encoder: JsonEncoder[A], decoder: JsonDecoder[A])
object JsonCodec extends GeneratedTupleCodecs with CodecLowPriority0 with JsonCodecVersionSpecific {
def apply[A](implicit jsonCodec: JsonCodec[A]): JsonCodec[A] = jsonCodec
- def apply[A](encoder: JsonEncoder[A], decoder: JsonDecoder[A]): JsonCodec[A] = new JsonCodec(encoder, decoder)
+ implicit def fromEncoderDecoder[A](implicit encoder: JsonEncoder[A], decoder: JsonDecoder[A]): JsonCodec[A] =
+ JsonCodec(encoder, decoder)
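+
+ // Hedged example: with both instances in scope a codec now resolves implicitly
+ // (given the implicit parameter list above), so round-tripping can be written as:
+ //
+ //   def roundTrip[A: JsonEncoder: JsonDecoder](a: A): Either[String, A] =
+ //     JsonCodec[A].decoder.decodeJson(JsonCodec[A].encoder.encodeJson(a, None))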
private def orElseEither[A, B](A: JsonCodec[A], B: JsonCodec[B]): JsonCodec[Either[A, B]] =
JsonCodec(
@@ -122,6 +120,8 @@ object JsonCodec extends GeneratedTupleCodecs with CodecLowPriority0 with JsonCo
}
private[json] trait CodecLowPriority0 extends CodecLowPriority1 { this: JsonCodec.type =>
+ implicit def array[A: JsonEncoder: JsonDecoder: reflect.ClassTag]: JsonCodec[Array[A]] =
+ JsonCodec(JsonEncoder.array[A], JsonDecoder.array[A])
implicit def chunk[A: JsonEncoder: JsonDecoder]: JsonCodec[Chunk[A]] =
JsonCodec(JsonEncoder.chunk[A], JsonDecoder.chunk[A])
@@ -138,7 +138,7 @@ private[json] trait CodecLowPriority0 extends CodecLowPriority1 { this: JsonCode
}
private[json] trait CodecLowPriority1 extends CodecLowPriority2 { this: JsonCodec.type =>
- implicit def seq[A: JsonEncoder: JsonDecoder]: JsonCodec[Seq[A]] = JsonCodec(JsonEncoder.seq[A], JsonDecoder.seq[A])
+ implicit def seq[A: JsonEncoder: JsonDecoder]: JsonCodec[Seq[A]] = JsonCodec(JsonEncoder.seq[A], JsonDecoder.seq[A])
implicit def list[A: JsonEncoder: JsonDecoder]: JsonCodec[List[A]] =
JsonCodec(JsonEncoder.list[A], JsonDecoder.list[A])
implicit def vector[A: JsonEncoder: JsonDecoder]: JsonCodec[Vector[A]] =
@@ -156,6 +156,10 @@ private[json] trait CodecLowPriority1 extends CodecLowPriority2 { this: JsonCode
implicit def sortedSet[A: Ordering: JsonEncoder: JsonDecoder]: JsonCodec[immutable.SortedSet[A]] =
JsonCodec(JsonEncoder.sortedSet[A], JsonDecoder.sortedSet[A])
+
+ implicit def listMap[K: JsonFieldEncoder: JsonFieldDecoder, V: JsonEncoder: JsonDecoder]
+ : JsonCodec[immutable.ListMap[K, V]] =
+ JsonCodec(JsonEncoder.listMap[K, V], JsonDecoder.listMap[K, V])
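+
+ // Hedged example: the ListMap codec preserves insertion order on both sides, e.g.
+ //   JsonCodec[immutable.ListMap[String, Int]]
+ //     .decoder.decodeJson("""{"b":2,"a":1}""") // Right(ListMap("b" -> 2, "a" -> 1))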
}
private[json] trait CodecLowPriority2 extends CodecLowPriority3 { this: JsonCodec.type =>
diff --git a/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala b/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala
index 2d4b26507..f10c2cdb0 100644
--- a/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala
+++ b/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala
@@ -16,20 +16,20 @@
package zio.json
import zio.json.ast.Json
+import zio.json.internal.Lexer.NumberMaxBits
import zio.json.internal._
import zio.json.javatime.parsers
import zio.json.uuid.UUIDParser
import zio.{ Chunk, NonEmptyChunk }
import java.util.UUID
-import scala.annotation._
import scala.collection.immutable.{ LinearSeq, ListSet, TreeSet }
import scala.collection.{ immutable, mutable }
import scala.util.control.NoStackTrace
/**
- * A `JsonDecoder[A]` instance has the ability to decode JSON to values of type `A`, potentially
- * failing with an error if the JSON content does not encode a value of the given type.
+ * A `JsonDecoder[A]` instance has the ability to decode JSON to values of type `A`, potentially failing with an error
+ * if the JSON content does not encode a value of the given type.
*/
trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] {
self =>
@@ -59,33 +59,54 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] {
*/
final def <*[B](that: => JsonDecoder[B]): JsonDecoder[A] = self.zipLeft(that)
+ final def both[B](that: => JsonDecoder[B]): JsonDecoder[(A, B)] = bothWith(that)((a, b) => (a, b))
+
+ final def bothRight[B](that: => JsonDecoder[B]): JsonDecoder[B] = bothWith(that)((_, b) => b)
+
+ final def bothLeft[B](that: => JsonDecoder[B]): JsonDecoder[A] = bothWith(that)((a, _) => a)
+
+ final def bothWith[B, C](that: => JsonDecoder[B])(f: (A, B) => C): JsonDecoder[C] =
+ new JsonDecoder[C] {
+ override def unsafeDecode(trace: List[JsonError], in: RetractReader): C = {
+ val rr = RecordingReader(in)
+ val a = self.unsafeDecode(trace, rr)
+ rr.rewind()
+ val b = that.unsafeDecode(trace, rr)
+ f(a, b)
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): C = {
+ val a = self.unsafeFromJsonAST(trace, json)
+ val b = that.unsafeFromJsonAST(trace, json)
+ f(a, b)
+ }
+ }
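+
+ // A hedged example of the new combinators (`Name` and `Age` are hypothetical):
+ //
+ //   case class Name(name: String)
+ //   case class Age(age: Int)
+ //   val nameAndAge: JsonDecoder[(Name, Age)] =
+ //     DeriveJsonDecoder.gen[Name].both(DeriveJsonDecoder.gen[Age])
+ //   nameAndAge.decodeJson("""{"name":"Ann","age":42}""") // Right((Name("Ann"), Age(42)))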
+
/**
- * Attempts to decode a value of type `A` from the specified `CharSequence`, but may fail with
- * a human-readable error message if the provided text does not encode a value of this type.
+ * Attempts to decode a value of type `A` from the specified `CharSequence`, but may fail with a human-readable error
+ * message if the provided text does not encode a value of this type.
*
* Note: This method may not entirely consume the specified character sequence.
*/
final def decodeJson(str: CharSequence): Either[String, A] =
- try Right(unsafeDecode(Nil, new FastStringReader(str)))
+ try new Right(unsafeDecode(Nil, new FastStringReader(str)))
catch {
- case JsonDecoder.UnsafeJson(trace) => Left(JsonError.render(trace))
- case _: UnexpectedEnd => Left("Unexpected end of input")
- case _: StackOverflowError => Left("Unexpected structure")
+ case e: JsonDecoder.UnsafeJson => new Left(JsonError.render(e.trace))
+ case _: UnexpectedEnd => new Left("Unexpected end of input")
+ case _: StackOverflowError => new Left("Unexpected structure")
}
/**
- * Returns this decoder but widened to the its given super-type
+ * Returns this decoder but widened to the given super-type
*/
final def widen[B >: A]: JsonDecoder[B] = self.asInstanceOf[JsonDecoder[B]]
/**
- * Returns a new codec that combines this codec and the specified codec using fallback semantics:
- * such that if this codec fails, the specified codec will be tried instead.
- * This method may be unsafe from a security perspective: it can use more memory than hand coded
- * alternative and so lead to DOS.
+ * Returns a new codec that combines this codec and the specified codec using fallback semantics: if this codec
+ * fails, the specified codec will be tried instead. This method may be unsafe from a security perspective: it can
+ * use more memory than a hand-coded alternative and so lead to a DoS.
*
- * For example, in the case of an alternative between `Int` and `Boolean`, a hand coded
- * alternative would look like:
+ * For example, in the case of an alternative between `Int` and `Boolean`, a hand coded alternative would look like:
*
* ```
* val decoder: JsonDecoder[AnyVal] = JsonDecoder.peekChar[AnyVal] {
@@ -96,92 +117,80 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] {
*/
final def orElse[A1 >: A](that: => JsonDecoder[A1]): JsonDecoder[A1] =
new JsonDecoder[A1] {
-
def unsafeDecode(trace: List[JsonError], in: RetractReader): A1 = {
- val in2 = new zio.json.internal.WithRecordingReader(in, 64)
-
- try self.unsafeDecode(trace, in2)
+ val rr = RecordingReader(in)
+ try self.unsafeDecode(trace, rr)
catch {
- case JsonDecoder.UnsafeJson(_) =>
- in2.rewind()
- that.unsafeDecode(trace, in2)
-
- case _: UnexpectedEnd =>
- in2.rewind()
- that.unsafeDecode(trace, in2)
+ case _: JsonDecoder.UnsafeJson | _: UnexpectedEnd =>
+ rr.rewind()
+ that.unsafeDecode(trace, rr)
}
}
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A1 =
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): A1 =
try self.unsafeFromJsonAST(trace, json)
catch {
- case JsonDecoder.UnsafeJson(_) | _: UnexpectedEnd => that.unsafeFromJsonAST(trace, json)
+ case _: JsonDecoder.UnsafeJson | _: UnexpectedEnd => that.unsafeFromJsonAST(trace, json)
}
override def unsafeDecodeMissing(trace: List[JsonError]): A1 =
try self.unsafeDecodeMissing(trace)
catch {
- case _: Throwable => that.unsafeDecodeMissing(trace)
+ case _: JsonDecoder.UnsafeJson | _: UnexpectedEnd => that.unsafeDecodeMissing(trace)
}
-
}
/**
- * Returns a new codec that combines this codec and the specified codec using fallback semantics:
- * such that if this codec fails, the specified codec will be tried instead.
+ * Returns a new codec that combines this codec and the specified codec using fallback semantics: if this codec
+ * fails, the specified codec will be tried instead.
*/
final def orElseEither[B](that: => JsonDecoder[B]): JsonDecoder[Either[A, B]] =
- self.map(Left(_)).orElse(that.map(Right(_)))
+ self.map(new Left(_)).orElse(that.map(new Right(_)))
/**
* Returns a new codec whose decoded values will be mapped by the specified function.
*/
final def map[B](f: A => B): JsonDecoder[B] =
- new JsonDecoder[B] {
+ new MappedJsonDecoder[B] {
+ private[json] def underlying: JsonDecoder[A] = self
- def unsafeDecode(trace: List[JsonError], in: RetractReader): B =
- f(self.unsafeDecode(trace, in))
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): B = f(self.unsafeDecode(trace, in))
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): B =
- f(self.unsafeFromJsonAST(trace, json))
-
- override def unsafeDecodeMissing(trace: List[JsonError]): B =
- f(self.unsafeDecodeMissing(trace))
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): B = f(self.unsafeFromJsonAST(trace, json))
+ override def unsafeDecodeMissing(trace: List[JsonError]): B = f(self.unsafeDecodeMissing(trace))
}
/**
- * Returns a new codec whose decoded values will be mapped by the specified function, which may
- * itself decide to fail with some type of error.
+ * Returns a new codec whose decoded values will be mapped by the specified function, which may itself decide to fail
+ * with some type of error.
*/
final def mapOrFail[B](f: A => Either[String, B]): JsonDecoder[B] =
- new JsonDecoder[B] {
+ new MappedJsonDecoder[B] {
+ private[json] def underlying: JsonDecoder[A] = self
def unsafeDecode(trace: List[JsonError], in: RetractReader): B =
f(self.unsafeDecode(trace, in)) match {
- case Left(err) =>
- throw JsonDecoder.UnsafeJson(JsonError.Message(err) :: trace)
- case Right(b) => b
+ case Right(b) => b
+ case Left(err) => Lexer.error(err, trace)
}
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): B =
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): B =
f(self.unsafeFromJsonAST(trace, json)) match {
- case Left(err) => throw JsonDecoder.UnsafeJson(JsonError.Message(err) :: trace)
case Right(b) => b
+ case Left(err) => Lexer.error(err, trace)
}
override def unsafeDecodeMissing(trace: List[JsonError]): B =
f(self.unsafeDecodeMissing(trace)) match {
- case Left(err) =>
- throw JsonDecoder.UnsafeJson(JsonError.Message(err) :: trace)
- case Right(b) => b
+ case Right(b) => b
+ case Left(err) => Lexer.error(err, trace)
}
-
}
/**
- * Returns a new codec that combines this codec and the specified codec into a single codec that
- * decodes a tuple of the values decoded by the respective codecs.
+ * Returns a new codec that combines this codec and the specified codec into a single codec that decodes a tuple of
+ * the values decoded by the respective codecs.
*/
final def zip[B](that: => JsonDecoder[B]): JsonDecoder[(A, B)] = JsonDecoder.tuple2(this, that)
@@ -198,15 +207,13 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] {
/**
* Zips two codecs into one, transforming the outputs of zip codecs by the specified function.
*/
- final def zipWith[B, C](that: => JsonDecoder[B])(f: (A, B) => C): JsonDecoder[C] =
- self.zip(that).map(f.tupled)
+ final def zipWith[B, C](that: => JsonDecoder[B])(f: (A, B) => C): JsonDecoder[C] = self.zip(that).map(f.tupled)
- def unsafeDecodeMissing(trace: List[JsonError]): A =
- throw JsonDecoder.UnsafeJson(JsonError.Message("missing") :: trace)
+ def unsafeDecodeMissing(trace: List[JsonError]): A = Lexer.error("missing", trace)
/**
- * Low-level, unsafe method to decode a value or throw an exception. This method should not be
- * called in application code, although it can be implemented for user-defined data structures.
+ * Low-level, unsafe method to decode a value or throw an exception. This method should not be called in application
+ * code, although it can be implemented for user-defined data structures.
*/
def unsafeDecode(trace: List[JsonError], in: RetractReader): A
@@ -216,17 +223,16 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] {
/**
* Decode a value from an already parsed Json AST.
*
- * The default implementation encodes the Json to a byte stream and uses decode to parse that.
- * Override to provide a more performant implementation.
+ * The default implementation encodes the Json to a byte stream and uses decode to parse that. Override to provide a
+ * more performant implementation.
*/
final def fromJsonAST(json: Json): Either[String, A] =
- try Right(unsafeFromJsonAST(Nil, json))
+ try new Right(unsafeFromJsonAST(Nil, json))
catch {
- case JsonDecoder.UnsafeJson(trace) => Left(JsonError.render(trace))
- case _: UnexpectedEnd => Left("Unexpected end of input")
- case _: StackOverflowError => Left("Unexpected structure")
+ case e: JsonDecoder.UnsafeJson => new Left(JsonError.render(e.trace))
+ case _: UnexpectedEnd => new Left("Unexpected end of input")
+ case _: StackOverflowError => new Left("Unexpected structure")
}
-
}
object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with JsonDecoderVersionSpecific {
@@ -236,34 +242,31 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with
def apply[A](implicit a: JsonDecoder[A]): JsonDecoder[A] = a
/**
- * Design note: we could require the position in the stream here to improve
- * debugging messages. But the cost would be that the RetractReader would need
- * to keep track and any wrappers would need to preserve the position. It may
- * still be desirable to do this but at the moment it is not necessary.
+ * Design note: we could require the position in the stream here to improve debugging messages. But the cost would be
+ * that the RetractReader would need to keep track and any wrappers would need to preserve the position. It may still
+ * be desirable to do this but at the moment it is not necessary.
*/
final case class UnsafeJson(trace: List[JsonError])
extends Exception("If you see this, a developer made a mistake using JsonDecoder")
with NoStackTrace
def peekChar[A](partialFunction: PartialFunction[Char, JsonDecoder[A]]): JsonDecoder[A] = new JsonDecoder[A] {
-
override def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
val c = in.nextNonWhitespace()
if (partialFunction.isDefinedAt(c)) {
in.retract()
partialFunction(c).unsafeDecode(trace, in)
- } else {
- throw UnsafeJson(JsonError.Message(s"missing case in `peekChar` for '${c}''") :: trace)
- }
+ } else Lexer.error(s"missing case in `peekChar` for '${c}''", trace)
}
}
def suspend[A](decoder0: => JsonDecoder[A]): JsonDecoder[A] =
- new JsonDecoder[A] {
+ new MappedJsonDecoder[A] {
+ private[json] def underlying: JsonDecoder[A] = decoder0
+
lazy val decoder = decoder0
- override def unsafeDecode(trace: List[JsonError], in: RetractReader): A =
- decoder.unsafeDecode(trace, in)
+ override def unsafeDecode(trace: List[JsonError], in: RetractReader): A = decoder.unsafeDecode(trace, in)
override def unsafeDecodeMissing(trace: List[JsonError]): A = decoder.unsafeDecodeMissing(trace)
@@ -271,332 +274,621 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with
}
implicit val string: JsonDecoder[String] = new JsonDecoder[String] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): String = Lexer.string(trace, in).toString
- def unsafeDecode(trace: List[JsonError], in: RetractReader): String =
- Lexer.string(trace, in).toString
-
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): String =
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): String =
json match {
- case Json.Str(value) => value
- case _ => throw UnsafeJson(JsonError.Message("Not a string value") :: trace)
+ case s: Json.Str => s.value
+ case _ => Lexer.error("expected string", trace)
}
}
implicit val boolean: JsonDecoder[Boolean] = new JsonDecoder[Boolean] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Boolean = Lexer.boolean(trace, in)
- def unsafeDecode(trace: List[JsonError], in: RetractReader): Boolean =
- Lexer.boolean(trace, in)
-
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Boolean =
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Boolean =
json match {
- case Json.Bool(value) => value
- case _ => throw UnsafeJson(JsonError.Message("Not a bool value") :: trace)
+ case b: Json.Bool => b.value
+ case _ => Lexer.error("expected boolean", trace)
}
}
- implicit val char: JsonDecoder[Char] = string.mapOrFail {
- case str if str.length == 1 => Right(str(0))
- case _ => Left("expected one character")
+ implicit val char: JsonDecoder[Char] = new JsonDecoder[Char] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Char = Lexer.char(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Char =
+ json match {
+ case s: Json.Str if s.value.length == 1 => s.value.charAt(0)
+ case _ => Lexer.error("expected single character string", trace)
+ }
}
+
implicit val symbol: JsonDecoder[Symbol] = string.map(Symbol(_))
- implicit val byte: JsonDecoder[Byte] = number(Lexer.byte, _.byteValueExact())
- implicit val short: JsonDecoder[Short] = number(Lexer.short, _.shortValueExact())
- implicit val int: JsonDecoder[Int] = number(Lexer.int, _.intValueExact())
- implicit val long: JsonDecoder[Long] = number(Lexer.long, _.longValueExact())
- implicit val bigInteger: JsonDecoder[java.math.BigInteger] = number(Lexer.bigInteger, _.toBigIntegerExact)
- implicit val scalaBigInt: JsonDecoder[BigInt] = bigInteger.map(x => x)
- implicit val float: JsonDecoder[Float] = number(Lexer.float, _.floatValue())
- implicit val double: JsonDecoder[Double] = number(Lexer.double, _.doubleValue())
- implicit val bigDecimal: JsonDecoder[java.math.BigDecimal] = number(Lexer.bigDecimal, identity)
- implicit val scalaBigDecimal: JsonDecoder[BigDecimal] = bigDecimal.map(x => x)
-
- // numbers decode from numbers or strings for maximum compatibility
- private[this] def number[A](
- f: (List[JsonError], RetractReader) => A,
- fromBigDecimal: java.math.BigDecimal => A
- ): JsonDecoder[A] =
- new JsonDecoder[A] {
+ implicit val byte: JsonDecoder[Byte] = new JsonDecoder[Byte] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Byte =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.byte(trace, in)
+ } else {
+ val a = Lexer.byte(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
- def unsafeDecode(trace: List[JsonError], in: RetractReader): A =
- (in.nextNonWhitespace(): @switch) match {
- case '"' =>
- val i = f(trace, in)
- Lexer.charOnly(trace, in, '"')
- i
- case _ =>
- in.retract()
- f(trace, in)
- }
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Byte = {
+ json match {
+ case n: Json.Num =>
+ try return n.value.byteValueExact
+ catch {
+ case _: ArithmeticException =>
+ }
+ case s: Json.Str =>
+ try return UnsafeNumbers.byte_(new FastStringReader(s.value), true)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Byte", trace)
+ }
+ }
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
- json match {
- case Json.Num(value) =>
- try fromBigDecimal(value)
- catch {
- case exception: ArithmeticException => throw UnsafeJson(JsonError.Message(exception.getMessage) :: trace)
- }
- case Json.Str(value) =>
- val reader = new FastStringReader(value)
- try f(List.empty, reader)
- finally reader.close()
- case _ => throw UnsafeJson(JsonError.Message("Not a number or a string") :: trace)
- }
+ implicit val short: JsonDecoder[Short] = new JsonDecoder[Short] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Short =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.short(trace, in)
+ } else {
+ val a = Lexer.short(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Short = {
+ json match {
+ case n: Json.Num =>
+ try return n.value.shortValueExact
+ catch {
+ case _: ArithmeticException =>
+ }
+ case s: Json.Str =>
+ try return UnsafeNumbers.short_(new FastStringReader(s.value), true)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Short", trace)
+ }
+ }
+
+ implicit val int: JsonDecoder[Int] = new JsonDecoder[Int] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Int =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.int(trace, in)
+ } else {
+ val a = Lexer.int(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Int = {
+ json match {
+ case n: Json.Num =>
+ try return n.value.intValueExact
+ catch {
+ case _: ArithmeticException =>
+ }
+ case s: Json.Str =>
+ try return UnsafeNumbers.int_(new FastStringReader(s.value), true)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Int", trace)
+ }
+ }
+ implicit val long: JsonDecoder[Long] = new JsonDecoder[Long] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Long =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.long(trace, in)
+ } else {
+ val a = Lexer.long(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Long = {
+ json match {
+ case n: Json.Num =>
+ try return n.value.longValueExact
+ catch {
+ case _: ArithmeticException =>
+ }
+ case s: Json.Str =>
+ try return UnsafeNumbers.long_(new FastStringReader(s.value), true)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Long", trace)
+ }
+ }
+
+ implicit val bigInteger: JsonDecoder[java.math.BigInteger] = new JsonDecoder[java.math.BigInteger] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): java.math.BigInteger =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.bigInteger(trace, in)
+ } else {
+ val a = Lexer.bigInteger(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): java.math.BigInteger = {
+ json match {
+ case n: Json.Num =>
+ try return n.value.toBigIntegerExact
+ catch {
+ case _: ArithmeticException =>
+ }
+ case s: Json.Str =>
+ try return UnsafeNumbers.bigInteger_(new FastStringReader(s.value), true, Lexer.NumberMaxBits)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error(s"expected a $NumberMaxBits-bit BigInteger", trace)
+ }
+ }
+ implicit val scalaBigInt: JsonDecoder[BigInt] = new JsonDecoder[BigInt] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): BigInt =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.bigInt(trace, in)
+ } else {
+ val a = Lexer.bigInt(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): BigInt = {
+ json match {
+ case n: Json.Num =>
+ try return BigInt(n.value.toBigIntegerExact)
+ catch {
+ case _: ArithmeticException =>
+ }
+ case s: Json.Str =>
+ try return UnsafeNumbers.bigInt_(new FastStringReader(s.value), true, Lexer.NumberMaxBits)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error(s"expected a $NumberMaxBits-bit BigInt", trace)
}
+ }
+ implicit val float: JsonDecoder[Float] = new JsonDecoder[Float] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Float =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.float(trace, in)
+ } else {
+ val a = Lexer.float(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Float = {
+ json match {
+ case n: Json.Num =>
+ return n.value.floatValue
+ case s: Json.Str =>
+ try return UnsafeNumbers.float_(new FastStringReader(s.value), true, Lexer.NumberMaxBits)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Float", trace)
+ }
+ }
+ implicit val double: JsonDecoder[Double] = new JsonDecoder[Double] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Double =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.double(trace, in)
+ } else {
+ val a = Lexer.double(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Double = {
+ json match {
+ case n: Json.Num =>
+ return n.value.doubleValue
+ case s: Json.Str =>
+ try return UnsafeNumbers.double_(new FastStringReader(s.value), true, Lexer.NumberMaxBits)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Double", trace)
+ }
+ }
+ implicit val bigDecimal: JsonDecoder[java.math.BigDecimal] = new JsonDecoder[java.math.BigDecimal] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): java.math.BigDecimal =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.bigDecimal(trace, in)
+ } else {
+ val a = Lexer.bigDecimal(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): java.math.BigDecimal = {
+ json match {
+ case n: Json.Num =>
+ return n.value
+ case s: Json.Str =>
+ try return UnsafeNumbers.bigDecimal_(new FastStringReader(s.value), true, Lexer.NumberMaxBits)
+ catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error(s"expected a BigDecimal with $NumberMaxBits-bit mantissa", trace)
+ }
+ }
+ implicit val scalaBigDecimal: JsonDecoder[BigDecimal] = new JsonDecoder[BigDecimal] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): BigDecimal =
+ if (in.nextNonWhitespace() != '"') {
+ in.retract()
+ Lexer.bigDecimal(trace, in)
+ } else {
+ val a = Lexer.bigDecimal(trace, in)
+ val c = in.readChar()
+ if (c != '"') Lexer.error("'\"'", c, trace)
+ a
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): BigDecimal = {
+ json match {
+ case n: Json.Num =>
+ return new BigDecimal(n.value, BigDecimal.defaultMathContext)
+ case s: Json.Str =>
+ try {
+ val bd = UnsafeNumbers.bigDecimal_(new FastStringReader(s.value), true, Lexer.NumberMaxBits)
+ return new BigDecimal(bd, BigDecimal.defaultMathContext)
+ } catch {
+ case _: UnexpectedEnd | UnsafeNumbers.UnsafeNumber =>
+ }
+ case _ =>
+ }
+ Lexer.error(s"expected a BigDecimal with $NumberMaxBits-bit mantissa", trace)
+ }
+ }
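+
+ // Hedged illustration: as with the removed `number` helper, the decoders above accept
+ // either a JSON number or the same value quoted as a string, e.g.
+ //
+ //   JsonDecoder[Int].decodeJson("42")     // Right(42)
+ //   JsonDecoder[Int].decodeJson("\"42\"") // Right(42)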
// Option treats empty and null values as Nothing and passes values to the decoder.
//
// If alternative behaviour is desired, e.g. pass null to the underlying, then
// use a newtype wrapper.
- implicit def option[A](implicit A: JsonDecoder[A]): JsonDecoder[Option[A]] =
- new JsonDecoder[Option[A]] { self =>
- private[this] val ull: Array[Char] = "ull".toCharArray
- override def unsafeDecodeMissing(trace: List[JsonError]): Option[A] =
- Option.empty
+ implicit def option[A](implicit A: JsonDecoder[A]): JsonDecoder[Option[A]] =
+ new OptionJsonDecoder[Option[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Option[A] = None
def unsafeDecode(trace: List[JsonError], in: RetractReader): Option[A] =
- (in.nextNonWhitespace(): @switch) match {
- case 'n' =>
- Lexer.readChars(trace, in, ull, "null")
- None
- case _ =>
- in.retract()
- Some(A.unsafeDecode(trace, in))
- }
-
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Option[A] =
- json match {
- case Json.Null => None
- case _ => Some(A.unsafeFromJsonAST(trace, json))
- }
+ if (in.nextNonWhitespace() != 'n') {
+ in.retract()
+ new Some(A.unsafeDecode(trace, in))
+ } else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') None
+ else Lexer.error("expected 'null'", trace)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Option[A] =
+ if (json ne Json.Null) new Some(A.unsafeFromJsonAST(trace, json))
+ else None
}
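+
+ // Hedged examples of the rewritten Option decoder:
+ //
+ //   JsonDecoder[Option[Int]].decodeJson("null") // Right(None)
+ //   JsonDecoder[Option[Int]].decodeJson("1")    // Right(Some(1))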
// supports multiple representations for compatibility with other libraries,
// but does not support the "discriminator field" encoding with a field named
// "value" used by some libraries.
- implicit def either[A, B](implicit
- A: JsonDecoder[A],
- B: JsonDecoder[B]
- ): JsonDecoder[Either[A, B]] =
+ implicit def either[A, B](implicit A: JsonDecoder[A], B: JsonDecoder[B]): JsonDecoder[Either[A, B]] =
new JsonDecoder[Either[A, B]] {
-
- val names: Array[String] =
- Array("a", "Left", "left", "b", "Right", "right")
- val matrix: StringMatrix = new StringMatrix(names)
- val spans: Array[JsonError] = names.map(JsonError.ObjectAccess)
-
- def unsafeDecode(
- trace: List[JsonError],
- in: RetractReader
- ): Either[A, B] = {
- Lexer.char(trace, in, '{')
-
- val values: Array[Any] = Array.ofDim(2)
-
+ private[this] val names = Array("a", "Left", "left", "b", "Right", "right")
+ private[this] val matrix = new StringMatrix(names)
+ private[this] val spans = names.map(new JsonError.ObjectAccess(_))
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Either[A, B] = {
+ val c = in.nextNonWhitespace()
+ if (c != '{') Lexer.error("'{'", c, trace)
+ var left: Any = null
+ var right: Any = null
if (Lexer.firstField(trace, in))
while ({
- {
- val field = Lexer.field(trace, in, matrix)
- if (field == -1) Lexer.skipValue(trace, in)
- else {
- val trace_ = spans(field) :: trace
- if (field < 3) {
- if (values(0) != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
- values(0) = A.unsafeDecode(trace_, in)
- } else {
- if (values(1) != null)
- throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
- values(1) = B.unsafeDecode(trace_, in)
- }
+ val field = Lexer.field(trace, in, matrix)
+ if (field == -1) Lexer.skipValue(trace, in)
+ else {
+ val trace_ = spans(field) :: trace
+ if (field < 3) {
+ if (left != null) Lexer.error("duplicate", trace_)
+ left = A.unsafeDecode(trace_, in)
+ } else {
+ if (right != null) Lexer.error("duplicate", trace_)
+ right = B.unsafeDecode(trace_, in)
}
- }; Lexer.nextField(trace, in)
+ }
+ Lexer.nextField(trace, in)
}) ()
-
- if (values(0) == null && values(1) == null)
- throw UnsafeJson(JsonError.Message("missing fields") :: trace)
- if (values(0) != null && values(1) != null)
- throw UnsafeJson(
- JsonError.Message("ambiguous either, zip present") :: trace
- )
- if (values(0) != null)
- Left(values(0).asInstanceOf[A])
- else Right(values(1).asInstanceOf[B])
+ if (left == null && right == null) Lexer.error("missing fields", trace)
+ if (left != null && right != null) Lexer.error("ambiguous either, zip present", trace)
+ if (left != null) new Left(left.asInstanceOf[A])
+ else new Right(right.asInstanceOf[B])
}
}
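+
+ // Hedged examples of the representations accepted above:
+ //
+ //   JsonDecoder[Either[Int, String]].decodeJson("""{"Left":1}""")      // Right(Left(1))
+ //   JsonDecoder[Either[Int, String]].decodeJson("""{"right":"ok"}""")  // Right(Right("ok"))
+ //   JsonDecoder[Either[Int, String]].decodeJson("""{"a":1,"b":"x"}""") // Left(...) - ambiguous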
- private[json] def builder[A, T[_]](
+ @inline private[json] def builder[A, T[_]](
trace: List[JsonError],
in: RetractReader,
builder: mutable.Builder[A, T[A]]
)(implicit A: JsonDecoder[A]): T[A] = {
- Lexer.char(trace, in, '[')
- var i: Int = 0
- if (Lexer.firstArrayElement(in)) while ({
- {
- val trace_ = JsonError.ArrayAccess(i) :: trace
- builder += A.unsafeDecode(trace_, in)
+ val c = in.nextNonWhitespace()
+ if (c == '[') {
+ var i = 0
+ if (Lexer.firstArrayElement(in)) while ({
+ builder += A.unsafeDecode(new JsonError.ArrayAccess(i) :: trace, in)
i += 1
- }; Lexer.nextArrayElement(trace, in)
- }) ()
- builder.result()
+ Lexer.nextArrayElement(trace, in)
+ }) ()
+ return builder.result()
+ }
+ Lexer.error("'['", c, trace)
}
- private[json] def keyValueBuilder[K, V, T[X, Y] <: Iterable[(X, Y)]](
+ @inline private[json] def keyValueBuilder[K, V, T[X, Y] <: Iterable[(X, Y)]](
trace: List[JsonError],
in: RetractReader,
builder: mutable.Builder[(K, V), T[K, V]]
)(implicit K: JsonFieldDecoder[K], V: JsonDecoder[V]): T[K, V] = {
- Lexer.char(trace, in, '{')
- if (Lexer.firstField(trace, in))
- while ({
- {
+ var c = in.nextNonWhitespace()
+ if (c == '{') {
+ if (Lexer.firstField(trace, in))
+ while ({
val field = Lexer.string(trace, in).toString
- val trace_ = JsonError.ObjectAccess(field) :: trace
- Lexer.char(trace_, in, ':')
+ val trace_ = new JsonError.ObjectAccess(field) :: trace
+ c = in.nextNonWhitespace()
+ if (c != ':') Lexer.error("':'", c, trace)
val value = V.unsafeDecode(trace_, in)
builder += ((K.unsafeDecodeField(trace_, field), value))
- }; Lexer.nextField(trace, in)
- }) ()
- builder.result()
+ Lexer.nextField(trace, in)
+ }) ()
+ return builder.result()
+ }
+ Lexer.error("'{'", c, trace)
}
- // use this instead of `string.mapOrFail` in supertypes (to prevent class initialization error at runtime)
+ // FIXME: remove in the next major version
private[json] def mapStringOrFail[A](f: String => Either[String, A]): JsonDecoder[A] =
new JsonDecoder[A] {
def unsafeDecode(trace: List[JsonError], in: RetractReader): A =
f(string.unsafeDecode(trace, in)) match {
- case Left(err) => throw UnsafeJson(JsonError.Message(err) :: trace)
case Right(value) => value
+ case Left(err) => Lexer.error(err, trace)
}
override def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
f(string.unsafeFromJsonAST(trace, json)) match {
- case Left(err) => throw UnsafeJson(JsonError.Message(err) :: trace)
case Right(value) => value
+ case Left(err) => Lexer.error(err, trace)
}
}
}
+private[json] trait CollectionJsonDecoder[A] extends JsonDecoder[A]
+private[json] trait OptionJsonDecoder[A] extends JsonDecoder[A]
+private[json] trait MappedJsonDecoder[A] extends JsonDecoder[A] {
+ private[json] def underlying: JsonDecoder[_]
+}
+
private[json] trait DecoderLowPriority1 extends DecoderLowPriority2 {
this: JsonDecoder.type =>
- implicit def array[A: JsonDecoder: reflect.ClassTag]: JsonDecoder[Array[A]] = new JsonDecoder[Array[A]] {
+ implicit def array[A](implicit A: JsonDecoder[A], classTag: reflect.ClassTag[A]): JsonDecoder[Array[A]] =
+ new CollectionJsonDecoder[Array[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Array[A] = Array.empty
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Array[A] = {
+ val c = in.nextNonWhitespace()
+ if (c == '[') {
+ if (Lexer.firstArrayElement(in)) {
+ var l = 8
+ var x = new Array[A](l)
+ var i = 0
+ while ({
+ if (i == l) {
+ l <<= 1
+ val x1 = new Array[A](l)
+ System.arraycopy(x, 0, x1, 0, i)
+ x = x1
+ }
+ x(i) = A.unsafeDecode(new JsonError.ArrayAccess(i) :: trace, in)
+ i += 1
+ Lexer.nextArrayElement(trace, in)
+ }) ()
+ if (i == l) return x
+ val x1 = new Array[A](i)
+ System.arraycopy(x, 0, x1, 0, i)
+ return x1
+ } else return Array.empty
+ }
+ Lexer.error("'['", c, trace)
+ }
+ }
- def unsafeDecode(trace: List[JsonError], in: RetractReader): Array[A] =
- builder(trace, in, Array.newBuilder[A])
- }
+ implicit def seq[A: JsonDecoder]: JsonDecoder[Seq[A]] =
+ new CollectionJsonDecoder[Seq[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Seq[A] = Seq.empty
- implicit def seq[A: JsonDecoder]: JsonDecoder[Seq[A]] = new JsonDecoder[Seq[A]] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Seq[A] =
+ builder(trace, in, immutable.Seq.newBuilder[A])
+ }
- def unsafeDecode(trace: List[JsonError], in: RetractReader): Seq[A] =
- builder(trace, in, immutable.Seq.newBuilder[A])
- }
+ implicit def chunk[A: JsonDecoder]: JsonDecoder[Chunk[A]] =
+ new CollectionJsonDecoder[Chunk[A]] {
+ private[this] val decoder = JsonDecoder[A]
- implicit def chunk[A: JsonDecoder]: JsonDecoder[Chunk[A]] = new JsonDecoder[Chunk[A]] {
- val decoder = JsonDecoder[A]
- def unsafeDecode(trace: List[JsonError], in: RetractReader): Chunk[A] =
- builder(trace, in, zio.ChunkBuilder.make[A]())
+ override def unsafeDecodeMissing(trace: List[JsonError]): Chunk[A] = Chunk.empty
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Chunk[A] =
- json match {
- case Json.Arr(elements) =>
- elements.zipWithIndex.map { case (json, i) =>
- decoder.unsafeFromJsonAST(JsonError.ArrayAccess(i) :: trace, json)
- }
- case _ => throw UnsafeJson(JsonError.Message("Not an array") :: trace)
- }
- }
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Chunk[A] =
+ builder(trace, in, zio.ChunkBuilder.make[A]())
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Chunk[A] =
+ json match {
+ case a: Json.Arr =>
+ a.elements.map {
+ var i = 0
+ json =>
+ val span = new JsonError.ArrayAccess(i)
+ i += 1
+ decoder.unsafeFromJsonAST(span :: trace, json)
+ }
+ case _ => Lexer.error("Not an array", trace)
+ }
+ }
implicit def nonEmptyChunk[A: JsonDecoder]: JsonDecoder[NonEmptyChunk[A]] =
chunk[A].mapOrFail(NonEmptyChunk.fromChunk(_).toRight("Chunk was empty"))
implicit def indexedSeq[A: JsonDecoder]: JsonDecoder[IndexedSeq[A]] =
- new JsonDecoder[IndexedSeq[A]] {
+ new CollectionJsonDecoder[IndexedSeq[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): IndexedSeq[A] = IndexedSeq.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): IndexedSeq[A] =
builder(trace, in, IndexedSeq.newBuilder[A])
}
implicit def linearSeq[A: JsonDecoder]: JsonDecoder[immutable.LinearSeq[A]] =
- new JsonDecoder[immutable.LinearSeq[A]] {
+ new CollectionJsonDecoder[immutable.LinearSeq[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.LinearSeq[A] = immutable.LinearSeq.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): LinearSeq[A] =
builder(trace, in, immutable.LinearSeq.newBuilder[A])
}
- implicit def listSet[A: JsonDecoder]: JsonDecoder[immutable.ListSet[A]] = new JsonDecoder[immutable.ListSet[A]] {
+ implicit def listSet[A: JsonDecoder]: JsonDecoder[immutable.ListSet[A]] =
+ new CollectionJsonDecoder[immutable.ListSet[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.ListSet[A] = immutable.ListSet.empty
- def unsafeDecode(trace: List[JsonError], in: RetractReader): ListSet[A] =
- builder(trace, in, immutable.ListSet.newBuilder[A])
- }
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): ListSet[A] =
+ builder(trace, in, immutable.ListSet.newBuilder[A])
+ }
implicit def treeSet[A: JsonDecoder: Ordering]: JsonDecoder[immutable.TreeSet[A]] =
- new JsonDecoder[immutable.TreeSet[A]] {
+ new CollectionJsonDecoder[immutable.TreeSet[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.TreeSet[A] = immutable.TreeSet.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): TreeSet[A] =
builder(trace, in, immutable.TreeSet.newBuilder[A])
}
- implicit def list[A: JsonDecoder]: JsonDecoder[List[A]] = new JsonDecoder[List[A]] {
+ implicit def list[A: JsonDecoder]: JsonDecoder[List[A]] =
+ new CollectionJsonDecoder[List[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): List[A] = List.empty
- def unsafeDecode(trace: List[JsonError], in: RetractReader): List[A] =
- builder(trace, in, new mutable.ListBuffer[A])
- }
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): List[A] =
+ builder(trace, in, new mutable.ListBuffer[A])
+ }
- implicit def vector[A: JsonDecoder]: JsonDecoder[Vector[A]] = new JsonDecoder[Vector[A]] {
+ implicit def vector[A: JsonDecoder]: JsonDecoder[Vector[A]] =
+ new CollectionJsonDecoder[Vector[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Vector[A] = Vector.empty
- def unsafeDecode(trace: List[JsonError], in: RetractReader): Vector[A] =
- builder(trace, in, immutable.Vector.newBuilder[A])
- }
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Vector[A] =
+ builder(trace, in, immutable.Vector.newBuilder[A])
+ }
- implicit def set[A: JsonDecoder]: JsonDecoder[Set[A]] = new JsonDecoder[Set[A]] {
+ implicit def set[A: JsonDecoder]: JsonDecoder[Set[A]] =
+ new CollectionJsonDecoder[Set[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Set[A] = Set.empty
- def unsafeDecode(trace: List[JsonError], in: RetractReader): Set[A] =
- builder(trace, in, Set.newBuilder[A])
- }
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Set[A] =
+ builder(trace, in, Set.newBuilder[A])
+ }
- implicit def hashSet[A: JsonDecoder]: JsonDecoder[immutable.HashSet[A]] = new JsonDecoder[immutable.HashSet[A]] {
+ implicit def hashSet[A: JsonDecoder]: JsonDecoder[immutable.HashSet[A]] =
+ new CollectionJsonDecoder[immutable.HashSet[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.HashSet[A] = immutable.HashSet.empty
- def unsafeDecode(trace: List[JsonError], in: RetractReader): immutable.HashSet[A] =
- builder(trace, in, immutable.HashSet.newBuilder[A])
- }
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): immutable.HashSet[A] =
+ builder(trace, in, immutable.HashSet.newBuilder[A])
+ }
implicit def map[K: JsonFieldDecoder, V: JsonDecoder]: JsonDecoder[Map[K, V]] =
- new JsonDecoder[Map[K, V]] {
+ new CollectionJsonDecoder[Map[K, V]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Map[K, V] = Map.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): Map[K, V] =
keyValueBuilder(trace, in, Map.newBuilder[K, V])
}
implicit def hashMap[K: JsonFieldDecoder, V: JsonDecoder]: JsonDecoder[immutable.HashMap[K, V]] =
- new JsonDecoder[immutable.HashMap[K, V]] {
+ new CollectionJsonDecoder[immutable.HashMap[K, V]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.HashMap[K, V] = immutable.HashMap.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): immutable.HashMap[K, V] =
keyValueBuilder(trace, in, immutable.HashMap.newBuilder[K, V])
}
implicit def mutableMap[K: JsonFieldDecoder, V: JsonDecoder]: JsonDecoder[mutable.Map[K, V]] =
- new JsonDecoder[mutable.Map[K, V]] {
+ new CollectionJsonDecoder[mutable.Map[K, V]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): mutable.Map[K, V] = mutable.Map.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): mutable.Map[K, V] =
keyValueBuilder(trace, in, mutable.Map.newBuilder[K, V])
}
implicit def sortedSet[A: Ordering: JsonDecoder]: JsonDecoder[immutable.SortedSet[A]] =
- new JsonDecoder[immutable.SortedSet[A]] {
+ new CollectionJsonDecoder[immutable.SortedSet[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.SortedSet[A] = immutable.SortedSet.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): immutable.SortedSet[A] =
builder(trace, in, immutable.SortedSet.newBuilder[A])
}
implicit def sortedMap[K: JsonFieldDecoder: Ordering, V: JsonDecoder]: JsonDecoder[collection.SortedMap[K, V]] =
- new JsonDecoder[collection.SortedMap[K, V]] {
+ new CollectionJsonDecoder[collection.SortedMap[K, V]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): collection.SortedMap[K, V] = collection.SortedMap.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): collection.SortedMap[K, V] =
keyValueBuilder(trace, in, collection.SortedMap.newBuilder[K, V])
}
+
+ implicit def listMap[K: JsonFieldDecoder, V: JsonDecoder]: JsonDecoder[immutable.ListMap[K, V]] =
+ new CollectionJsonDecoder[immutable.ListMap[K, V]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): immutable.ListMap[K, V] = immutable.ListMap.empty
+
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): immutable.ListMap[K, V] =
+ keyValueBuilder(trace, in, immutable.ListMap.newBuilder[K, V])
+ }
}
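// [editor's note] Illustrative usage sketch, not part of the patch. The new ListMap instance decodes
// JSON objects while preserving key order; the CollectionJsonDecoder overrides above additionally let
// an absent collection field fall back to an empty collection via unsafeDecodeMissing (subject to the
// codec configuration used by the deriving macros).
import zio.json._
import scala.collection.immutable.ListMap

object ListMapExample extends App {
  val decoded = """{"b":2,"a":1}""".fromJson[ListMap[String, Int]]
  println(decoded) // Right(ListMap(b -> 2, a -> 1)) -- insertion order preserved
}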
// We have a hierarchy of implicits for two reasons:
@@ -613,18 +905,18 @@ private[json] trait DecoderLowPriority1 extends DecoderLowPriority2 {
private[json] trait DecoderLowPriority2 extends DecoderLowPriority3 {
this: JsonDecoder.type =>
- implicit def iterable[A: JsonDecoder]: JsonDecoder[Iterable[A]] = new JsonDecoder[Iterable[A]] {
+ implicit def iterable[A: JsonDecoder]: JsonDecoder[Iterable[A]] =
+ new CollectionJsonDecoder[Iterable[A]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Iterable[A] = Iterable.empty
- def unsafeDecode(trace: List[JsonError], in: RetractReader): Iterable[A] =
- builder(trace, in, immutable.Iterable.newBuilder[A])
- }
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Iterable[A] =
+ builder(trace, in, immutable.Iterable.newBuilder[A])
+ }
// not implicit because this overlaps with decoders for lists of tuples
- def keyValueChunk[K, A](implicit
- K: JsonFieldDecoder[K],
- A: JsonDecoder[A]
- ): JsonDecoder[Chunk[(K, A)]] =
- new JsonDecoder[Chunk[(K, A)]] {
+ def keyValueChunk[K, A](implicit K: JsonFieldDecoder[K], A: JsonDecoder[A]): JsonDecoder[Chunk[(K, A)]] =
+ new CollectionJsonDecoder[Chunk[(K, A)]] {
+ override def unsafeDecodeMissing(trace: List[JsonError]): Chunk[(K, A)] = Chunk.empty
def unsafeDecode(trace: List[JsonError], in: RetractReader): Chunk[(K, A)] =
keyValueBuilder[K, A, ({ type lambda[X, Y] = Chunk[(X, Y)] })#lambda](
@@ -633,72 +925,310 @@ private[json] trait DecoderLowPriority2 extends DecoderLowPriority3 {
zio.ChunkBuilder.make[(K, A)]()
)
}
-
}
private[json] trait DecoderLowPriority3 extends DecoderLowPriority4 {
this: JsonDecoder.type =>
- import java.time.{ DateTimeException, _ }
- import java.time.format.DateTimeParseException
- import java.time.zone.ZoneRulesException
+ import java.time._
- implicit val dayOfWeek: JsonDecoder[DayOfWeek] = mapStringOrFail(s => parseJavaTime(DayOfWeek.valueOf, s.toUpperCase))
- implicit val duration: JsonDecoder[Duration] = mapStringOrFail(parseJavaTime(parsers.unsafeParseDuration, _))
- implicit val instant: JsonDecoder[Instant] = mapStringOrFail(parseJavaTime(parsers.unsafeParseInstant, _))
- implicit val localDate: JsonDecoder[LocalDate] = mapStringOrFail(parseJavaTime(parsers.unsafeParseLocalDate, _))
+ implicit val dayOfWeek: JsonDecoder[DayOfWeek] = new JsonDecoder[DayOfWeek] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): DayOfWeek = Lexer.dayOfWeek(trace, in)
- implicit val localDateTime: JsonDecoder[LocalDateTime] =
- mapStringOrFail(parseJavaTime(parsers.unsafeParseLocalDateTime, _))
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): DayOfWeek = {
+ json match {
+ case s: Json.Str =>
+ try return DayOfWeek.valueOf(s.value.toUpperCase)
+ catch {
+ case _: IllegalArgumentException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a DayOfWeek", trace)
+ }
+ }
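// [editor's note] Illustrative sketch, not part of the patch. Every java.time decoder in this block
// now has two code paths: unsafeDecode lexes straight off the input stream, while unsafeFromJsonAST
// handles values that were already parsed into the Json AST. Both report failures through Lexer.error
// so the error trace stays consistent.
import zio.json._
import zio.json.ast.Json
import java.time.DayOfWeek

object DayOfWeekExample extends App {
  println("\"MONDAY\"".fromJson[DayOfWeek]) // Right(MONDAY) -- streaming path
  println(Json.Str("monday").as[DayOfWeek]) // Right(MONDAY) -- AST path upper-cases before valueOf
  println(Json.Num(1).as[DayOfWeek])        // Left(...)     -- not a JSON string
}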
+ implicit val duration: JsonDecoder[Duration] = new JsonDecoder[Duration] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Duration = Lexer.duration(trace, in)
- implicit val localTime: JsonDecoder[LocalTime] = mapStringOrFail(parseJavaTime(parsers.unsafeParseLocalTime, _))
- implicit val month: JsonDecoder[Month] = mapStringOrFail(s => parseJavaTime(Month.valueOf, s.toUpperCase))
- implicit val monthDay: JsonDecoder[MonthDay] = mapStringOrFail(parseJavaTime(parsers.unsafeParseMonthDay, _))
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Duration = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseDuration(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Duration", trace)
+ }
+ }
+ implicit val instant: JsonDecoder[Instant] = new JsonDecoder[Instant] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Instant = Lexer.instant(trace, in)
- implicit val offsetDateTime: JsonDecoder[OffsetDateTime] =
- mapStringOrFail(parseJavaTime(parsers.unsafeParseOffsetDateTime, _))
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Instant = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseInstant(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected an Instant", trace)
+ }
+ }
+ implicit val localDate: JsonDecoder[LocalDate] = new JsonDecoder[LocalDate] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): LocalDate = Lexer.localDate(trace, in)
- implicit val offsetTime: JsonDecoder[OffsetTime] = mapStringOrFail(parseJavaTime(parsers.unsafeParseOffsetTime, _))
- implicit val period: JsonDecoder[Period] = mapStringOrFail(parseJavaTime(parsers.unsafeParsePeriod, _))
- implicit val year: JsonDecoder[Year] = mapStringOrFail(parseJavaTime(parsers.unsafeParseYear, _))
- implicit val yearMonth: JsonDecoder[YearMonth] = mapStringOrFail(parseJavaTime(parsers.unsafeParseYearMonth, _))
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): LocalDate = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseLocalDate(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a LocalDate", trace)
+ }
+ }
+ implicit val localDateTime: JsonDecoder[LocalDateTime] = new JsonDecoder[LocalDateTime] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): LocalDateTime = Lexer.localDateTime(trace, in)
- implicit val zonedDateTime: JsonDecoder[ZonedDateTime] =
- mapStringOrFail(parseJavaTime(parsers.unsafeParseZonedDateTime, _))
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): LocalDateTime = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseLocalDateTime(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a LocalDateTime", trace)
+ }
+ }
+ implicit val localTime: JsonDecoder[LocalTime] = new JsonDecoder[LocalTime] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): LocalTime = Lexer.localTime(trace, in)
- implicit val zoneId: JsonDecoder[ZoneId] = mapStringOrFail(parseJavaTime(parsers.unsafeParseZoneId, _))
- implicit val zoneOffset: JsonDecoder[ZoneOffset] = mapStringOrFail(parseJavaTime(parsers.unsafeParseZoneOffset, _))
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): LocalTime = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseLocalTime(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a LocalTime", trace)
+ }
+ }
+ implicit val month: JsonDecoder[Month] = new JsonDecoder[Month] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Month = Lexer.month(trace, in)
- // Commonized handling for decoding from string to java.time Class
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Month = {
+ json match {
+ case s: Json.Str =>
+ try return Month.valueOf(s.value.toUpperCase)
+ catch {
+ case _: IllegalArgumentException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Month", trace)
+ }
+ }
+ implicit val monthDay: JsonDecoder[MonthDay] = new JsonDecoder[MonthDay] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): MonthDay = Lexer.monthDay(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): MonthDay = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseMonthDay(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a MonthDay", trace)
+ }
+ }
+ implicit val offsetDateTime: JsonDecoder[OffsetDateTime] = new JsonDecoder[OffsetDateTime] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): OffsetDateTime = Lexer.offsetDateTime(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): OffsetDateTime = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseOffsetDateTime(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected an OffsetDateTime", trace)
+ }
+ }
+ implicit val offsetTime: JsonDecoder[OffsetTime] = new JsonDecoder[OffsetTime] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): OffsetTime = Lexer.offsetTime(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): OffsetTime = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseOffsetTime(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected an OffsetTime", trace)
+ }
+ }
+ implicit val period: JsonDecoder[Period] = new JsonDecoder[Period] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Period = Lexer.period(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Period = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParsePeriod(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Period", trace)
+ }
+ }
+ implicit val year: JsonDecoder[Year] = new JsonDecoder[Year] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): Year = Lexer.year(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Year = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseYear(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a Year", trace)
+ }
+ }
+ implicit val yearMonth: JsonDecoder[YearMonth] = new JsonDecoder[YearMonth] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): YearMonth = Lexer.yearMonth(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): YearMonth = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseYearMonth(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a YearMonth", trace)
+ }
+ }
+ implicit val zonedDateTime: JsonDecoder[ZonedDateTime] = new JsonDecoder[ZonedDateTime] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): ZonedDateTime = Lexer.zonedDateTime(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): ZonedDateTime = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseZonedDateTime(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a ZonedDateTime", trace)
+ }
+ }
+ implicit val zoneId: JsonDecoder[ZoneId] = new JsonDecoder[ZoneId] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): ZoneId =
+ try parsers.unsafeParseZoneId(Lexer.string(trace, in).toString)
+ catch {
+ case _: DateTimeException => Lexer.error("expected a ZoneId", trace)
+ }
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): ZoneId = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseZoneId(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a ZoneId", trace)
+ }
+ }
+ implicit val zoneOffset: JsonDecoder[ZoneOffset] = new JsonDecoder[ZoneOffset] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): ZoneOffset = Lexer.zoneOffset(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): ZoneOffset = {
+ json match {
+ case s: Json.Str =>
+ try return parsers.unsafeParseZoneOffset(s.value)
+ catch {
+ case _: DateTimeException =>
+ }
+ case _ =>
+ }
+ Lexer.error("expected a ZoneOffset", trace)
+ }
+ }
+
+ // FIXME: remove in the next major version
private[json] def parseJavaTime[A](f: String => A, s: String): Either[String, A] =
- try {
- Right(f(s))
- } catch {
- case zre: ZoneRulesException => Left(s"$s is not a valid ISO-8601 format, ${zre.getMessage}")
- case dtpe: DateTimeParseException => Left(s"$s is not a valid ISO-8601 format, ${dtpe.getMessage}")
- case dte: DateTimeException => Left(s"$s is not a valid ISO-8601 format, ${dte.getMessage}")
- case ex: Exception => Left(ex.getMessage)
+ try new Right(f(s))
+ catch {
+ case ex: DateTimeException =>
+ new Left(s"${strip(s)} is not a valid ISO-8601 format, ${ex.getMessage}")
+ case _: IllegalArgumentException =>
+ new Left(s"${strip(s)} is not a valid ISO-8601 format")
}
- implicit val uuid: JsonDecoder[UUID] =
- mapStringOrFail { str =>
- try {
- Right(UUIDParser.unsafeParse(str))
- } catch {
- case iae: IllegalArgumentException => Left(s"Invalid UUID: ${iae.getMessage}")
+ implicit val uuid: JsonDecoder[UUID] = new JsonDecoder[UUID] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): UUID = Lexer.uuid(trace, in)
+
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): UUID = {
+ json match {
+ case s: Json.Str =>
+ try return UUIDParser.unsafeParse(s.value)
+ catch {
+ case _: IllegalArgumentException =>
+ }
+ case _ =>
}
+ Lexer.error("expected a UUID", trace)
}
+ }
+
+ implicit val currency: JsonDecoder[java.util.Currency] = new JsonDecoder[java.util.Currency] {
+ def unsafeDecode(trace: List[JsonError], in: RetractReader): java.util.Currency =
+ try java.util.Currency.getInstance(Lexer.string(trace, in).toString)
+ catch {
+ case _: IllegalArgumentException => Lexer.error("expected a Currency", trace)
+ }
- implicit val currency: JsonDecoder[java.util.Currency] =
- mapStringOrFail { str =>
- try {
- Right(java.util.Currency.getInstance(str))
- } catch {
- case iae: IllegalArgumentException => Left(s"Invalid Currency: ${iae.getMessage}")
+ override def unsafeFromJsonAST(trace: List[JsonError], json: Json): java.util.Currency = {
+ json match {
+ case s: Json.Str =>
+ try return java.util.Currency.getInstance(s.value)
+ catch {
+ case _: IllegalArgumentException =>
+ }
+ case _ =>
}
+ Lexer.error("expected a Currency", trace)
}
+ }
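// [editor's note] Illustrative usage sketch, not part of the patch, for the rewritten UUID and
// Currency decoders: the happy path lexes the string directly, and malformed input is reported
// through the decoder's error trace rather than as a raw exception.
import zio.json._
import java.util.{ Currency, UUID }

object UuidCurrencyExample extends App {
  println("\"123e4567-e89b-12d3-a456-426614174000\"".fromJson[UUID]) // Right(123e4567-...)
  println("\"USD\"".fromJson[Currency])                              // Right(USD)
  println("\"not-a-currency\"".fromJson[Currency])                   // Left(...) -- "expected a Currency" in the trace
}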
+
+ // FIXME: remove in the next major version
+ @noinline private[json] def strip(s: String, len: Int = 50): String =
+ if (s.length <= len) s
+ else s.substring(0, len) + "..."
}
-private[json] trait DecoderLowPriority4 {
- implicit def fromCodec[A](implicit codec: JsonCodec[A]): JsonDecoder[A] = codec.decoder
+private[json] trait DecoderLowPriority4 extends DecoderLowPriorityVersionSpecific {
+ @inline implicit def fromCodec[A](implicit codec: JsonCodec[A]): JsonDecoder[A] = codec.decoder
}
diff --git a/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala b/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala
index d118674de..55744aa7b 100644
--- a/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala
+++ b/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala
@@ -19,50 +19,45 @@ import zio.json.ast.Json
import zio.json.internal.{ FastStringWrite, SafeNumbers, Write }
import zio.json.javatime.serializers
import zio.{ Chunk, NonEmptyChunk }
-
import java.util.UUID
import scala.annotation._
import scala.collection.{ immutable, mutable }
-import scala.reflect.ClassTag
trait JsonEncoder[A] extends JsonEncoderPlatformSpecific[A] {
self =>
/**
- * Returns a new encoder, with a new input type, which can be transformed to the old input type
- * by the specified user-defined function.
+ * Returns a new encoder, with a new input type, which can be transformed to the old input type by the specified
+ * user-defined function.
*/
final def contramap[B](f: B => A): JsonEncoder[B] = new JsonEncoder[B] {
-
override def unsafeEncode(b: B, indent: Option[Int], out: Write): Unit =
self.unsafeEncode(f(b), indent, out)
override def isNothing(b: B): Boolean = self.isNothing(f(b))
- override final def toJsonAST(b: B): Either[String, Json] =
- self.toJsonAST(f(b))
+ override def isEmpty(b: B): Boolean = self.isEmpty(f(b))
+
+ override def toJsonAST(b: B): Either[String, Json] = self.toJsonAST(f(b))
}
/**
- * Returns a new encoder that can accepts an `Either[A, B]` to either, and uses either this
- * encoder or the specified encoder to encode the two different types of values.
+ * Returns a new encoder that accepts an `Either[A, B]`, and uses either this encoder or the specified encoder to
+ * encode the two different types of values.
*/
final def either[B](that: => JsonEncoder[B]): JsonEncoder[Either[A, B]] = JsonEncoder.either[A, B](self, that)
/**
- * Returns a new encoder that can accepts an `Either[A, B]` to either, and uses either this
- * encoder or the specified encoder to encode the two different types of values.
- * The difference with the classic `either` encoder is that the resulting JSON has no field
- * `Left` or `Right`.
- * What should be: `{"Right": "John Doe"}` is encoded as `"John Doe"`
+ * Returns a new encoder that accepts an `Either[A, B]`, and uses either this encoder or the specified encoder to
+ * encode the two different types of values. The difference from the classic `either` encoder is that the resulting
+ * JSON has no `Left` or `Right` field: what would be `{"Right": "John Doe"}` is encoded as `"John Doe"`.
*/
final def orElseEither[B](that: => JsonEncoder[B]): JsonEncoder[Either[A, B]] =
JsonEncoder.orElseEither[A, B](self, that)
/**
- * Returns a new encoder with a new input type, which can be transformed to either the input
- * type of this encoder, or the input type of the specified encoder, using the user-defined
- * transformation function.
+ * Returns a new encoder with a new input type, which can be transformed to either the input type of this encoder, or
+ * the input type of the specified encoder, using the user-defined transformation function.
*/
final def eitherWith[B, C](that: => JsonEncoder[B])(f: C => Either[A, B]): JsonEncoder[C] =
self.either(that).contramap(f)
@@ -71,18 +66,24 @@ trait JsonEncoder[A] extends JsonEncoderPlatformSpecific[A] {
* Encodes the specified value into a JSON string, with the specified indentation level.
*/
final def encodeJson(a: A, indent: Option[Int] = None): CharSequence = {
- val writer = new FastStringWrite(64)
- unsafeEncode(a, indent, writer)
- writer.buffer
+ val writePool = JsonEncoder.writePools.get
+ try {
+ val write = writePool.acquire()
+ unsafeEncode(a, indent, write)
+ write.toString
+ } finally writePool.release()
}
/**
- * This default may be overridden when this value may be missing within a JSON object and still
- * be encoded.
+ * This default may be overridden when this value may be missing within a JSON object and still be encoded.
*/
- @nowarn("msg=is never used")
def isNothing(a: A): Boolean = false
+ /**
+ * This default may be overridden when this value may be empty within a JSON object and still be encoded.
+ */
+ def isEmpty(a: A): Boolean = false
+
/**
* Returns this encoder but narrowed to its given sub-type
*/
@@ -93,82 +94,131 @@ trait JsonEncoder[A] extends JsonEncoderPlatformSpecific[A] {
/**
* Converts a value to a Json AST
*
- * The default implementation encodes the value to a Json byte stream and
- * uses decode to parse that back to an AST. Override to provide a more performant
- * implementation.
+ * The default implementation encodes the value to a Json byte stream and uses decode to parse that back to an AST.
+ * Override to provide a more performant implementation.
*/
def toJsonAST(a: A): Either[String, Json] = Json.decoder.decodeJson(encodeJson(a, None))
/**
- * Returns a new encoder that is capable of encoding a tuple containing the values of this
- * encoder and the specified encoder.
+ * Returns a new encoder that is capable of encoding a tuple containing the values of this encoder and the specified
+ * encoder.
*/
final def zip[B](that: => JsonEncoder[B]): JsonEncoder[(A, B)] = JsonEncoder.tuple2(self, that)
/**
- * Returns a new encoder that is capable of encoding a user-defined value, which is create from
- * a tuple of the values of this encoder and the specified encoder, from the specified user-
- * defined function.
+ * Returns a new encoder that is capable of encoding a user-defined value, which is created from a tuple of the values
+ * of this encoder and the specified encoder, using the specified user-defined function.
*/
final def zipWith[B, C](that: => JsonEncoder[B])(f: C => (A, B)): JsonEncoder[C] = self.zip(that).contramap(f)
}
object JsonEncoder extends GeneratedTupleEncoders with EncoderLowPriority1 with JsonEncoderVersionSpecific {
- def apply[A](implicit a: JsonEncoder[A]): JsonEncoder[A] = a
+ private class FastStringWritePool {
+ private[this] var weakRef: java.lang.ref.WeakReference[Array[FastStringWrite]] =
+ new java.lang.ref.WeakReference(Array(new FastStringWrite(64)))
+ private[this] var level: Int = 0
+
+ def acquire(): FastStringWrite = {
+ var writes = weakRef.get
+ var level = this.level
+ if (writes eq null) { // the reference was collected by GC
+ level = 0
+ writes = new Array(0)
+ }
+ if (level == writes.length) { // exceeding the deepest level of recursion seen so far
+ writes = java.util.Arrays.copyOf(writes, level + 1)
+ writes(level) = new FastStringWrite(64)
+ weakRef = new java.lang.ref.WeakReference(writes)
+ }
+ val write = writes(level)
+ this.level = level + 1 // increase the level of recursion
+ write.reset()
+ write
+ }
- implicit val string: JsonEncoder[String] = new JsonEncoder[String] {
+ def release(): Unit = if (level > 0) level -= 1 // decrease the level of recursion
+ }
+
+ private val writePools = new ThreadLocal[FastStringWritePool] {
+ override def initialValue(): FastStringWritePool = new FastStringWritePool
+ }
+
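// [editor's note] Simplified sketch, not the library class, of the pooling idea behind
// FastStringWritePool: one reusable buffer per thread *per recursion depth*, so nested encodeJson
// calls on the same thread never clobber each other's output. The real pool additionally keeps the
// array behind a WeakReference so idle buffers can be reclaimed by the GC.
final class BufferPool {
  private[this] var buffers = Array(new java.lang.StringBuilder(64))
  private[this] var level   = 0

  def acquire(): java.lang.StringBuilder = {
    if (level == buffers.length) { // first time we recurse this deep: grow by one slot
      buffers = java.util.Arrays.copyOf(buffers, level + 1)
      buffers(level) = new java.lang.StringBuilder(64)
    }
    val sb = buffers(level)
    level += 1
    sb.setLength(0) // reuse the buffer: clear any previous contents
    sb
  }

  def release(): Unit = if (level > 0) level -= 1
}

object BufferPools {
  // One pool per thread, mirroring JsonEncoder.writePools.
  val perThread: ThreadLocal[BufferPool] = new ThreadLocal[BufferPool] {
    override def initialValue(): BufferPool = new BufferPool
  }
}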
+ @inline def apply[A](implicit a: JsonEncoder[A]): JsonEncoder[A] = a
+ implicit val string: JsonEncoder[String] = new JsonEncoder[String] {
override def unsafeEncode(a: String, indent: Option[Int], out: Write): Unit = {
out.write('"')
+ val len = a.length
var i = 0
+ while (i < len) {
+ val c = a.charAt(i)
+ i += 1
+ if (c == '"' || c == '\\' || c < ' ') {
+ writeEncoded(a, out)
+ return
+ }
+ }
+ out.write(a)
+ out.write('"')
+ }
+
+ @inline override def toJsonAST(a: String): Either[String, Json] = new Right(new Json.Str(a))
+
+ private[this] def writeEncoded(a: String, out: Write): Unit = {
val len = a.length
+ var i = 0
while (i < len) {
(a.charAt(i): @switch) match {
- case '"' => out.write("\\\"")
- case '\\' => out.write("\\\\")
- case '\b' => out.write("\\b")
- case '\f' => out.write("\\f")
- case '\n' => out.write("\\n")
- case '\r' => out.write("\\r")
- case '\t' => out.write("\\t")
- case c =>
- if (c < ' ') out.write("\\u%04x".format(c.toInt))
- else out.write(c)
+ case '"' => out.write('\\', '"')
+ case '\\' => out.write('\\', '\\')
+ case '\b' => out.write('\\', 'b')
+ case '\f' => out.write('\\', 'f')
+ case '\n' => out.write('\\', 'n')
+ case '\r' => out.write('\\', 'r')
+ case '\t' => out.write('\\', 't')
+ case c =>
+ if (c >= ' ') out.write(c)
+ else {
+ out.write('\\', 'u')
+ SafeNumbers.writeHex(c, out)
+ }
}
i += 1
}
out.write('"')
}
-
- override final def toJsonAST(a: String): Either[String, Json] =
- Right(Json.Str(a))
}
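// [editor's note] Illustrative sketch, not part of the patch. The string encoder above first scans
// for '"', '\\' or control characters; strings that need no escaping are written in a single call,
// everything else falls back to writeEncoded's escaping loop.
import zio.json._

object StringEncodingExample extends App {
  println("plain text".toJson) // "plain text"  -- fast path, written verbatim
  println("a\"b\nc".toJson)    // "a\"b\nc"     -- slow path, quote and newline escaped
}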
implicit val char: JsonEncoder[Char] = new JsonEncoder[Char] {
-
- override def unsafeEncode(a: Char, indent: Option[Int], out: Write): Unit = {
- out.write('"')
+ override def unsafeEncode(a: Char, indent: Option[Int], out: Write): Unit =
(a: @switch) match {
- case '"' => out.write("\\\"")
- case '\\' => out.write("\\\\")
- case c =>
- if (c < ' ') out.write("\\u%04x".format(c.toInt))
- else out.write(c)
+ case '"' => out.write('"', '\\', '"', '"')
+ case '\\' => out.write('"', '\\', '\\', '"')
+ case '\b' => out.write('"', '\\', 'b', '"')
+ case '\f' => out.write('"', '\\', 'f', '"')
+ case '\n' => out.write('"', '\\', 'n', '"')
+ case '\r' => out.write('"', '\\', 'r', '"')
+ case '\t' => out.write('"', '\\', 't', '"')
+ case c =>
+ if (c >= ' ') out.write('"', c, '"')
+ else {
+ out.write('"', '\\', 'u')
+ SafeNumbers.writeHex(c, out)
+ out.write('"')
+ }
}
- out.write('"')
- }
- override final def toJsonAST(a: Char): Either[String, Json] =
- Right(Json.Str(a.toString))
+ override def toJsonAST(a: Char): Either[String, Json] = new Right(new Json.Str(a.toString))
}
+ // FIXME: remove in the next major version
private[json] def explicit[A](f: A => String, g: A => Json): JsonEncoder[A] = new JsonEncoder[A] {
def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = out.write(f(a))
- override final def toJsonAST(a: A): Either[String, Json] =
- Right(g(a))
+ override def toJsonAST(a: A): Either[String, Json] = new Right(g(a))
}
+ // FIXME: remove in the next major version
private[json] def stringify[A](f: A => String): JsonEncoder[A] = new JsonEncoder[A] {
def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = {
out.write('"')
@@ -176,10 +226,10 @@ object JsonEncoder extends GeneratedTupleEncoders with EncoderLowPriority1 with
out.write('"')
}
- override final def toJsonAST(a: A): Either[String, Json] =
- Right(Json.Str(f(a)))
+ override def toJsonAST(a: A): Either[String, Json] = new Right(new Json.Str(f(a)))
}
+ // FIXME: add tests
def suspend[A](encoder0: => JsonEncoder[A]): JsonEncoder[A] =
new JsonEncoder[A] {
lazy val encoder = encoder0
@@ -188,61 +238,102 @@ object JsonEncoder extends GeneratedTupleEncoders with EncoderLowPriority1 with
override def isNothing(a: A): Boolean = encoder.isNothing(a)
+ override def isEmpty(a: A): Boolean = encoder.isEmpty(a)
+
override def toJsonAST(a: A): Either[String, Json] = encoder.toJsonAST(a)
}
- implicit val boolean: JsonEncoder[Boolean] = explicit(_.toString, Json.Bool.apply)
- implicit val symbol: JsonEncoder[Symbol] = string.contramap(_.name)
- implicit val byte: JsonEncoder[Byte] = explicit(_.toString, n => Json.Num(n))
- implicit val short: JsonEncoder[Short] = explicit(_.toString, n => Json.Num(n))
- implicit val int: JsonEncoder[Int] = explicit(_.toString, n => Json.Num(n))
- implicit val long: JsonEncoder[Long] = explicit(_.toString, n => Json.Num(n))
- implicit val bigInteger: JsonEncoder[java.math.BigInteger] =
- explicit(_.toString, n => Json.Num(new java.math.BigDecimal(n)))
- implicit val scalaBigInt: JsonEncoder[BigInt] =
- explicit(_.toString, n => Json.Num(new java.math.BigDecimal(n.bigInteger)))
- implicit val double: JsonEncoder[Double] =
- explicit(SafeNumbers.toString, n => Json.Num(n))
- implicit val float: JsonEncoder[Float] =
- explicit(SafeNumbers.toString, n => Json.Num(n))
- implicit val bigDecimal: JsonEncoder[java.math.BigDecimal] = explicit(_.toString, Json.Num.apply)
- implicit val scalaBigDecimal: JsonEncoder[BigDecimal] = explicit(_.toString, n => Json.Num(n.bigDecimal))
+ implicit val boolean: JsonEncoder[Boolean] = new JsonEncoder[Boolean] {
+ def unsafeEncode(a: Boolean, indent: Option[Int], out: Write): Unit =
+ if (a) out.write('t', 'r', 'u', 'e')
+ else out.write('f', 'a', 'l', 's', 'e')
- implicit def option[A](implicit A: JsonEncoder[A]): JsonEncoder[Option[A]] = new JsonEncoder[Option[A]] {
+ override def toJsonAST(a: Boolean): Either[String, Json] = new Right(Json.Bool(a))
+ }
+ implicit val symbol: JsonEncoder[Symbol] = string.contramap(_.name)
+ implicit val byte: JsonEncoder[Byte] = new JsonEncoder[Byte] {
+ def unsafeEncode(a: Byte, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a.toInt, out)
- def unsafeEncode(oa: Option[A], indent: Option[Int], out: Write): Unit = oa match {
- case None => out.write("null")
- case Some(a) => A.unsafeEncode(a, indent, out)
- }
+ override def toJsonAST(a: Byte): Either[String, Json] = new Right(Json.Num(a.toInt))
+ }
+ implicit val short: JsonEncoder[Short] = new JsonEncoder[Short] {
+ def unsafeEncode(a: Short, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a.toInt, out)
- override def isNothing(oa: Option[A]): Boolean =
- oa match {
- case None => true
- case Some(a) => A.isNothing(a)
- }
+ override def toJsonAST(a: Short): Either[String, Json] = new Right(Json.Num(a.toInt))
+ }
+ implicit val int: JsonEncoder[Int] = new JsonEncoder[Int] {
+ def unsafeEncode(a: Int, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a, out)
- override final def toJsonAST(oa: Option[A]): Either[String, Json] =
- oa match {
- case None => Right(Json.Null)
- case Some(a) => A.toJsonAST(a)
- }
+ override def toJsonAST(a: Int): Either[String, Json] = new Right(Json.Num(a))
+ }
+ implicit val long: JsonEncoder[Long] = new JsonEncoder[Long] {
+ def unsafeEncode(a: Long, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a, out)
+
+ override def toJsonAST(a: Long): Either[String, Json] = new Right(Json.Num(a))
+ }
+ implicit val bigInteger: JsonEncoder[java.math.BigInteger] = new JsonEncoder[java.math.BigInteger] {
+ def unsafeEncode(a: java.math.BigInteger, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a, out)
+
+ override def toJsonAST(a: java.math.BigInteger): Either[String, Json] = new Right(Json.Num(a))
+ }
+ implicit val scalaBigInt: JsonEncoder[BigInt] = new JsonEncoder[BigInt] {
+ def unsafeEncode(a: BigInt, indent: Option[Int], out: Write): Unit =
+ if (a.isValidLong) SafeNumbers.write(a.longValue, out)
+ else SafeNumbers.write(a.bigInteger, out)
+
+ override def toJsonAST(a: BigInt): Either[String, Json] = new Right(Json.Num(a))
+ }
+ implicit val double: JsonEncoder[Double] = new JsonEncoder[Double] {
+ def unsafeEncode(a: Double, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a, out)
+
+ override def toJsonAST(a: Double): Either[String, Json] = new Right(Json.Num(a))
+ }
+ implicit val float: JsonEncoder[Float] = new JsonEncoder[Float] {
+ def unsafeEncode(a: Float, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a, out)
+
+ override def toJsonAST(a: Float): Either[String, Json] = new Right(Json.Num(a))
+ }
+ implicit val bigDecimal: JsonEncoder[java.math.BigDecimal] = new JsonEncoder[java.math.BigDecimal] {
+ def unsafeEncode(a: java.math.BigDecimal, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a, out)
+
+ override def toJsonAST(a: java.math.BigDecimal): Either[String, Json] = new Right(new Json.Num(a))
+ }
+ implicit val scalaBigDecimal: JsonEncoder[BigDecimal] = new JsonEncoder[BigDecimal] {
+ def unsafeEncode(a: BigDecimal, indent: Option[Int], out: Write): Unit = SafeNumbers.write(a.bigDecimal, out)
+
+ override def toJsonAST(a: BigDecimal): Either[String, Json] = new Right(new Json.Num(a.bigDecimal))
}
- def bump(indent: Option[Int]): Option[Int] = indent match {
- case None => None
- case Some(i) => Some(i + 1)
+ implicit def option[A](implicit A: JsonEncoder[A]): JsonEncoder[Option[A]] = new JsonEncoder[Option[A]] {
+ def unsafeEncode(oa: Option[A], indent: Option[Int], out: Write): Unit =
+ if (oa eq None) out.write('n', 'u', 'l', 'l')
+ else A.unsafeEncode(oa.get, indent, out)
+
+ override def isNothing(oa: Option[A]): Boolean = (oa eq None) || A.isNothing(oa.get)
+
+ override def toJsonAST(oa: Option[A]): Either[String, Json] =
+ if (oa eq None) new Right(Json.Null)
+ else A.toJsonAST(oa.get)
}
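// [editor's note] Illustrative sketch, not part of the patch, assuming the usual derived encoders:
// isNothing(None) is what allows an absent optional field to be dropped from the object output
// entirely instead of being written as null.
import zio.json._

object OptionFieldExample extends App {
  case class User(name: String, nickname: Option[String])
  implicit val encoder: JsonEncoder[User] = DeriveJsonEncoder.gen[User]

  println(User("Ada", None).toJson)        // {"name":"Ada"}
  println(User("Ada", Some("ada")).toJson) // {"name":"Ada","nickname":"ada"}
}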
- def pad(indent: Option[Int], out: Write): Unit = indent match {
- case None => ()
- case Some(n) =>
+ def bump(indent: Option[Int]): Option[Int] =
+ if (indent ne None) new Some(indent.get + 1)
+ else indent
+
+ def pad(indent: Option[Int], out: Write): Unit =
+ if (indent ne None) {
out.write('\n')
- var i = n
+ var i = indent.get
+ val ws = 8224: Short
+ while (i > 4) {
+ out.write(ws, ws, ws, ws)
+ i -= 4
+ }
while (i > 0) {
- out.write(" ")
+ out.write(ws)
i -= 1
}
- }
+ }
implicit def either[A, B](implicit A: JsonEncoder[A], B: JsonEncoder[B]): JsonEncoder[Either[A, B]] =
new JsonEncoder[Either[A, B]] {
@@ -277,7 +368,7 @@ object JsonEncoder extends GeneratedTupleEncoders with EncoderLowPriority1 with
pad(indent, out)
}
- override final def toJsonAST(eab: Either[A, B]): Either[String, Json] =
+ override def toJsonAST(eab: Either[A, B]): Either[String, Json] =
eab match {
case Left(a) => A.toJsonAST(a).map(v => Json.Obj(Chunk.single("Left" -> v)))
case Right(b) => B.toJsonAST(b).map(v => Json.Obj(Chunk.single("Right" -> v)))
@@ -292,15 +383,18 @@ object JsonEncoder extends GeneratedTupleEncoders with EncoderLowPriority1 with
case Right(b) => B.unsafeEncode(b, indent, out)
}
}
+
}
private[json] trait EncoderLowPriority1 extends EncoderLowPriority2 {
this: JsonEncoder.type =>
- implicit def array[A](implicit A: JsonEncoder[A], classTag: ClassTag[A]): JsonEncoder[Array[A]] =
+ implicit def array[A](implicit A: JsonEncoder[A], classTag: scala.reflect.ClassTag[A]): JsonEncoder[Array[A]] =
new JsonEncoder[Array[A]] {
+ override def isEmpty(as: Array[A]): Boolean = as.isEmpty
+
def unsafeEncode(as: Array[A], indent: Option[Int], out: Write): Unit =
- if (as.isEmpty) out.write("[]")
+ if (as.isEmpty) out.write('[', ']')
else {
out.write('[')
if (indent.isDefined) unsafeEncodePadded(as, indent, out)
@@ -320,26 +414,32 @@ private[json] trait EncoderLowPriority1 extends EncoderLowPriority2 {
private[this] def unsafeEncodePadded(as: Array[A], indent: Option[Int], out: Write): Unit = {
val indent_ = bump(indent)
- pad(indent_, out)
- val len = as.length
- var i = 0
+ val len = as.length
+ var i = 0
while (i < len) {
- if (i != 0) {
- out.write(',')
- pad(indent_, out)
- }
+ if (i != 0) out.write(',')
+ pad(indent_, out)
A.unsafeEncode(as(i), indent_, out)
i += 1
}
pad(indent, out)
}
- override final def toJsonAST(as: Array[A]): Either[String, Json] =
- as.map(A.toJsonAST)
- .foldLeft[Either[String, Chunk[Json]]](Right(Chunk.empty)) { (s, i) =>
- s.flatMap(chunk => i.map(item => chunk :+ item))
+ override def toJsonAST(as: Array[A]): Either[String, Json] = {
+ val len = as.length
+ val buf = new Array[Json](len)
+ var i = 0
+ while (i < len) {
+ A.toJsonAST(as(i)) match {
+ case Right(json) =>
+ buf(i) = json
+ i += 1
+ case left =>
+ return left
}
- .map(Json.Arr(_))
+ }
+ new Right(Json.Arr(Chunk.fromArray(buf)))
+ }
}
implicit def seq[A: JsonEncoder]: JsonEncoder[Seq[A]] = iterable[A, Seq]
@@ -356,7 +456,57 @@ private[json] trait EncoderLowPriority1 extends EncoderLowPriority2 {
implicit def treeSet[A: JsonEncoder]: JsonEncoder[immutable.TreeSet[A]] = iterable[A, immutable.TreeSet]
- implicit def list[A: JsonEncoder]: JsonEncoder[List[A]] = iterable[A, List]
+ implicit def list[A](implicit A: JsonEncoder[A]): JsonEncoder[List[A]] =
+ new JsonEncoder[List[A]] {
+ override def isEmpty(as: List[A]): Boolean = as eq Nil
+
+ def unsafeEncode(as: List[A], indent: Option[Int], out: Write): Unit =
+ if (as eq Nil) out.write('[', ']')
+ else {
+ out.write('[')
+ if (indent.isDefined) unsafeEncodePadded(as, indent, out)
+ else unsafeEncodeCompact(as, indent, out)
+ out.write(']')
+ }
+
+ private[this] def unsafeEncodeCompact(as: List[A], indent: Option[Int], out: Write): Unit = {
+ var as_ = as
+ while (as_ ne Nil) {
+ if (as_ ne as) out.write(',')
+ A.unsafeEncode(as_.head, indent, out)
+ as_ = as_.tail
+ }
+ }
+
+ private[this] def unsafeEncodePadded(as: List[A], indent: Option[Int], out: Write): Unit = {
+ val indent_ = bump(indent)
+ var as_ = as
+ while (as_ ne Nil) {
+ if (as_ ne as) out.write(',')
+ pad(indent_, out)
+ A.unsafeEncode(as_.head, indent_, out)
+ as_ = as_.tail
+ }
+ pad(indent, out)
+ }
+
+ override def toJsonAST(as: List[A]): Either[String, Json] = {
+ var as_ = as
+ val buf = new Array[Json](as_.size)
+ var i = 0
+ while (as_ ne Nil) {
+ A.toJsonAST(as_.head) match {
+ case Right(json) =>
+ as_ = as_.tail
+ buf(i) = json
+ i += 1
+ case left =>
+ return left
+ }
+ }
+ new Right(Json.Arr(Chunk.fromArray(buf)))
+ }
+ }
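// [editor's note] Illustrative usage sketch, not part of the patch. The specialized List encoder
// walks the cons cells directly (no Iterator allocation); the output is unchanged from the generic
// iterable encoder.
import zio.json._

object ListEncodingExample extends App {
  println(List(1, 2, 3).toJson)    // [1,2,3]
  println((Nil: List[Int]).toJson) // []
  println(List(1, 2).toJsonPretty) // each element on its own indented line
}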
implicit def vector[A: JsonEncoder]: JsonEncoder[Vector[A]] = iterable[A, Vector]
@@ -367,8 +517,7 @@ private[json] trait EncoderLowPriority1 extends EncoderLowPriority2 {
implicit def sortedSet[A: Ordering: JsonEncoder]: JsonEncoder[immutable.SortedSet[A]] =
iterable[A, immutable.SortedSet]
- implicit def map[K: JsonFieldEncoder, V: JsonEncoder]: JsonEncoder[Map[K, V]] =
- keyValueIterable[K, V, Map]
+ implicit def map[K: JsonFieldEncoder, V: JsonEncoder]: JsonEncoder[Map[K, V]] = keyValueIterable[K, V, Map]
implicit def hashMap[K: JsonFieldEncoder, V: JsonEncoder]: JsonEncoder[immutable.HashMap[K, V]] =
keyValueIterable[K, V, immutable.HashMap]
@@ -378,6 +527,9 @@ private[json] trait EncoderLowPriority1 extends EncoderLowPriority2 {
implicit def sortedMap[K: JsonFieldEncoder, V: JsonEncoder]: JsonEncoder[collection.SortedMap[K, V]] =
keyValueIterable[K, V, collection.SortedMap]
+
+ implicit def listMap[K: JsonFieldEncoder, V: JsonEncoder]: JsonEncoder[immutable.ListMap[K, V]] =
+ keyValueIterable[K, V, immutable.ListMap]
}
private[json] trait EncoderLowPriority2 extends EncoderLowPriority3 {
@@ -385,8 +537,10 @@ private[json] trait EncoderLowPriority2 extends EncoderLowPriority3 {
implicit def iterable[A, T[X] <: Iterable[X]](implicit A: JsonEncoder[A]): JsonEncoder[T[A]] =
new JsonEncoder[T[A]] {
+ override def isEmpty(as: T[A]): Boolean = as.isEmpty
+
def unsafeEncode(as: T[A], indent: Option[Int], out: Write): Unit =
- if (as.isEmpty) out.write("[]")
+ if (as.isEmpty) out.write('[', ']')
else {
out.write('[')
if (indent.isDefined) unsafeEncodePadded(as, indent, out)
@@ -396,35 +550,41 @@ private[json] trait EncoderLowPriority2 extends EncoderLowPriority3 {
private[this] def unsafeEncodeCompact(as: T[A], indent: Option[Int], out: Write): Unit =
as.foreach {
- var first = true
+ var comma = false
a =>
- if (first) first = false
- else out.write(',')
+ if (comma) out.write(',')
+ else comma = true
A.unsafeEncode(a, indent, out)
}
private[this] def unsafeEncodePadded(as: T[A], indent: Option[Int], out: Write): Unit = {
val indent_ = bump(indent)
- pad(indent_, out)
as.foreach {
- var first = true
+ var comma = false
a =>
- if (first) first = false
- else {
- out.write(',')
- pad(indent_, out)
- }
+ if (comma) out.write(',')
+ else comma = true
+ pad(indent_, out)
A.unsafeEncode(a, indent_, out)
}
pad(indent, out)
}
- override final def toJsonAST(as: T[A]): Either[String, Json] =
- as.map(A.toJsonAST)
- .foldLeft[Either[String, Chunk[Json]]](Right(Chunk.empty)) { (s, i) =>
- s.flatMap(chunk => i.map(item => chunk :+ item))
+ override def toJsonAST(as: T[A]): Either[String, Json] = {
+ val it = as.iterator
+ val buf = new Array[Json](as.size)
+ var i = 0
+ while (it.hasNext) {
+ A.toJsonAST(it.next()) match {
+ case Right(json) =>
+ buf(i) = json
+ i += 1
+ case left =>
+ return left
}
- .map(Json.Arr(_))
+ }
+ new Right(Json.Arr(Chunk.fromArray(buf)))
+ }
}
// not implicit because this overlaps with encoders for lists of tuples
@@ -432,8 +592,10 @@ private[json] trait EncoderLowPriority2 extends EncoderLowPriority3 {
K: JsonFieldEncoder[K],
A: JsonEncoder[A]
): JsonEncoder[T[K, A]] = new JsonEncoder[T[K, A]] {
+ override def isEmpty(a: T[K, A]): Boolean = a.isEmpty
+
def unsafeEncode(kvs: T[K, A], indent: Option[Int], out: Write): Unit =
- if (kvs.isEmpty) out.write("{}")
+ if (kvs.isEmpty) out.write('{', '}')
else {
out.write('{')
if (indent.isDefined) unsafeEncodePadded(kvs, indent, out)
@@ -443,11 +605,11 @@ private[json] trait EncoderLowPriority2 extends EncoderLowPriority3 {
private[this] def unsafeEncodeCompact(kvs: T[K, A], indent: Option[Int], out: Write): Unit =
kvs.foreach {
- var first = true
+ var comma = false
kv =>
if (!A.isNothing(kv._2)) {
- if (first) first = false
- else out.write(',')
+ if (comma) out.write(',')
+ else comma = true
string.unsafeEncode(K.unsafeEncodeField(kv._1), indent, out)
out.write(':')
A.unsafeEncode(kv._2, indent, out)
@@ -456,16 +618,13 @@ private[json] trait EncoderLowPriority2 extends EncoderLowPriority3 {
private[this] def unsafeEncodePadded(kvs: T[K, A], indent: Option[Int], out: Write): Unit = {
val indent_ = bump(indent)
- pad(indent_, out)
kvs.foreach {
- var first = true
+ var comma = false
kv =>
if (!A.isNothing(kv._2)) {
- if (first) first = false
- else {
- out.write(',')
- pad(indent_, out)
- }
+ if (comma) out.write(',')
+ else comma = true
+ pad(indent_, out)
string.unsafeEncode(K.unsafeEncodeField(kv._1), indent_, out)
out.write(" : ")
A.unsafeEncode(kv._2, indent_, out)
@@ -474,16 +633,25 @@ private[json] trait EncoderLowPriority2 extends EncoderLowPriority3 {
pad(indent, out)
}
- override final def toJsonAST(kvs: T[K, A]): Either[String, Json] =
- kvs
- .foldLeft[Either[String, Chunk[(String, Json)]]](Right(Chunk.empty)) { case (s, (k, v)) =>
- for {
- chunk <- s
- key = K.unsafeEncodeField(k)
- value <- A.toJsonAST(v)
- } yield if (value == Json.Null) chunk else chunk :+ (key -> value)
+ override def toJsonAST(kvs: T[K, A]): Either[String, Json] = {
+ val it = kvs.iterator
+ var buf = new Array[(String, Json)](kvs.size)
+ var i = 0
+ while (it.hasNext) {
+ val kv = it.next()
+ A.toJsonAST(kv._2) match {
+ case Right(json) =>
+ if (json ne Json.Null) {
+ buf(i) = (K.unsafeEncodeField(kv._1), json)
+ i += 1
+ }
+ case left =>
+ return left
}
- .map(Json.Obj(_))
+ }
+ if (i != buf.length) buf = java.util.Arrays.copyOf(buf, i)
+ new Right(Json.Obj(Chunk.fromArray(buf)))
+ }
}
// not implicit because this overlaps with encoders for lists of tuples
@@ -499,28 +667,205 @@ private[json] trait EncoderLowPriority3 extends EncoderLowPriority4 {
import java.time._
- implicit val dayOfWeek: JsonEncoder[DayOfWeek] = stringify(_.toString)
- implicit val duration: JsonEncoder[Duration] = stringify(serializers.toString)
- implicit val instant: JsonEncoder[Instant] = stringify(serializers.toString)
- implicit val localDate: JsonEncoder[LocalDate] = stringify(serializers.toString)
- implicit val localDateTime: JsonEncoder[LocalDateTime] = stringify(serializers.toString)
- implicit val localTime: JsonEncoder[LocalTime] = stringify(serializers.toString)
- implicit val month: JsonEncoder[Month] = stringify(_.toString)
- implicit val monthDay: JsonEncoder[MonthDay] = stringify(serializers.toString)
- implicit val offsetDateTime: JsonEncoder[OffsetDateTime] = stringify(serializers.toString)
- implicit val offsetTime: JsonEncoder[OffsetTime] = stringify(serializers.toString)
- implicit val period: JsonEncoder[Period] = stringify(serializers.toString)
- implicit val year: JsonEncoder[Year] = stringify(serializers.toString)
- implicit val yearMonth: JsonEncoder[YearMonth] = stringify(serializers.toString)
- implicit val zonedDateTime: JsonEncoder[ZonedDateTime] = stringify(serializers.toString)
- implicit val zoneId: JsonEncoder[ZoneId] = stringify(serializers.toString)
- implicit val zoneOffset: JsonEncoder[ZoneOffset] = stringify(serializers.toString)
-
- implicit val uuid: JsonEncoder[UUID] = stringify(_.toString)
-
- implicit val currency: JsonEncoder[java.util.Currency] = stringify(_.toString)
+ implicit val dayOfWeek: JsonEncoder[DayOfWeek] = new JsonEncoder[DayOfWeek] {
+ def unsafeEncode(a: DayOfWeek, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ out.write(a.toString)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: DayOfWeek): Either[String, Json] =
+ new Right(new Json.Str(a.toString))
+ }
+
+ implicit val duration: JsonEncoder[Duration] = new JsonEncoder[Duration] {
+ def unsafeEncode(a: Duration, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: Duration): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val instant: JsonEncoder[Instant] = new JsonEncoder[Instant] {
+ def unsafeEncode(a: Instant, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: Instant): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val localDate: JsonEncoder[LocalDate] = new JsonEncoder[LocalDate] {
+ def unsafeEncode(a: LocalDate, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: LocalDate): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val localDateTime: JsonEncoder[LocalDateTime] = new JsonEncoder[LocalDateTime] {
+ def unsafeEncode(a: LocalDateTime, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: LocalDateTime): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val localTime: JsonEncoder[LocalTime] = new JsonEncoder[LocalTime] {
+ def unsafeEncode(a: LocalTime, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: LocalTime): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val month: JsonEncoder[Month] = new JsonEncoder[Month] {
+ def unsafeEncode(a: Month, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ out.write(a.toString)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: Month): Either[String, Json] =
+ new Right(new Json.Str(a.toString))
+ }
+
+ implicit val monthDay: JsonEncoder[MonthDay] = new JsonEncoder[MonthDay] {
+ def unsafeEncode(a: MonthDay, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: MonthDay): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val offsetDateTime: JsonEncoder[OffsetDateTime] = new JsonEncoder[OffsetDateTime] {
+ def unsafeEncode(a: OffsetDateTime, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: OffsetDateTime): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val offsetTime: JsonEncoder[OffsetTime] = new JsonEncoder[OffsetTime] {
+ def unsafeEncode(a: OffsetTime, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: OffsetTime): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val period: JsonEncoder[Period] = new JsonEncoder[Period] {
+ def unsafeEncode(a: Period, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: Period): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val year: JsonEncoder[Year] = new JsonEncoder[Year] {
+ def unsafeEncode(a: Year, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: Year): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val yearMonth: JsonEncoder[YearMonth] = new JsonEncoder[YearMonth] {
+ def unsafeEncode(a: YearMonth, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: YearMonth): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val zonedDateTime: JsonEncoder[ZonedDateTime] = new JsonEncoder[ZonedDateTime] {
+ def unsafeEncode(a: ZonedDateTime, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: ZonedDateTime): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val zoneId: JsonEncoder[ZoneId] = new JsonEncoder[ZoneId] {
+ def unsafeEncode(a: ZoneId, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ out.write(a.getId)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: ZoneId): Either[String, Json] =
+ new Right(new Json.Str(a.getId))
+ }
+
+ implicit val zoneOffset: JsonEncoder[ZoneOffset] = new JsonEncoder[ZoneOffset] {
+ def unsafeEncode(a: ZoneOffset, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ serializers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: ZoneOffset): Either[String, Json] =
+ new Right(new Json.Str(serializers.toString(a)))
+ }
+
+ implicit val uuid: JsonEncoder[UUID] = new JsonEncoder[UUID] {
+ def unsafeEncode(a: UUID, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ SafeNumbers.write(a, out)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: UUID): Either[String, Json] =
+ new Right(new Json.Str(SafeNumbers.toString(a)))
+ }
+
+ implicit val currency: JsonEncoder[java.util.Currency] = new JsonEncoder[java.util.Currency] {
+ def unsafeEncode(a: java.util.Currency, indent: Option[Int], out: Write): Unit = {
+ out.write('"')
+ out.write(a.toString)
+ out.write('"')
+ }
+
+ override def toJsonAST(a: java.util.Currency): Either[String, Json] =
+ new Right(new Json.Str(a.toString))
+ }
}
-private[json] trait EncoderLowPriority4 {
+private[json] trait EncoderLowPriority4 extends EncoderLowPriorityVersionSpecific {
implicit def fromCodec[A](implicit codec: JsonCodec[A]): JsonEncoder[A] = codec.encoder
}
diff --git a/zio-json/shared/src/main/scala/zio/json/JsonError.scala b/zio-json/shared/src/main/scala/zio/json/JsonError.scala
index b9525ad06..3ee4bb9ef 100644
--- a/zio-json/shared/src/main/scala/zio/json/JsonError.scala
+++ b/zio-json/shared/src/main/scala/zio/json/JsonError.scala
@@ -16,20 +16,23 @@
package zio.json
/**
- * A `JsonError` value describes the ways in which decoding could fail. This structure is used
- * to facilitate human-readable error messages during decoding failures.
+ * A `JsonError` value describes the ways in which decoding could fail. This structure is used to facilitate
+ * human-readable error messages during decoding failures.
*/
sealed abstract class JsonError
object JsonError {
-
def render(trace: List[JsonError]): String =
- trace.reverse.map {
- case Message(txt) => s"($txt)"
- case ArrayAccess(i) => s"[$i]"
- case ObjectAccess(field) => s".$field"
- case SumType(cons) => s"{$cons}"
- }.mkString
+ trace
+ .foldRight(new java.lang.StringBuilder) { (err, sb) =>
+ err match {
+ case o: ObjectAccess => sb.append('.').append(o.field)
+ case a: ArrayAccess => sb.append('[').append(a.i).append(']')
+ case s: SumType => sb.append('{').append(s.cons).append('}')
+ case m: Message => sb.append('(').append(m.txt).append(')')
+ }
+ }
+ .toString
final case class Message(txt: String) extends JsonError
@@ -38,5 +41,4 @@ object JsonError {
final case class ObjectAccess(field: String) extends JsonError
final case class SumType(cons: String) extends JsonError
-
}
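// [editor's note] Worked example, not part of the patch, of what render produces. Decoders build the
// trace with the most recently entered location at the head, so rendering walks it from the outside in.
import zio.json.JsonError

object RenderExample extends App {
  val trace = List(
    JsonError.Message("expected an Int"), // innermost: the actual failure
    JsonError.ObjectAccess("age"),
    JsonError.ArrayAccess(0),
    JsonError.ObjectAccess("users")       // outermost: the field entered first
  )
  println(JsonError.render(trace)) // .users[0].age(expected an Int)
}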
diff --git a/zio-json/shared/src/main/scala/zio/json/JsonFieldDecoder.scala b/zio-json/shared/src/main/scala/zio/json/JsonFieldDecoder.scala
deleted file mode 100644
index 68fc213cf..000000000
--- a/zio-json/shared/src/main/scala/zio/json/JsonFieldDecoder.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2019-2022 John A. De Goes and the ZIO Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package zio.json
-
-/** When decoding a JSON Object, we only allow the keys that implement this interface. */
-trait JsonFieldDecoder[+A] {
- self =>
-
- final def map[B](f: A => B): JsonFieldDecoder[B] =
- new JsonFieldDecoder[B] {
-
- def unsafeDecodeField(trace: List[JsonError], in: String): B =
- f(self.unsafeDecodeField(trace, in))
- }
-
- final def mapOrFail[B](f: A => Either[String, B]): JsonFieldDecoder[B] =
- new JsonFieldDecoder[B] {
-
- def unsafeDecodeField(trace: List[JsonError], in: String): B =
- f(self.unsafeDecodeField(trace, in)) match {
- case Left(err) =>
- throw JsonDecoder.UnsafeJson(JsonError.Message(err) :: trace)
- case Right(b) => b
- }
- }
-
- def unsafeDecodeField(trace: List[JsonError], in: String): A
-}
-
-object JsonFieldDecoder {
- def apply[A](implicit a: JsonFieldDecoder[A]): JsonFieldDecoder[A] = a
-
- implicit val string: JsonFieldDecoder[String] = new JsonFieldDecoder[String] {
- def unsafeDecodeField(trace: List[JsonError], in: String): String = in
- }
-
- implicit val int: JsonFieldDecoder[Int] =
- JsonFieldDecoder[String].mapOrFail { str =>
- try {
- Right(str.toInt)
- } catch {
- case n: NumberFormatException => Left(s"Invalid Int: '$str': $n")
- }
- }
-
- implicit val long: JsonFieldDecoder[Long] =
- JsonFieldDecoder[String].mapOrFail { str =>
- try {
- Right(str.toLong)
- } catch {
- case n: NumberFormatException => Left(s"Invalid Long: '$str': $n")
- }
- }
-}
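
Although the shared-source copy of `JsonFieldDecoder` is removed here, the API it shows (a `String` instance plus `map`/`mapOrFail`) is how decoders for JSON object keys are usually built. A hedged sketch, assuming `JsonFieldDecoder` remains importable from `zio.json`:

import zio.json._
import java.util.UUID

implicit val uuidKeyDecoder: JsonFieldDecoder[UUID] =
  JsonFieldDecoder[String].mapOrFail { s =>
    try Right(UUID.fromString(s))
    catch { case _: IllegalArgumentException => Left(s"Invalid UUID key: '$s'") }
  }

// With this in scope, a JSON object keyed by UUID strings can typically be read
// as a Map[UUID, Int] through the usual key/value decoders.
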
diff --git a/zio-json/shared/src/main/scala/zio/json/JsonFieldEncoder.scala b/zio-json/shared/src/main/scala/zio/json/JsonFieldEncoder.scala
index 706bac5ae..a5c19eefb 100644
--- a/zio-json/shared/src/main/scala/zio/json/JsonFieldEncoder.scala
+++ b/zio-json/shared/src/main/scala/zio/json/JsonFieldEncoder.scala
@@ -33,9 +33,15 @@ object JsonFieldEncoder {
def unsafeEncodeField(in: String): String = in
}
- implicit val int: JsonFieldEncoder[Int] =
- JsonFieldEncoder[String].contramap(_.toString)
+ implicit val int: JsonFieldEncoder[Int] = new JsonFieldEncoder[Int] {
+ def unsafeEncodeField(in: Int): String = in.toString
+ }
+
+ implicit val long: JsonFieldEncoder[Long] = new JsonFieldEncoder[Long] {
+ def unsafeEncodeField(in: Long): String = in.toString
+ }
- implicit val long: JsonFieldEncoder[Long] =
- JsonFieldEncoder[String].contramap(_.toString)
+ implicit val uuid: JsonFieldEncoder[java.util.UUID] = new JsonFieldEncoder[java.util.UUID] {
+ def unsafeEncodeField(in: java.util.UUID): String = in.toString
+ }
}
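
A small sketch (not in the patch) of what the new `uuid` key encoder enables, assuming the standard map encoder that derives object keys from `JsonFieldEncoder`:

import zio.json._
import java.util.UUID

val scores: Map[UUID, Int] =
  Map(UUID.fromString("123e4567-e89b-12d3-a456-426614174000") -> 1)

scores.toJson // {"123e4567-e89b-12d3-a456-426614174000":1}
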
diff --git a/zio-json/shared/src/main/scala/zio/json/ast/JsonCursor.scala b/zio-json/shared/src/main/scala/zio/json/ast/JsonCursor.scala
index 1a5e09012..ba470025f 100644
--- a/zio-json/shared/src/main/scala/zio/json/ast/JsonCursor.scala
+++ b/zio-json/shared/src/main/scala/zio/json/ast/JsonCursor.scala
@@ -19,18 +19,20 @@ sealed trait JsonCursor[-From, +To <: Json] { self =>
final def >>>[Next <: Json](that: JsonCursor[To, Next]): JsonCursor[From, Next] =
(that.asInstanceOf[JsonCursor[_ <: Json, _ <: Json]] match {
case JsonCursor.Identity =>
- that
+ self
- case JsonCursor.DownField(oldParent @ _, name) =>
- JsonCursor.DownField(self.asInstanceOf[JsonCursor[Json, Json.Obj]], name)
+ case JsonCursor.DownField(oldParent: JsonCursor[To, Json.Obj], name) =>
+ JsonCursor.DownField(self >>> oldParent, name)
- case JsonCursor.DownElement(oldParent @ _, index) =>
- JsonCursor.DownElement(self.asInstanceOf[JsonCursor[Json, Json.Arr]], index)
+ case JsonCursor.DownElement(oldParent: JsonCursor[To, Json.Arr], index) =>
+ JsonCursor.DownElement(self >>> oldParent, index)
- case JsonCursor.FilterType(oldParent @ _, tpe) =>
- JsonCursor.FilterType(self.asInstanceOf[JsonCursor[Json, Json]], tpe)
+ case JsonCursor.FilterType(oldParent: JsonCursor[To, _], tpe) =>
+ JsonCursor.FilterType(self >>> oldParent, tpe)
}).asInstanceOf[JsonCursor[From, Next]]
+ final def andThen[Next <: Json](that: JsonCursor[To, Next]): JsonCursor[From, Next] = self >>> that
+
final def isArray: JsonCursor[Json, Json.Arr] = filterType(JsonType.Arr)
final def isBool: JsonCursor[Json, Json.Bool] = filterType(JsonType.Bool)
@@ -57,13 +59,6 @@ sealed trait JsonCursor[-From, +To <: Json] { self =>
}
object JsonCursor {
- def element(index: Int): JsonCursor[Json.Arr, Json] = DownElement(Identity.isArray, index)
-
- def field(name: String): JsonCursor[Json.Obj, Json] = DownField(Identity.isObject, name)
-
- def filter[A <: Json](jsonType: JsonType[A]): JsonCursor[Json, A] =
- identity.filterType(jsonType)
-
val identity: JsonCursor[Json, Json] = Identity
val isArray: JsonCursor[Json, Json.Arr] = filter(JsonType.Arr)
@@ -78,12 +73,13 @@ object JsonCursor {
val isString: JsonCursor[Json, Json.Str] = filter(JsonType.Str)
- case object Identity extends JsonCursor[Json, Json]
+ def filter[A <: Json](jsonType: JsonType[A]): JsonCursor[Json, A] = identity.filterType(jsonType)
+ def element(index: Int): JsonCursor[Json.Arr, Json] = DownElement(isArray, index)
+ def field(name: String): JsonCursor[Json.Obj, Json] = DownField(isObject, name)
+ case object Identity extends JsonCursor[Json, Json]
final case class DownField(parent: JsonCursor[_, Json.Obj], name: String) extends JsonCursor[Json.Obj, Json]
-
final case class DownElement(parent: JsonCursor[_, Json.Arr], index: Int) extends JsonCursor[Json.Arr, Json]
-
final case class FilterType[A <: Json](parent: JsonCursor[_, _ <: Json], jsonType: JsonType[A])
extends JsonCursor[Json, A]
}
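
Illustration only: the reworked `>>>` now composes through the right-hand cursor's own parent chain instead of discarding it, so multi-step cursors can be built left to right; `andThen` is a plain alias.

import zio.json.ast._

val cursor = JsonCursor.field("items") >>> JsonCursor.isArray >>> JsonCursor.element(0)

val json = Json.Obj("items" -> Json.Arr(Json.Num(1), Json.Num(2)))

json.get(cursor) // Right(Json.Num(1))
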
diff --git a/zio-json/shared/src/main/scala/zio/json/ast/JsonType.scala b/zio-json/shared/src/main/scala/zio/json/ast/JsonType.scala
index 5ce900564..42b240435 100644
--- a/zio-json/shared/src/main/scala/zio/json/ast/JsonType.scala
+++ b/zio-json/shared/src/main/scala/zio/json/ast/JsonType.scala
@@ -23,48 +23,48 @@ object JsonType {
case object Null extends JsonType[Json.Null] {
def get(json: Json): Either[String, Json.Null] =
json match {
- case Json.Null => Right(Json.Null)
- case _ => Left("Expected null but found " + json)
+ case _: Json.Null.type => new Right(Json.Null)
+ case _ => new Left("expected null")
}
}
case object Bool extends JsonType[Json.Bool] {
def get(json: Json): Either[String, Json.Bool] =
json match {
- case x @ Json.Bool(_) => Right(x)
- case _ => Left("Expected boolean but found " + json)
+ case x: Json.Bool => new Right(x)
+ case _ => new Left("expected boolean")
}
}
case object Obj extends JsonType[Json.Obj] {
def get(json: Json): Either[String, Json.Obj] =
json match {
- case x @ Json.Obj(_) => Right(x)
- case _ => Left("Expected object but found " + json)
+ case x: Json.Obj => new Right(x)
+ case _ => new Left("expected object")
}
}
case object Arr extends JsonType[Json.Arr] {
def get(json: Json): Either[String, Json.Arr] =
json match {
- case x @ Json.Arr(_) => Right(x)
- case _ => Left("Expected array but found " + json)
+ case x: Json.Arr => new Right(x)
+ case _ => new Left("expected array")
}
}
case object Str extends JsonType[Json.Str] {
def get(json: Json): Either[String, Json.Str] =
json match {
- case x @ Json.Str(_) => Right(x)
- case _ => Left("Expected string but found " + json)
+ case x: Json.Str => new Right(x)
+ case _ => new Left("expected string")
}
}
case object Num extends JsonType[Json.Num] {
def get(json: Json): Either[String, Json.Num] =
json match {
- case x @ Json.Num(_) => Right(x)
- case _ => Left("Expected number but found " + json)
+ case x: Json.Num => new Right(x)
+ case _ => new Left("expected number")
}
}
}
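
For reference (not part of the patch): the narrowing helpers now return constant error messages rather than echoing the offending value.

import zio.json.ast._

JsonType.Str.get(Json.Str("hello")) // Right(Json.Str("hello"))
JsonType.Num.get(Json.Str("hello")) // Left("expected number")
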
diff --git a/zio-json/shared/src/main/scala/zio/json/ast/ast.scala b/zio-json/shared/src/main/scala/zio/json/ast/ast.scala
index d251165f6..24aa303bd 100644
--- a/zio-json/shared/src/main/scala/zio/json/ast/ast.scala
+++ b/zio-json/shared/src/main/scala/zio/json/ast/ast.scala
@@ -16,7 +16,7 @@
package zio.json.ast
import zio.Chunk
-import zio.json.JsonDecoder.{ JsonError, UnsafeJson }
+import zio.json.JsonDecoder.JsonError
import zio.json._
import zio.json.ast.Json._
import zio.json.internal._
@@ -24,19 +24,15 @@ import zio.json.internal._
import scala.annotation._
/**
- * This AST of JSON is made available so that arbitrary JSON may be included as
- * part of a business object, it is not used as an intermediate representation,
- * unlike most other JSON libraries. It is not advised to `.map` or `.mapOrFail`
+ * This AST of JSON is made available so that arbitrary JSON may be included as part of a business object; it is not

+ * used as an intermediate representation, unlike most other JSON libraries. It is not advised to `.map` or `.mapOrFail`
* from these decoders, since a higher performance decoder is often available.
*
- * Beware of the potential for DOS attacks, since an attacker can provide much
- * more data than is perhaps needed.
+ * Beware of the potential for DOS attacks, since an attacker can provide much more data than is perhaps needed.
*
- * Also beware of converting `Num` (a `BigDecimal`) into any other kind of
- * number, since many of the stdlib functions are non-total or are known DOS
- * vectors (e.g. calling `.toBigInteger` on a "1e214748364" will consume an
- * excessive amount of heap memory).
- * JsonValue / Json / JValue
+ * Also beware of converting `Num` (a `BigDecimal`) into any other kind of number, since many of the stdlib functions
+ * are non-total or are known DOS vectors (e.g. calling `.toBigInteger` on a "1e214748364" will consume an excessive
+ * amount of heap memory). JsonValue / Json / JValue
*/
sealed abstract class Json { self =>
final def as[A](implicit decoder: JsonDecoder[A]): Either[String, A] = decoder.fromJsonAST(self)
@@ -63,15 +59,17 @@ sealed abstract class Json { self =>
jsonArray: Chunk[Json] => X,
jsonObject: Json.Obj => X
): X = self match {
- case Json.Arr(a) => jsonArray(a)
+ case a: Json.Arr => jsonArray(a.elements)
case o: Json.Obj => jsonObject(o)
case _ => or
}
/**
* Deletes json node specified by given cursor
- * @param cursor Cursor which specifies node to delete
- * @return Json without specified node if node specified by cursor exists, error otherwise
+ * @param cursor
+ * Cursor which specifies node to delete
+ * @return
+ * Json without specified node if node specified by cursor exists, error otherwise
*/
final def delete(cursor: JsonCursor[_, _]): Either[String, Json] = {
val c = cursor.asInstanceOf[JsonCursor[_, Json]]
@@ -151,15 +149,15 @@ sealed abstract class Json { self =>
case JsonCursor.DownField(parent, field) =>
self.get(parent).flatMap { case Obj(fields) =>
- fields.collectFirst { case (key, value) if key == field => Right(value) } match {
+ fields.collectFirst { case kv if kv._1 == field => Right(kv._2) } match {
case Some(x) => x
case None => Left(s"No such field: '$field'")
}
}
case JsonCursor.DownElement(parent, index) =>
- self.get(parent).flatMap { case Arr(elements) =>
- elements.lift(index).map(Right(_)).getOrElse(Left(s"The array does not have index ${index}"))
+ self.get(parent).flatMap { case a: Arr =>
+ a.elements.lift(index).map(Right(_)).getOrElse(Left(s"The array does not have index ${index}"))
}
case JsonCursor.FilterType(parent, t @ jsonType) =>
@@ -169,30 +167,31 @@ sealed abstract class Json { self =>
override final def hashCode: Int =
31 * {
self match {
- case Obj(fields) =>
+ case s: Str => s.value.hashCode
+ case n: Num => n.value.hashCode
+ case b: Bool => b.value.hashCode
+ case o: Obj =>
var result = 0
- fields.foreach(tuple => result = result ^ tuple.hashCode)
+ o.fields.foreach(tuple => result = result ^ tuple.hashCode)
result
- case Arr(elements) =>
+ case a: Arr =>
var result = 0
var index = 0
- elements.foreach { json =>
+ a.elements.foreach { json =>
result = result ^ (index, json).hashCode
index += 1
}
result
- case Bool(value) => value.hashCode
- case Str(value) => value.hashCode
- case Num(value) => value.hashCode
- case Json.Null => 1
+ case _ => 1
}
}
/**
- * Intersects JSON values. If both values are `Obj` or `Arr` method returns intersections of its fields/elements, otherwise
- * it returns error
+ * Intersects JSON values. If both values are `Obj` or `Arr` method returns intersections of its fields/elements,
+ * otherwise it returns error
* @param that
- * @return Intersected json if type are compatible, error otherwise
+ * @return
+ * Intersected json if type are compatible, error otherwise
*/
final def intersect(that: Json): Either[String, Json] =
(self, that) match {
@@ -204,12 +203,10 @@ sealed abstract class Json { self =>
}
/**
- * - merging objects results in a new objects with all pairs of both sides, with the right hand
- * side being used on key conflicts
- *
- * - merging arrays results in all of the individual elements being merged
- *
- * - scalar values will be replaced by the right hand side
+ * - merging objects results in a new object with all pairs of both sides, with the right hand side being used on
+ * key conflicts
+ * - merging arrays results in all of the individual elements being merged
+ * - scalar values will be replaced by the right hand side
*/
final def merge(that: Json): Json =
(self, that) match {
@@ -221,10 +218,14 @@ sealed abstract class Json { self =>
/**
* Relocates Json node from location specified by `from` cursor to location specified by `to` cursor.
*
- * @param from Cursor which specifies node to relocate
- * @return Json without specified node if node specified by cursor exists, error otherwise
- * @param to Cursor which specifies location where to relocate node
- * @return Json with relocated node if node specified by cursors exist, error otherwise
+ * @param from
+ * Cursor which specifies node to relocate
+ * @return
+ * Json without specified node if node specified by cursor exists, error otherwise
+ * @param to
+ * Cursor which specifies location where to relocate node
+ * @return
+ * Json with relocated node if node specified by cursors exist, error otherwise
*/
final def relocate(from: JsonCursor[_, _], to: JsonCursor[_, _]): Either[String, Json] = {
val f = from.asInstanceOf[JsonCursor[_, Json]]
@@ -234,10 +235,14 @@ sealed abstract class Json { self =>
/**
* Transforms json node specified by given cursor
- * @param cursor Cursor which specifies node to transform
- * @param f Function used to transform node
- * @tparam A refined node type
- * @return Json with transformed node if node specified by cursor exists, error otherwise
+ * @param cursor
+ * Cursor which specifies node to transform
+ * @param f
+ * Function used to transform node
+ * @tparam A
+ * refined node type
+ * @return
+ * Json with transformed node if node specified by cursor exists, error otherwise
*/
final def transformAt[A <: Json](cursor: JsonCursor[_, A])(f: A => Json): Either[String, Json] =
transformOrDelete(cursor, delete = false)(x => Right(f(x)))
@@ -245,9 +250,9 @@ sealed abstract class Json { self =>
final def transformDown(f: Json => Json): Json = {
def loop(json: Json): Json =
f(json) match {
- case Obj(fields) => Obj(fields.map { case (name, value) => (name, loop(value)) })
- case Arr(elements) => Arr(elements.map(loop(_)))
- case json => json
+ case o: Obj => Obj(o.fields.map(kv => (kv._1, loop(kv._2))))
+ case a: Arr => Arr(a.elements.map(loop(_)))
+ case json => json
}
loop(self)
@@ -297,9 +302,9 @@ sealed abstract class Json { self =>
final def transformUp(f: Json => Json): Json = {
def loop(json: Json): Json =
json match {
- case Obj(fields) => f(Obj(fields.map { case (name, value) => (name, loop(value)) }))
- case Arr(elements) => f(Arr(elements.map(loop(_))))
- case json => f(json)
+ case o: Obj => f(Obj(o.fields.map(kv => (kv._1, loop(kv._2)))))
+ case a: Arr => f(Arr(a.elements.map(loop(_))))
+ case json => f(json)
}
loop(self)
@@ -332,7 +337,7 @@ object Json {
def mapValues(f: Json => Json): Json.Obj = Json.Obj(fields.map(e => e._1 -> f(e._2)))
def filter(pred: ((String, Json)) => Boolean): Json.Obj = Json.Obj(fields.filter(pred))
def filterKeys(pred: String => Boolean): Json.Obj = Json.Obj(fields.filter(e => pred(e._1)))
- def merge(that: Json.Obj): Json.Obj = {
+ def merge(that: Json.Obj): Json.Obj = {
val fields1 = this.fields
val fields2 = that.fields
val leftMap = fields1.toMap
@@ -364,33 +369,46 @@ object Json {
Json.Obj(Chunk.fromArray(array))
}
- override def asObject: Some[Json.Obj] = Some(this)
+ override def asObject: Some[Json.Obj] = new Some(this)
override def mapObject(f: Json.Obj => Json.Obj): Json.Obj = f(this)
override def mapObjectKeys(f: String => String): Json.Obj = Json.Obj(fields.map(e => f(e._1) -> e._2))
override def mapObjectValues(f: Json => Json): Json.Obj = mapValues(f)
override def mapObjectEntries(f: ((String, Json)) => (String, Json)): Json.Obj = Json.Obj(fields.map(f))
}
object Obj {
- def apply(fields: (String, Json)*): Obj = Obj(Chunk(fields: _*))
+ val empty: Obj = new Obj(Chunk.empty)
+
+ def apply(chunk: Chunk[(String, Json)]): Obj =
+ if (chunk.isEmpty) empty
+ else new Obj(chunk)
+
+ def apply(fields: (String, Json)*): Obj =
+ if (fields.isEmpty) Obj.empty
+ else new Obj(Chunk(fields: _*))
+
+ def apply(key: String, value: Json): Obj =
+ new Obj(Chunk.single(key -> value))
- private lazy val objd = JsonDecoder.keyValueChunk[String, Json]
+ private lazy val objd = JsonDecoder.keyValueChunk[String, Json]
implicit val decoder: JsonDecoder[Obj] = new JsonDecoder[Obj] {
def unsafeDecode(trace: List[JsonError], in: RetractReader): Obj =
Obj(objd.unsafeDecode(trace, in))
override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Obj =
json match {
- case obj @ Obj(_) => obj
- case _ => throw UnsafeJson(JsonError.Message(s"Not an object") :: trace)
+ case obj: Obj => obj
+ case _ => Lexer.error("Not an object", trace)
}
}
- private lazy val obje = JsonEncoder.keyValueChunk[String, Json]
+ private lazy val obje = JsonEncoder.keyValueChunk[String, Json]
implicit val encoder: JsonEncoder[Obj] = new JsonEncoder[Obj] {
def unsafeEncode(a: Obj, indent: Option[Int], out: Write): Unit =
obje.unsafeEncode(a.fields, indent, out)
- override final def toJsonAST(a: Obj): Either[String, Json] = Right(a)
+ override final def toJsonAST(a: Obj): Either[String, Json] = new Right(a)
}
+
+ implicit val codec: JsonCodec[Obj] = JsonCodec(encoder, decoder)
}
final case class Arr(elements: Chunk[Json]) extends Json {
def isEmpty: Boolean = elements.isEmpty
@@ -409,40 +427,54 @@ object Json {
} ++ leftover)
}
- override def asArray: Some[Chunk[Json]] = Some(elements)
+ override def asArray: Some[Chunk[Json]] = new Some(elements)
override def mapArray(f: Chunk[Json] => Chunk[Json]): Json.Arr = Json.Arr(f(elements))
override def mapArrayValues(f: Json => Json): Json.Arr = Json.Arr(elements.map(f))
}
object Arr {
- def apply(elements: Json*): Arr = Arr(Chunk(elements: _*))
+ val empty: Arr = new Arr(Chunk.empty)
+
+ def apply(chunk: Chunk[Json]): Arr =
+ if (chunk.isEmpty) empty
+ else new Arr(chunk)
+
+ def apply(elements: Json*): Arr =
+ if (elements.isEmpty) empty
+ else new Arr(Chunk(elements: _*))
- private lazy val arrd = JsonDecoder.chunk[Json]
+ private lazy val arrd = JsonDecoder.chunk[Json]
implicit val decoder: JsonDecoder[Arr] = new JsonDecoder[Arr] {
def unsafeDecode(trace: List[JsonError], in: RetractReader): Arr =
Arr(arrd.unsafeDecode(trace, in))
override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Arr =
json match {
- case arr @ Arr(_) => arr
- case _ => throw UnsafeJson(JsonError.Message(s"Not an array") :: trace)
+ case arr: Arr => arr
+ case _ => Lexer.error("Not an array", trace)
}
}
- private lazy val arre = JsonEncoder.chunk[Json]
+ private lazy val arre = JsonEncoder.chunk[Json]
implicit val encoder: JsonEncoder[Arr] = new JsonEncoder[Arr] {
def unsafeEncode(a: Arr, indent: Option[Int], out: Write): Unit =
arre.unsafeEncode(a.elements, indent, out)
- override final def toJsonAST(a: Arr): Either[String, Json] = Right(a)
+ override final def toJsonAST(a: Arr): Either[String, Json] = new Right(a)
}
+
+ implicit val codec: JsonCodec[Arr] = JsonCodec(encoder, decoder)
}
final case class Bool(value: Boolean) extends Json {
- override def asBoolean: Some[Boolean] = Some(value)
+ override def asBoolean: Some[Boolean] = new Some(value)
override def mapBoolean(f: Boolean => Boolean): Json.Bool = Json.Bool(f(value))
}
object Bool {
- val False: Bool = Bool(false)
- val True: Bool = Bool(true)
+ val False: Bool = new Bool(false)
+ val True: Bool = new Bool(true)
+
+ def apply(value: Boolean): Bool =
+ if (value) True
+ else False
implicit val decoder: JsonDecoder[Bool] = new JsonDecoder[Bool] {
def unsafeDecode(trace: List[JsonError], in: RetractReader): Bool =
@@ -450,20 +482,22 @@ object Json {
override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Bool =
json match {
- case b @ Bool(_) => b
- case _ => throw UnsafeJson(JsonError.Message(s"Not a bool value") :: trace)
+ case b: Bool => b
+ case _ => Lexer.error("Not a bool value", trace)
}
}
implicit val encoder: JsonEncoder[Bool] = new JsonEncoder[Bool] {
def unsafeEncode(a: Bool, indent: Option[Int], out: Write): Unit =
JsonEncoder.boolean.unsafeEncode(a.value, indent, out)
- override final def toJsonAST(a: Bool): Either[String, Json] = Right(a)
+ override final def toJsonAST(a: Bool): Either[String, Json] = new Right(a)
}
+
+ implicit val codec: JsonCodec[Bool] = JsonCodec(encoder, decoder)
}
final case class Str(value: String) extends Json {
- override def asString: Some[String] = Some(value)
- override def mapString(f: String => String): Json.Str = Json.Str(f(value))
+ override def asString: Some[String] = new Some(value)
+ override def mapString(f: String => String): Json.Str = new Json.Str(f(value))
}
object Str {
implicit val decoder: JsonDecoder[Str] = new JsonDecoder[Str] {
@@ -472,29 +506,43 @@ object Json {
override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Str =
json match {
- case s @ Str(_) => s
- case _ => throw UnsafeJson(JsonError.Message(s"Not a string value") :: trace)
+ case s: Str => s
+ case _ => Lexer.error("Not a string value", trace)
}
}
implicit val encoder: JsonEncoder[Str] = new JsonEncoder[Str] {
def unsafeEncode(a: Str, indent: Option[Int], out: Write): Unit =
JsonEncoder.string.unsafeEncode(a.value, indent, out)
- override final def toJsonAST(a: Str): Either[String, Json] = Right(a)
+ override final def toJsonAST(a: Str): Either[String, Json] = new Right(a)
}
+
+ implicit val codec: JsonCodec[Str] = JsonCodec(encoder, decoder)
}
final case class Num(value: java.math.BigDecimal) extends Json {
- override def asNumber: Some[Json.Num] = Some(this)
- override def mapNumber(f: java.math.BigDecimal => java.math.BigDecimal): Json.Num = Json.Num(f(value))
+ override def asNumber: Some[Json.Num] = new Some(this)
+ override def mapNumber(f: java.math.BigDecimal => java.math.BigDecimal): Json.Num = new Json.Num(f(value))
}
object Num {
- def apply(value: Byte): Num = Num(BigDecimal(value.toInt).bigDecimal)
- def apply(value: Short): Num = Num(BigDecimal(value.toInt).bigDecimal)
- def apply(value: Int): Num = Num(BigDecimal(value).bigDecimal)
- def apply(value: Long): Num = Num(BigDecimal(value).bigDecimal)
- def apply(value: BigDecimal): Num = Num(value.bigDecimal)
- def apply(value: Float): Num = Num(BigDecimal(value).bigDecimal)
- def apply(value: Double): Num = Num(BigDecimal(value).bigDecimal)
+ @inline def apply(value: Byte): Num = apply(value.toInt)
+ @inline def apply(value: Short): Num = apply(value.toInt)
+ def apply(value: Int): Num = new Num({
+ if (value < 512 && value > -512) new java.math.BigDecimal(value)
+ else BigDecimal(value).bigDecimal
+ })
+ def apply(value: Long): Num = new Num({
+ if (value < 512 && value > -512) new java.math.BigDecimal(value)
+ else BigDecimal(value).bigDecimal
+ })
+ @inline def apply(value: BigDecimal): Num = new Num(value.bigDecimal)
+ def apply(value: BigInt): Num =
+ if (value.isValidLong) apply(value.toLong)
+ else new Json.Num(new java.math.BigDecimal(value.bigInteger))
+ def apply(value: java.math.BigInteger): Num =
+ if (value.bitLength < 64) apply(value.longValue)
+ else new Json.Num(new java.math.BigDecimal(value))
+ def apply(value: Float): Num = new Num(new java.math.BigDecimal(SafeNumbers.toString(value)))
+ def apply(value: Double): Num = new Num(new java.math.BigDecimal(SafeNumbers.toString(value)))
implicit val decoder: JsonDecoder[Num] = new JsonDecoder[Num] {
def unsafeDecode(trace: List[JsonError], in: RetractReader): Num =
@@ -502,40 +550,43 @@ object Json {
override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Num =
json match {
- case n @ Num(_) => n
- case _ => throw UnsafeJson(JsonError.Message(s"Not a number") :: trace)
+ case n: Num => n
+ case _ => Lexer.error("Not a number", trace)
}
}
implicit val encoder: JsonEncoder[Num] = new JsonEncoder[Num] {
def unsafeEncode(a: Num, indent: Option[Int], out: Write): Unit =
JsonEncoder.bigDecimal.unsafeEncode(a.value, indent, out)
- override final def toJsonAST(a: Num): Either[String, Num] = Right(a)
+ override final def toJsonAST(a: Num): Either[String, Num] = new Right(a)
}
+
+ implicit val codec: JsonCodec[Num] = JsonCodec(encoder, decoder)
}
type Null = Null.type
case object Null extends Json {
- private[this] val nullChars: Array[Char] = "null".toCharArray
+ private[this] val nullChars: Array[Char] = "null".toCharArray
implicit val decoder: JsonDecoder[Null.type] = new JsonDecoder[Null.type] {
def unsafeDecode(trace: List[JsonError], in: RetractReader): Null.type = {
Lexer.readChars(trace, in, nullChars, "null")
Null
}
- override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Null.type =
- json match {
- case Null => Null
- case _ => throw UnsafeJson(JsonError.Message(s"Not null") :: trace)
- }
+ override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Null.type = {
+ if (json ne Null) Lexer.error("Not null", trace)
+ Null
+ }
}
implicit val encoder: JsonEncoder[Null.type] = new JsonEncoder[Null.type] {
def unsafeEncode(a: Null.type, indent: Option[Int], out: Write): Unit =
out.write("null")
- override final def toJsonAST(a: Null.type): Either[String, Json] = Right(a)
+ override final def toJsonAST(a: Null.type): Either[String, Json] = new Right(a)
}
- override def asNull: Some[Unit] = Some(())
+ implicit val codec: JsonCodec[Null.type] = JsonCodec(encoder, decoder)
+
+ override def asNull: Some[Unit] = new Some(())
}
implicit val decoder: JsonDecoder[Json] = new JsonDecoder[Json] {
@@ -543,15 +594,15 @@ object Json {
val c = in.nextNonWhitespace()
in.retract()
(c: @switch) match {
- case 'n' => Null.decoder.unsafeDecode(trace, in)
- case 'f' | 't' => Bool.decoder.unsafeDecode(trace, in)
- case '{' => Obj.decoder.unsafeDecode(trace, in)
- case '[' => Arr.decoder.unsafeDecode(trace, in)
- case '"' => Str.decoder.unsafeDecode(trace, in)
+ case 'n' => Null.decoder.unsafeDecode(trace, in)
+ case 'f' | 't' => Bool.decoder.unsafeDecode(trace, in)
+ case '{' => Obj.decoder.unsafeDecode(trace, in)
+ case '[' => Arr.decoder.unsafeDecode(trace, in)
+ case '"' => Str.decoder.unsafeDecode(trace, in)
case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
Num.decoder.unsafeDecode(trace, in)
case c =>
- throw UnsafeJson(JsonError.Message(s"unexpected '$c'") :: trace)
+ Lexer.error(s"unexpected '$c'", trace)
}
}
@@ -569,8 +620,12 @@ object Json {
case Null => Null.encoder.unsafeEncode(Null, indent, out)
}
- override final def toJsonAST(a: Json): Either[String, Json] = Right(a)
+ override final def toJsonAST(a: Json): Either[String, Json] = new Right(a)
}
- def apply(fields: (String, Json)*): Json = Json.Obj(Chunk(fields: _*))
+ implicit val codec: JsonCodec[Json] = JsonCodec(encoder, decoder)
+
+ def apply(fields: (String, Json)*): Json =
+ if (fields.isEmpty) Obj.empty
+ else new Obj(Chunk(fields: _*))
}
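
A brief sketch (not in the patch) of behaviour touched above: the zero-argument constructors now return shared instances, the new top-level `Json.apply` builds objects directly, and `merge` keeps the right-hand side on key conflicts.

import zio.json.ast._

Json.Obj()               // the shared Json.Obj.empty instance
Json.Bool(true)          // the cached Json.Bool.True
Json("a" -> Json.Num(1)) // equivalent to Json.Obj("a" -> Json.Num(1))

val left  = Json.Obj("a" -> Json.Num(1), "b" -> Json.Num(2))
val right = Json.Obj("b" -> Json.Num(3))

left.merge(right) // an object with "a" -> 1 and "b" -> 3 (right-hand side wins)
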
diff --git a/zio-json/shared/src/main/scala/zio/json/codegen/Generator.scala b/zio-json/shared/src/main/scala/zio/json/codegen/Generator.scala
index 523dd35b1..5f908265c 100644
--- a/zio-json/shared/src/main/scala/zio/json/codegen/Generator.scala
+++ b/zio-json/shared/src/main/scala/zio/json/codegen/Generator.scala
@@ -16,8 +16,7 @@ import scala.util.Try
object Generator {
/**
- * Renders the JSON string as a series of Scala case classes derived from the
- * structure of the JSON.
+ * Renders the JSON string as a series of Scala case classes derived from the structure of the JSON.
*
* For example, the following JSON:
*
@@ -91,7 +90,7 @@ private[codegen] sealed trait JsonType extends Product with Serializable { self
case (JNull, right) => JOption(right)
case (left, JNull) => JOption(left)
- case (JArray(left), JArray(right)) => JArray(left unify right)
+ case (JArray(left), JArray(right)) => JArray(left unify right)
case (CaseClass(left, leftFields), CaseClass(right, rightFields)) if left == right =>
CaseClass(left, (leftFields unify rightFields).asInstanceOf[JObject])
case (left, right) =>
@@ -207,10 +206,10 @@ object ${clazz.name} {
def unifyTypes(json: Json, key: Option[String] = None): JsonType =
json match {
- case Json.Null => JNull
+ case Json.Null => JNull
case Json.Arr(elements) =>
JArray(elements.map(unifyTypes(_, key)).reduce(_ unify _))
- case Json.Bool(_) => JBoolean
+ case Json.Bool(_) => JBoolean
case Json.Str(string) =>
val localDateTime =
Try(LocalDateTime.parse(string, DateTimeFormatter.ISO_DATE_TIME)).toOption.map(_ => JLocalDateTime)
diff --git a/zio-json/shared/src/main/scala/zio/json/internal/FieldEncoder.scala b/zio-json/shared/src/main/scala/zio/json/internal/FieldEncoder.scala
new file mode 100644
index 000000000..40559a7c8
--- /dev/null
+++ b/zio-json/shared/src/main/scala/zio/json/internal/FieldEncoder.scala
@@ -0,0 +1,45 @@
+package zio.json.internal
+
+import zio.json._
+import scala.annotation.switch
+
+private[json] class FieldEncoder[T, P](
+ val p: P,
+ val encoder: JsonEncoder[T],
+ val encodedName: String,
+ val prettyEncodedName: String,
+ val name: String,
+ private[this] val flags: Int
+) {
+ def skip(t: T): Boolean = (flags: @switch) match {
+ case 0 => encoder.isEmpty(t) || encoder.isNothing(t)
+ case 1 => encoder.isNothing(t)
+ case 2 => encoder.isEmpty(t)
+ case _ => false
+ }
+}
+
+private[json] object FieldEncoder {
+ def apply[T, P](
+ p: P,
+ name: String,
+ encoder: JsonEncoder[T],
+ withExplicitNulls: Boolean,
+ withExplicitEmptyCollections: Boolean
+ ): FieldEncoder[T, P] = {
+ val encodedName = JsonEncoder.string.encodeJson(name, None).toString
+ new FieldEncoder(
+ p,
+ encoder,
+ encodedName + ':',
+ encodedName + " : ",
+ name, {
+ if (withExplicitNulls) {
+ if (withExplicitEmptyCollections) 3 else 2
+ } else {
+ if (withExplicitEmptyCollections) 1 else 0
+ }
+ }
+ )
+ }
+}
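
Not part of the patch: a standalone restatement of the `flags` packing and `skip` semantics above, with illustrative helper names, to make the truth table explicit.

// Mirrors FieldEncoder.apply / FieldEncoder.skip; the names here are hypothetical.
def flagsFor(withExplicitNulls: Boolean, withExplicitEmptyCollections: Boolean): Int =
  if (withExplicitNulls) { if (withExplicitEmptyCollections) 3 else 2 }
  else { if (withExplicitEmptyCollections) 1 else 0 }

def wouldSkip(flags: Int, isEmpty: Boolean, isNothing: Boolean): Boolean = flags match {
  case 0 => isEmpty || isNothing // defaults: drop empty collections and absent (None-like) values
  case 1 => isNothing            // explicit empty collections: keep empties, drop absent values
  case 2 => isEmpty              // explicit nulls: keep absent values, drop empty collections
  case _ => false                // both explicit: always emit the field
}
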
diff --git a/zio-json/shared/src/main/scala/zio/json/internal/lexer.scala b/zio-json/shared/src/main/scala/zio/json/internal/lexer.scala
index c4e66505c..3e42b8958 100644
--- a/zio-json/shared/src/main/scala/zio/json/internal/lexer.scala
+++ b/zio-json/shared/src/main/scala/zio/json/internal/lexer.scala
@@ -16,7 +16,9 @@
package zio.json.internal
import zio.json.JsonDecoder.{ JsonError, UnsafeJson }
-
+import java.time._
+import java.util.UUID
+import java.util.concurrent.ConcurrentHashMap
import scala.annotation._
// tries to stick to the spec, but maybe a bit loose in places (e.g. numbers)
@@ -26,467 +28,1938 @@ object Lexer {
// TODO need a variant that doesn't skip whitespace, so that attack vectors
// consisting of an infinite stream of space can exit early.
- val NumberMaxBits: Int = 128
+ val NumberMaxBits: Int = 256
- // True if we got a string (implies a retraction), False for }
- def firstField(trace: List[JsonError], in: RetractReader): Boolean =
- (in.nextNonWhitespace(): @switch) match {
- case '"' =>
- in.retract()
- true
- case '}' => false
- case c =>
- throw UnsafeJson(
- JsonError.Message(s"expected string or '}' got '$c'") :: trace
- )
- }
+ @noinline def error(msg: String, trace: List[JsonError]): Nothing =
+ throw UnsafeJson(JsonError.Message(msg) :: trace)
+
+ @noinline private[json] def error(expected: String, got: Char, trace: List[JsonError]): Nothing =
+ error(s"expected $expected got '$got'", trace)
+
+ @noinline private[json] def error(c: Char, trace: List[JsonError]): Nothing =
+ error(s"invalid '\\$c' in string", trace)
+
+ // FIXME: remove trace parameter in the next major version
+ // True if we got anything besides a }, False for }
+ @inline def firstField(trace: List[JsonError], in: RetractReader): Boolean =
+ if (in.nextNonWhitespace() != '}') {
+ in.retract()
+ true
+ } else false
// True if we got a comma, and False for }
- def nextField(trace: List[JsonError], in: OneCharReader): Boolean =
- (in.nextNonWhitespace(): @switch) match {
- case ',' => true
- case '}' => false
- case c =>
- throw UnsafeJson(
- JsonError.Message(s"expected ',' or '}' got '$c'") :: trace
- )
- }
+ @inline def nextField(trace: List[JsonError], in: OneCharReader): Boolean = {
+ val c = in.nextNonWhitespace()
+ if (c == ',') true
+ else if (c == '}') false
+ else error("',' or '}'", c, trace)
+ }
// True if we got anything besides a ], False for ]
- def firstArrayElement(in: RetractReader): Boolean =
- (in.nextNonWhitespace(): @switch) match {
- case ']' => false
- case _ =>
- in.retract()
- true
- }
+ @inline def firstArrayElement(in: RetractReader): Boolean =
+ if (in.nextNonWhitespace() != ']') {
+ in.retract()
+ true
+ } else false
- def nextArrayElement(trace: List[JsonError], in: OneCharReader): Boolean =
- (in.nextNonWhitespace(): @switch) match {
- case ',' => true
- case ']' => false
- case c =>
- throw UnsafeJson(
- JsonError.Message(s"expected ',' or ']' got '$c'") :: trace
- )
- }
-
- // avoids allocating lots of strings (they are often the bulk of incoming
- // messages) by only checking for what we expect to see (Jon Pretty's idea).
- //
- // returns the index of the matched field, or -1
- def field(
- trace: List[JsonError],
- in: OneCharReader,
- matrix: StringMatrix
- ): Int = {
+ @inline def nextArrayElement(trace: List[JsonError], in: OneCharReader): Boolean = {
+ val c = in.nextNonWhitespace()
+ if (c == ',') true
+ else if (c == ']') false
+ else error("',' or ']'", c, trace)
+ }
+
+ @inline def field(trace: List[JsonError], in: OneCharReader, matrix: StringMatrix): Int = {
val f = enumeration(trace, in, matrix)
- char(trace, in, ':')
- f
+ val c = in.nextNonWhitespace()
+ if (c == ':') return f
+ error("':'", c, trace)
}
- def enumeration(
- trace: List[JsonError],
- in: OneCharReader,
- matrix: StringMatrix
- ): Int = {
- val stream = streamingString(trace, in)
-
- var i: Int = 0
- var bs: Long = matrix.initial
- var c: Int = -1
- while ({ c = stream.read(); c != -1 }) {
+ def enumeration(trace: List[JsonError], in: OneCharReader, matrix: StringMatrix): Int = {
+ var c = in.nextNonWhitespace()
+ if (c != '"') error("'\"'", c, trace)
+ var bs = matrix.initial
+ var i = 0
+ while ({
+ c = in.readChar()
+ c != '"'
+ }) {
+ if (c == '\\') c = nextEscaped(trace, in)
+ else if (c < ' ') error("invalid control in string", trace)
bs = matrix.update(bs, i, c)
i += 1
}
- bs = matrix.exact(bs, i)
- matrix.first(bs)
+ matrix.first(matrix.exact(bs, i))
+ }
+
+ def enumeration128(trace: List[JsonError], in: OneCharReader, matrix1: StringMatrix, matrix2: StringMatrix): Int = {
+ var c = in.nextNonWhitespace()
+ if (c != '"') error("'\"'", c, trace)
+ var bs1 = matrix1.initial
+ var bs2 = matrix2.initial
+ var i = 0
+ while ({
+ c = in.readChar()
+ c != '"'
+ }) {
+ if (c == '\\') c = nextEscaped(trace, in)
+ else if (c < ' ') error("invalid control in string", trace)
+ bs1 = matrix1.update(bs1, i, c)
+ bs2 = matrix2.update(bs2, i, c)
+ i += 1
+ }
+ var idx = matrix1.first(matrix1.exact(bs1, i))
+ if (idx < 0) {
+ idx = matrix2.first(matrix2.exact(bs2, i))
+ if (idx >= 0) idx += matrix1.namesLen
+ }
+ idx
}
- private[this] val ull: Array[Char] = "ull".toCharArray
- private[this] val alse: Array[Char] = "alse".toCharArray
- private[this] val rue: Array[Char] = "rue".toCharArray
+ @inline def field128(trace: List[JsonError], in: OneCharReader, matrix1: StringMatrix, matrix2: StringMatrix): Int = {
+ val f = enumeration128(trace, in, matrix1, matrix2)
+ val c = in.nextNonWhitespace()
+ if (c == ':') return f
+ error("':'", c, trace)
+ }
- def skipValue(trace: List[JsonError], in: RetractReader): Unit =
+ @noinline def skipValue(trace: List[JsonError], in: RetractReader): Unit =
(in.nextNonWhitespace(): @switch) match {
- case 'n' => readChars(trace, in, ull, "null")
- case 'f' => readChars(trace, in, alse, "false")
- case 't' => readChars(trace, in, rue, "true")
- case '{' =>
- if (firstField(trace, in)) {
- while ({
- {
- char(trace, in, '"')
- skipString(trace, in)
- char(trace, in, ':')
- skipValue(trace, in)
- }; nextField(trace, in)
- }) ()
- }
- case '[' =>
- if (firstArrayElement(in)) {
- while ({ skipValue(trace, in); nextArrayElement(trace, in) }) ()
- }
- case '"' =>
- skipString(trace, in)
+ case 'n' | 't' => skipFixedChars(in, 3)
+ case 'f' => skipFixedChars(in, 4)
+ case '{' => skipObject(in, 0)
+ case '[' => skipArray(in, 0)
+ case '"' =>
+ skipString(in, evenBackSlashes = true)
case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
skipNumber(in)
- case c => throw UnsafeJson(JsonError.Message(s"unexpected '$c'") :: trace)
+ case c => error(s"unexpected '$c'", trace)
}
def skipNumber(in: RetractReader): Unit = {
- while (isNumber(in.readChar())) {}
+ while (isNumber(in.readChar())) ()
in.retract()
}
- def skipString(trace: List[JsonError], in: OneCharReader): Unit = {
- val stream = new EscapedString(trace, in)
- var i: Int = 0
- while ({ i = stream.read(); i != -1 }) ()
+ // FIXME: remove in the next major version
+ def skipString(trace: List[JsonError], in: OneCharReader): Unit =
+ skipString(in, evenBackSlashes = true)
+
+ @tailrec private def skipFixedChars(in: OneCharReader, n: Int): Unit =
+ if (n > 0) {
+ in.readChar()
+ skipFixedChars(in, n - 1)
+ }
+
+ @tailrec private def skipString(in: OneCharReader, evenBackSlashes: Boolean): Unit = {
+ val ch = in.readChar()
+ if (evenBackSlashes) {
+ if (ch != '"') skipString(in, ch != '\\')
+ } else skipString(in, evenBackSlashes = true)
}
- // useful for embedded documents, e.g. CSV contained inside JSON
- def streamingString(
- trace: List[JsonError],
- in: OneCharReader
- ): java.io.Reader = {
- char(trace, in, '"')
- new EscapedString(trace, in)
+ @tailrec private def skipObject(in: OneCharReader, level: Int): Unit = {
+ val ch = in.readChar()
+ if (ch == '"') {
+ skipString(in, evenBackSlashes = true)
+ skipObject(in, level)
+ } else if (ch == '{') skipObject(in, level + 1)
+ else if (ch != '}') skipObject(in, level)
+ else if (level != 0) skipObject(in, level - 1)
}
- def string(trace: List[JsonError], in: OneCharReader): CharSequence = {
- char(trace, in, '"')
- val stream = new EscapedString(trace, in)
+ @tailrec private def skipArray(in: OneCharReader, level: Int): Unit = {
+ val b = in.readChar()
+ if (b == '"') {
+ skipString(in, evenBackSlashes = true)
+ skipArray(in, level)
+ } else if (b == '[') skipArray(in, level + 1)
+ else if (b != ']') skipArray(in, level)
+ else if (level != 0) skipArray(in, level - 1)
+ }
- val sb = new FastStringBuilder(64)
- while (true) {
- val c = stream.read()
- if (c == -1)
- return sb.buffer // mutable thing escapes, but cannot be changed
- sb.append(c.toChar)
- }
- throw UnsafeJson(JsonError.Message("impossible string") :: trace)
+ def boolean(trace: List[JsonError], in: OneCharReader): Boolean = {
+ val c = in.nextNonWhitespace()
+ if (c == 't' && in.readChar() == 'r' && in.readChar() == 'u' && in.readChar() == 'e') true
+ else if (c == 'f' && in.readChar() == 'a' && in.readChar() == 'l' && in.readChar() == 's' && in.readChar() == 'e')
+ false
+ else error("expected a Boolean", trace)
}
- def boolean(trace: List[JsonError], in: OneCharReader): Boolean =
- (in.nextNonWhitespace(): @switch) match {
- case 't' =>
- readChars(trace, in, rue, "true")
- true
- case 'f' =>
- readChars(trace, in, alse, "false")
- false
- case c =>
- throw UnsafeJson(
- JsonError.Message(s"expected 'true' or 'false' got $c") :: trace
- )
- }
-
- def byte(trace: List[JsonError], in: RetractReader): Byte = {
- checkNumber(trace, in)
+ def byte(trace: List[JsonError], in: RetractReader): Byte =
try {
val i = UnsafeNumbers.byte_(in, false)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message("expected a Byte") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error("expected a Byte", trace)
}
- }
- def short(trace: List[JsonError], in: RetractReader): Short = {
- checkNumber(trace, in)
+ def short(trace: List[JsonError], in: RetractReader): Short =
try {
val i = UnsafeNumbers.short_(in, false)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message("expected a Short") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error("expected a Short", trace)
}
- }
- def int(trace: List[JsonError], in: RetractReader): Int = {
- checkNumber(trace, in)
+ def int(trace: List[JsonError], in: RetractReader): Int =
try {
val i = UnsafeNumbers.int_(in, false)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message("expected an Int") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error("expected an Int", trace)
}
- }
- def long(trace: List[JsonError], in: RetractReader): Long = {
- checkNumber(trace, in)
+ def long(trace: List[JsonError], in: RetractReader): Long =
try {
val i = UnsafeNumbers.long_(in, false)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message("expected a Long") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error("expected a Long", trace)
}
- }
- def bigInteger(
- trace: List[JsonError],
- in: RetractReader
- ): java.math.BigInteger = {
- checkNumber(trace, in)
+ def bigInteger(trace: List[JsonError], in: RetractReader): java.math.BigInteger =
try {
val i = UnsafeNumbers.bigInteger_(in, false, NumberMaxBits)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message(s"expected a $NumberMaxBits bit BigInteger") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error(s"expected a $NumberMaxBits-bit BigInteger", trace)
+ }
+
+ def bigInt(trace: List[JsonError], in: RetractReader): BigInt =
+ try {
+ val i = UnsafeNumbers.bigInt_(in, false, NumberMaxBits)
+ in.retract()
+ i
+ } catch {
+ case UnsafeNumbers.UnsafeNumber => error(s"expected a $NumberMaxBits-bit BigInt", trace)
}
- }
- def float(trace: List[JsonError], in: RetractReader): Float = {
- checkNumber(trace, in)
+ def float(trace: List[JsonError], in: RetractReader): Float =
try {
val i = UnsafeNumbers.float_(in, false, NumberMaxBits)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message("expected a Float") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error("expected a Float", trace)
}
- }
- def double(trace: List[JsonError], in: RetractReader): Double = {
- checkNumber(trace, in)
+ def double(trace: List[JsonError], in: RetractReader): Double =
try {
val i = UnsafeNumbers.double_(in, false, NumberMaxBits)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message("expected a Double") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error("expected a Double", trace)
}
- }
- def bigDecimal(
- trace: List[JsonError],
- in: RetractReader
- ): java.math.BigDecimal = {
- checkNumber(trace, in)
+ def bigDecimal(trace: List[JsonError], in: RetractReader): java.math.BigDecimal =
try {
val i = UnsafeNumbers.bigDecimal_(in, false, NumberMaxBits)
in.retract()
i
} catch {
- case UnsafeNumbers.UnsafeNumber =>
- throw UnsafeJson(JsonError.Message(s"expected a $NumberMaxBits BigDecimal") :: trace)
+ case UnsafeNumbers.UnsafeNumber => error(s"expected a BigDecimal with $NumberMaxBits-bit mantissa", trace)
+ }
+
+ // FIXME: remove in the next major version
+ def streamingString(trace: List[JsonError], in: OneCharReader): java.io.Reader = {
+ char(trace, in, '"')
+ new OneCharReader {
+ def close(): Unit = in.close()
+
+ private[this] var escaped = false
+
+ @tailrec override def read(): Int = {
+ val c = in.readChar()
+ if (escaped) {
+ escaped = false
+ ((c: @switch) match {
+ case '"' | '\\' | '/' => c
+ case 'b' => '\b'
+ case 'f' => '\f'
+ case 'n' => '\n'
+ case 'r' => '\r'
+ case 't' => '\t'
+ case 'u' => nextHex4(trace, in)
+ case c => error(c, trace)
+ }).toInt
+ } else if (c == '\\') {
+ escaped = true
+ read()
+ } else if (c == '"') -1 // this is the EOS for the caller
+ else if (c < ' ') error("invalid control in string", trace)
+ else c.toInt
+ }
+
+ // callers expect to get an EOB so this is rare
+ def readChar(): Char = {
+ val v = read()
+ if (v == -1) throw new UnexpectedEnd
+ v.toChar
+ }
}
}
- // really just a way to consume the whitespace
- private def checkNumber(trace: List[JsonError], in: RetractReader): Unit = {
- (in.nextNonWhitespace(): @switch) match {
- case '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => ()
- case c =>
- throw UnsafeJson(
- JsonError.Message(s"expected a number, got $c") :: trace
- )
+ def string(trace: List[JsonError], in: OneCharReader): CharSequence = {
+ var c = in.nextNonWhitespace()
+ if (c == '"') {
+ var cs = charArrays.get
+ var i = 0
+ while ({
+ c = in.readChar()
+ c != '"'
+ }) {
+ if (c == '\\') c = nextEscaped(trace, in)
+ else if (c < ' ') error("invalid control in string", trace)
+ if (i == cs.length) cs = java.util.Arrays.copyOf(cs, i << 1)
+ cs(i) = c
+ i += 1
+ }
+ return new String(cs, 0, i)
}
- in.retract()
+ error("expected string", trace)
}
- // optional whitespace and then an expected character
- @inline def char(trace: List[JsonError], in: OneCharReader, c: Char): Unit = {
- val got = in.nextNonWhitespace()
- if (got != c)
- throw UnsafeJson(JsonError.Message(s"expected '$c' got '$got'") :: trace)
+ def uuid(trace: List[JsonError], in: OneCharReader): UUID = {
+ var c = in.nextNonWhitespace()
+ if (c == '"') {
+ val cs = charArrays.get
+ var i, m = 0
+ while ({
+ c = in.readChar()
+ i < 1024 && c != '"'
+ }) {
+ if (c == '\\') c = nextEscaped(trace, in)
+ cs(i) = c
+ m |= c
+ i += 1
+ }
+ if (m <= 0xff) {
+ if (
+ i == 36 && {
+ val c1 = cs(8)
+ val c2 = cs(13)
+ val c3 = cs(18)
+ val c4 = cs(23)
+ c1 == '-' && c2 == '-' && c3 == '-' && c4 == '-'
+ }
+ ) {
+ val ds = hexDigits
+ val msb1 =
+ ds(cs(0)).toLong << 28 |
+ (ds(cs(1)) << 24 |
+ ds(cs(2)) << 20 |
+ ds(cs(3)) << 16 |
+ ds(cs(4)) << 12 |
+ ds(cs(5)) << 8 |
+ ds(cs(6)) << 4 |
+ ds(cs(7)))
+ val msb2 =
+ (ds(cs(9)) << 12 |
+ ds(cs(10)) << 8 |
+ ds(cs(11)) << 4 |
+ ds(cs(12))).toLong
+ val msb3 =
+ (ds(cs(14)) << 12 |
+ ds(cs(15)) << 8 |
+ ds(cs(16)) << 4 |
+ ds(cs(17))).toLong
+ val lsb1 =
+ (ds(cs(19)) << 12 |
+ ds(cs(20)) << 8 |
+ ds(cs(21)) << 4 |
+ ds(cs(22))).toLong
+ val lsb2 =
+ (ds(cs(24)) << 16 |
+ ds(cs(25)) << 12 |
+ ds(cs(26)) << 8 |
+ ds(cs(27)) << 4 |
+ ds(cs(28))).toLong << 28 |
+ (ds(cs(29)) << 24 |
+ ds(cs(30)) << 20 |
+ ds(cs(31)) << 16 |
+ ds(cs(32)) << 12 |
+ ds(cs(33)) << 8 |
+ ds(cs(34)) << 4 |
+ ds(cs(35)))
+ if ((msb1 | msb2 | msb3 | lsb1 | lsb2) >= 0L) {
+ return new UUID(msb1 << 32 | msb2 << 16 | msb3, lsb1 << 48 | lsb2)
+ }
+ } else if (i <= 36) {
+ return uuidExtended(trace, cs, i)
+ }
+ }
+ }
+ uuidError(trace)
}
- @inline def charOnly(
- trace: List[JsonError],
- in: OneCharReader,
- c: Char
- ): Unit = {
- val got = in.readChar()
- if (got != c)
- throw UnsafeJson(JsonError.Message(s"expected '$c' got '$got'") :: trace)
+ private[this] def uuidExtended(trace: List[JsonError], cs: Array[Char], len: Int): UUID = {
+ val dash1 = indexOfDash(cs, 1, len)
+ val dash2 = indexOfDash(cs, dash1 + 2, len)
+ val dash3 = indexOfDash(cs, dash2 + 2, len)
+ val dash4 = indexOfDash(cs, dash3 + 2, len)
+ if (dash4 >= 0) {
+ val ds = hexDigits
+ val section1 = uuidSection(trace, ds, cs, 0, dash1, 0xffffffff00000000L)
+ val section2 = uuidSection(trace, ds, cs, dash1 + 1, dash2, 0xffffffffffff0000L)
+ val section3 = uuidSection(trace, ds, cs, dash2 + 1, dash3, 0xffffffffffff0000L)
+ val section4 = uuidSection(trace, ds, cs, dash3 + 1, dash4, 0xffffffffffff0000L)
+ val section5 = uuidSection(trace, ds, cs, dash4 + 1, len, 0xffff000000000000L)
+ return new UUID((section1 << 32) | (section2 << 16) | section3, (section4 << 48) | section5)
+ }
+ uuidError(trace)
}
- // non-positional for performance
- @inline private[this] def isNumber(c: Char): Boolean =
- (c: @switch) match {
- case '+' | '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '.' | 'e' | 'E' =>
- true
- case _ => false
+ private[this] def indexOfDash(cs: Array[Char], from: Int, to: Int): Int = {
+ var i = from
+ while (i < to) {
+ if (cs(i) == '-') return i
+ i += 1
}
+ -1
+ }
- def readChars(
+ private[this] def uuidSection(
trace: List[JsonError],
- in: OneCharReader,
- expect: Array[Char],
- errMsg: String
- ): Unit = {
- var i: Int = 0
- while (i < expect.length) {
- if (in.readChar() != expect(i))
- throw UnsafeJson(JsonError.Message(s"expected '$errMsg'") :: trace)
+ ds: Array[Byte],
+ cs: Array[Char],
+ from: Int,
+ to: Int,
+ mask: Long
+ ): Long = {
+ if (from < to && from + 16 >= to) {
+ var result = 0L
+ var i = from
+ while (i < to) {
+ result = (result << 4) | ds(cs(i))
+ i += 1
+ }
+ if ((result & mask) == 0L) return result
+ }
+ uuidError(trace)
+ }
+
+ def duration(trace: List[JsonError], in: OneCharReader): Duration = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var seconds = 0L
+ var nanos, pos, state = 0
+ if (pos >= i) durationError(trace)
+ var ch = cs(pos)
+ pos += 1
+ val isNeg = ch == '-'
+ if (isNeg) {
+ if (pos >= i) durationError(trace)
+ ch = cs(pos)
+ pos += 1
+ }
+ if (ch != 'P' || pos >= i) durationError(trace)
+ ch = cs(pos)
+ pos += 1
+ while ({
+ if (state == 0) {
+ if (ch == 'T') {
+ if (pos >= i) durationError(trace)
+ ch = cs(pos)
+ pos += 1
+ state = 1
+ }
+ } else if (state == 1) {
+ if (ch != 'T' || pos >= i) durationError(trace)
+ ch = cs(pos)
+ pos += 1
+ } else if (state == 4 && pos >= i) durationError(trace)
+ val isNegX = ch == '-'
+ if (isNegX) {
+ if (pos >= i) durationError(trace)
+ ch = cs(pos)
+ pos += 1
+ }
+ if (ch < '0' || ch > '9') durationError(trace)
+ var x: Long = ('0' - ch).toLong
+ while (
+ (pos < i) && {
+ ch = cs(pos)
+ ch >= '0' && ch <= '9'
+ }
+ ) {
+ if (
+ x < -922337203685477580L || {
+ x = x * 10 + ('0' - ch)
+ x > 0
+ }
+ ) durationError(trace)
+ pos += 1
+ }
+ if (!(isNeg ^ isNegX)) {
+ if (x == -9223372036854775808L) durationError(trace)
+ x = -x
+ }
+ if (ch == 'D' && state <= 0) {
+ if (x < -106751991167300L || x > 106751991167300L) durationError(trace)
+ seconds = x * 86400
+ state = 1
+ } else if (ch == 'H' && state <= 1) {
+ if (x < -2562047788015215L || x > 2562047788015215L) durationError(trace)
+ seconds = sumSeconds(x * 3600, seconds, trace)
+ state = 2
+ } else if (ch == 'M' && state <= 2) {
+ if (x < -153722867280912930L || x > 153722867280912930L) durationError(trace)
+ seconds = sumSeconds(x * 60, seconds, trace)
+ state = 3
+ } else if (ch == '.') {
+ pos += 1
+ seconds = sumSeconds(x, seconds, trace)
+ var nanoDigitWeight = 100000000
+ while (
+ (pos < i) && {
+ ch = cs(pos)
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }
+ ) {
+ nanos += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ pos += 1
+ }
+ if (ch != 'S') durationError(trace)
+ if (isNeg ^ isNegX) nanos = -nanos
+ state = 4
+ } else if (ch == 'S') {
+ seconds = sumSeconds(x, seconds, trace)
+ state = 4
+ } else durationError(trace)
+ pos += 1
+ (pos < i) && {
+ ch = cs(pos)
+ pos += 1
+ true
+ }
+ }) ()
+ return Duration.ofSeconds(seconds, nanos.toLong)
+ }
+ durationError(trace)
+ }
+
+ def instant(trace: List[JsonError], in: OneCharReader): Instant = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, year, month, day = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && instantError(trace))
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 10
+ }
+ ) {
+ year =
+ if (year > 100000000) 2147483647
+ else year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearDigits == 10 && year > 1000000000 || yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
+ }
+ } || pos + 5 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ val ch5 = cs(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
+ }
+ ) instantError(trace)
+ val epochDay =
+ epochDayForYear(year) + (dayOfYearForYearMonth(year, month) + day - 719529) // 719528 == days 0000 to 1970
+ var epochSecond = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ epochSecond = hour * 3600 + (ch3 * 10 + ch4 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ }
+ ) instantError(trace)
+ var nano = 0
+ var ch = '0'
+ if (pos < i) {
+ ch = cs(pos)
+ pos += 1
+ if (ch == ':') {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ epochSecond += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) instantError(trace)
+ if (pos < i) {
+ ch = cs(pos)
+ pos += 1
+ if (ch == '.') {
+ var nanoDigitWeight = 100000000
+ while (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }
+ ) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ }
+ }
+ }
+ }
+ }
+ var offsetTotal = 0
+ if (ch != 'Z') {
+ val offsetNeg = ch == '-' || (ch != '+' && instantError(trace))
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
+ }
+ ) instantError(trace)
+ if (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch == ':'
+ }
+ ) {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) instantError(trace)
+ if (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch == ':'
+ }
+ ) {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) instantError(trace)
+ }
+ }
+ if (offsetTotal > 64800) instantError(trace) // 64800 == 18 * 60 * 60
+ if (offsetNeg) offsetTotal = -offsetTotal
+ }
+ if (pos == i) return Instant.ofEpochSecond(epochDay * 86400 + (epochSecond - offsetTotal), nano.toLong)
+ }
+ instantError(trace)
+ }
+
+ def localDate(trace: List[JsonError], in: OneCharReader): LocalDate = {
+ var year, month, day = 0
+ if (
+ in.nextNonWhitespace() != '"' || {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos = 0
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && localDateError(trace))
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
+ }
+ } || pos + 5 != i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
+ }
+ }
+ ) localDateError(trace)
+ LocalDate.of(year, month, day)
+ }
+
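// Editor's sketch (hypothetical helper names, not part of this patch) of the digit-combining
// constants used throughout these parsers: for ASCII digits, ch0 * 10 + ch1 - 528 equals
// (ch0 - '0') * 10 + (ch1 - '0') because 528 == '0' * 11, and 53328 == '0' * 1111 plays the
// same role for four digits.
object DigitPackingCheck {
  def twoDigits(ch0: Char, ch1: Char): Int = ch0 * 10 + ch1 - 528
  def fourDigits(ch0: Char, ch1: Char, ch2: Char, ch3: Char): Int =
    ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328

  def main(args: Array[String]): Unit = {
    assert(twoDigits('2', '9') == 29)
    assert(fourDigits('2', '0', '2', '4') == 2024)
    // range checks must still be done separately, exactly as the parsers above do:
    // a non-digit such as ':' would silently produce a wrong number here.
    println("digit packing verified")
  }
}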
+ def localDateTime(trace: List[JsonError], in: OneCharReader): LocalDateTime = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, year, month, day = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && localDateTimeError(trace))
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
+ }
+ } || pos + 5 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ val ch5 = cs(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || day == 0 || month < 1 || month > 12 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
+ }
+ ) localDateTimeError(trace)
+ var hour, minute = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ }
+ ) localDateTimeError(trace)
+ var second, nano = 0
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ }
+ ) localDateTimeError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != '.' || {
+ pos += 1
+ var nanoDigitWeight = 100000000
+ var ch = '0'
+ while (
+ pos < i && {
+ ch = cs(pos)
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }
+ ) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ pos += 1
+ }
+ pos != i
+ }
+ ) localDateTimeError(trace)
+ }
+ }
+ return LocalDateTime.of(year, month, day, hour, minute, second, nano)
+ }
+ localDateTimeError(trace)
+ }
+
+ def localTime(trace: List[JsonError], in: OneCharReader): LocalTime = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, hour, minute = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ }
+ ) localTimeError(trace)
+ var second, nano = 0
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ }
+ ) localTimeError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != '.' || {
+ pos += 1
+ var nanoDigitWeight = 100000000
+ var ch = '0'
+ while (
+ pos < i && {
+ ch = cs(pos)
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }
+ ) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ pos += 1
+ }
+ pos != i
+ }
+ ) localTimeError(trace)
+ }
+ }
+ return LocalTime.of(hour, minute, second, nano)
+ }
+ localTimeError(trace)
+ }
+
+ def monthDay(trace: List[JsonError], in: OneCharReader): MonthDay = {
+ var month, day = 0
+ if (
+ in.nextNonWhitespace() != '"' || {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ i != 7 || {
+ val ch0 = cs(0)
+ val ch1 = cs(1)
+ val ch2 = cs(2)
+ val ch3 = cs(3)
+ val ch4 = cs(4)
+ val ch5 = cs(5)
+ val ch6 = cs(6)
+ month = ch2 * 10 + ch3 - 528 // 528 == '0' * 11
+ day = ch5 * 10 + ch6 - 528 // 528 == '0' * 11
+ ch0 != '-' || ch1 != '-' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || ch4 != '-' ||
+ ch5 < '0' || ch5 > '9' || ch6 < '0' || ch6 > '9' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForMonth(month))
+ }
+ }
+ ) monthDayError(trace)
+ MonthDay.of(month, day)
+ }
+
+ def offsetDateTime(trace: List[JsonError], in: OneCharReader): OffsetDateTime = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, year, month, day = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && offsetDateTimeError(trace))
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
+ }
+ } || pos + 5 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ val ch5 = cs(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
+ }
+ ) offsetDateTimeError(trace)
+ var hour, minute = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ } || pos >= i
+ ) offsetDateTimeError(trace)
+ var second, nano = 0
+ var ch = cs(pos)
+ pos += 1
+ if (ch == ':') {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ } || pos >= i
+ ) offsetDateTimeError(trace)
+ ch = cs(pos)
+ pos += 1
+ if (ch == '.') {
+ var nanoDigitWeight = 100000000
+ while ({
+ if (pos >= i) offsetDateTimeError(trace)
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ }
+ }
+ }
+ val zoneOffset =
+ if (ch == 'Z') ZoneOffset.UTC
+ else {
+ val offsetNeg = ch == '-' || (ch != '+' && offsetDateTimeError(trace))
+ var offsetTotal = 0
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
+ }
+ ) offsetDateTimeError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i
+ } || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) offsetDateTimeError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i
+ } || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) offsetDateTimeError(trace)
+ }
+ }
+ if (offsetTotal > 64800) offsetDateTimeError(trace)
+ toZoneOffset(offsetNeg, offsetTotal)
+ }
+ if (pos == i) return OffsetDateTime.of(year, month, day, hour, minute, second, nano, zoneOffset)
+ }
+ offsetDateTimeError(trace)
+ }
+
+ def offsetTime(trace: List[JsonError], in: OneCharReader): OffsetTime = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, hour, minute = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ } || pos >= i
+ ) offsetTimeError(trace)
+ var second, nano = 0
+ var ch = cs(pos)
+ pos += 1
+ if (ch == ':') {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ } || pos >= i
+ ) offsetTimeError(trace)
+ ch = cs(pos)
+ pos += 1
+ if (ch == '.') {
+ var nanoDigitWeight = 100000000
+ while ({
+ if (pos >= i) offsetTimeError(trace)
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ }
+ }
+ }
+ val zoneOffset =
+ if (ch == 'Z') ZoneOffset.UTC
+ else {
+ val offsetNeg = ch == '-' || (ch != '+' && offsetTimeError(trace))
+ var offsetTotal = 0
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
+ }
+ ) offsetTimeError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i
+ } || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) offsetTimeError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i
+ } || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) offsetTimeError(trace)
+ }
+ }
+ if (offsetTotal > 64800) offsetTimeError(trace)
+ toZoneOffset(offsetNeg, offsetTotal)
+ }
+ if (pos == i) return OffsetTime.of(hour, minute, second, nano, zoneOffset)
+ }
+ offsetTimeError(trace)
+ }
+
+ def period(trace: List[JsonError], in: OneCharReader): Period = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, state, years, months, days = 0
+ if (pos >= i) periodError(trace)
+ var ch = cs(pos)
+ pos += 1
+ val isNeg = ch == '-'
+ if (isNeg) {
+ if (pos >= i) periodError(trace)
+ ch = cs(pos)
+ pos += 1
+ }
+ if (ch != 'P' || pos >= i) periodError(trace)
+ ch = cs(pos)
+ pos += 1
+ while ({
+ if (state == 4 && pos >= i) periodError(trace)
+ val isNegX = ch == '-'
+ if (isNegX) {
+ if (pos >= i) periodError(trace)
+ ch = cs(pos)
+ pos += 1
+ }
+ if (ch < '0' || ch > '9') periodError(trace)
+ var x: Int = '0' - ch
+ while (
+ (pos < i) && {
+ ch = cs(pos)
+ ch >= '0' && ch <= '9'
+ }
+ ) {
+ if (
+ x < -214748364 || {
+ x = x * 10 + ('0' - ch)
+ x > 0
+ }
+ ) periodError(trace)
+ pos += 1
+ }
+ if (!(isNeg ^ isNegX)) {
+ if (x == -2147483648) periodError(trace)
+ x = -x
+ }
+ if (ch == 'Y' && state <= 0) {
+ years = x
+ state = 1
+ } else if (ch == 'M' && state <= 1) {
+ months = x
+ state = 2
+ } else if (ch == 'W' && state <= 2) {
+ if (x < -306783378 || x > 306783378) periodError(trace)
+ days = x * 7
+ state = 3
+ } else if (ch == 'D') {
+ val ds = x.toLong + days
+ if (ds != ds.toInt) periodError(trace)
+ days = ds.toInt
+ state = 4
+ } else periodError(trace)
+ pos += 1
+ (pos < i) && {
+ ch = cs(pos)
+ pos += 1
+ true
+ }
+ }) ()
+ return Period.of(years, months, days)
+ }
+ periodError(trace)
+ }
+
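// Editor's sketch, not part of this patch: why the Period parser above accumulates into a
// *negative* Int. -2147483648 has no positive counterpart in Int, so digits are folded in as
// x = x * 10 + ('0' - ch) and the sign is flipped only at the end; the guard on -214748364
// rejects a digit that would overflow. The function name is illustrative only.
object NegativeAccumulationSketch {
  def parseIntAllowingMinValue(s: String): Int = {
    val neg = s.startsWith("-")
    var i = if (neg) 1 else 0
    require(i < s.length, "expected at least one digit")
    var x = 0
    while (i < s.length) {
      val ch = s.charAt(i)
      require(ch >= '0' && ch <= '9', "expected a digit")
      require(x >= -214748364, "too many digits") // another *10 would overflow
      x = x * 10 + ('0' - ch)                     // accumulate negatively
      require(x <= 0, "overflow past Int.MinValue")
      i += 1
    }
    if (neg) x
    else { require(x != Int.MinValue, "2147483648 does not fit in an Int"); -x }
  }

  def main(args: Array[String]): Unit = {
    assert(parseIntAllowingMinValue("-2147483648") == Int.MinValue)
    assert(parseIntAllowingMinValue("2147483647") == Int.MaxValue)
    println("negative accumulation verified")
  }
}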
+ def year(trace: List[JsonError], in: OneCharReader): Year = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, year = 0
+ if (
+ pos + 3 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ pos += 4
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ pos != i
+ } else {
+ val yearNeg = ch0 == '-' || (ch0 != '+' && yearError(trace))
+ year = ch1 * 100 + ch2 * 10 + ch3 - 5328 // 5328 == '0' * 111
+ var yearDigits = 3
+ var ch = '0'
+ while (
+ pos < i && {
+ ch = cs(pos)
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ pos += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || pos != i
+ }
+ }
+ }
+ ) yearError(trace)
+ return Year.of(year)
+ }
+ yearError(trace)
+ }
+
+ def yearMonth(trace: List[JsonError], in: OneCharReader): YearMonth = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, year, month = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ val yearNeg = ch0 == '-' || (ch0 != '+' && yearMonthError(trace))
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while ({
+ if (pos >= i) yearMonthError(trace)
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
+ }
+ } || pos + 2 != i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || month < 1 || month > 12
+ }
+ ) yearMonthError(trace)
+ return YearMonth.of(year, month)
+ }
+ yearMonthError(trace)
+ }
+
+ def zonedDateTime(trace: List[JsonError], in: OneCharReader): ZonedDateTime = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos, year, month, day, hour, minute = 0
+ if (
+ pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ val yearNeg = ch0 == '-' || (ch0 != '+' && zonedDateTimeError(trace))
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while ({
+ if (pos >= i) zonedDateTimeError(trace)
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
+ }
+ } || pos + 5 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ val ch5 = cs(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
+ } || pos + 4 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ val ch2 = cs(pos + 2)
+ val ch3 = cs(pos + 3)
+ val ch4 = cs(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ } || pos >= i
+ ) zonedDateTimeError(trace)
+ var second, nano = 0
+ var ch = cs(pos)
+ pos += 1
+ if (ch == ':') {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ } || pos >= i
+ ) zonedDateTimeError(trace)
+ ch = cs(pos)
+ pos += 1
+ if (ch == '.') {
+ var nanoDigitWeight = 100000000
+ while ({
+ if (pos >= i) zonedDateTimeError(trace)
+ ch = cs(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ }
+ }
+ }
+ val localDateTime = LocalDateTime.of(year, month, day, hour, minute, second, nano)
+ val zoneOffset =
+ if (ch == 'Z') {
+ if (pos < i) {
+ ch = cs(pos)
+ if (ch != '[') zonedDateTimeError(trace)
+ pos += 1
+ }
+ ZoneOffset.UTC
+ } else {
+ val offsetNeg = ch == '-' || (ch != '+' && zonedDateTimeError(trace))
+ var offsetTotal = 0
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
+ }
+ ) zonedDateTimeError(trace)
+ if (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch == ':' || ch != '[' && zonedDateTimeError(trace)
+ }
+ ) {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) zonedDateTimeError(trace)
+ if (
+ pos < i && {
+ ch = cs(pos)
+ pos += 1
+ ch == ':' || ch != '[' && zonedDateTimeError(trace)
+ }
+ ) {
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) zonedDateTimeError(trace)
+ if (pos < i) {
+ ch = cs(pos)
+ if (ch != '[') zonedDateTimeError(trace)
+ pos += 1
+ }
+ }
+ }
+ if (offsetTotal > 64800) zonedDateTimeError(trace)
+ toZoneOffset(offsetNeg, offsetTotal)
+ }
+ if (ch == '[') {
+ var zoneId: ZoneId = null
+ val from = pos
+ while ({
+ if (pos >= i) zonedDateTimeError(trace)
+ ch = cs(pos)
+ ch != ']'
+ }) pos += 1
+ val key = new String(cs, from, pos - from)
+ zoneId = zoneIds.get(key)
+ if (
+ (zoneId eq null) && {
+ try zoneId = ZoneId.of(key)
+ catch {
+ case _: DateTimeException => zonedDateTimeError(trace)
+ }
+ !zoneId.isInstanceOf[ZoneOffset] || zoneId.asInstanceOf[ZoneOffset].getTotalSeconds % 900 == 0
+ }
+ ) zoneIds.put(key, zoneId)
+ if (pos + 1 == i) return ZonedDateTime.ofInstant(localDateTime, zoneOffset, zoneId)
+ } else {
+ if (pos == i) return ZonedDateTime.ofLocal(localDateTime, zoneOffset, null)
+ }
+ }
+ zonedDateTimeError(trace)
+ }
+
+ def zoneOffset(trace: List[JsonError], in: OneCharReader): ZoneOffset = {
+ if (in.nextNonWhitespace() == '"') {
+ val cs = charArrays.get
+ val i = readChars(trace, in, cs)
+ var pos = 0
+ if (pos >= i) zoneOffsetError(trace)
+ val ch = cs(pos)
+ pos += 1
+ if (ch == 'Z') {
+ if (pos == i) return ZoneOffset.UTC
+ } else {
+ val offsetNeg = ch == '-' || (ch != '+' && zoneOffsetError(trace))
+ var offsetTotal = 0
+ if (
+ pos + 1 >= i || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ pos += 2
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
+ }
+ ) zoneOffsetError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i
+ } || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) zoneOffsetError(trace)
+ if (pos < i) {
+ if (
+ cs(pos) != ':' || {
+ pos += 1
+ pos + 1 >= i
+ } || {
+ val ch0 = cs(pos)
+ val ch1 = cs(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) zoneOffsetError(trace)
+ }
+ }
+ if (offsetTotal <= 64800 && pos == i) return toZoneOffset(offsetNeg, offsetTotal)
+ }
+ }
+ zoneOffsetError(trace)
+ }
+
+ private[this] def readChars(trace: List[JsonError], in: OneCharReader, cs: Array[Char]): Int = {
+ val len = cs.length
+ var c = '0'
+ var i = 0
+ while ({
+ c = in.readChar()
+ i < len && c != '"'
+ }) {
+ if (c == '\\') c = nextEscaped(trace, in)
+ cs(i) = c
i += 1
}
+ i
}
-}
+ private[this] def toZoneOffset(offsetNeg: Boolean, offsetTotal: Int): ZoneOffset = {
+ var qp = offsetTotal * 37283
+ if ((qp & 0x1ff8000) == 0) { // check if offsetTotal divisible by 900
+ qp >>>= 25 // divide offsetTotal by 900
+ if (offsetNeg) qp = -qp
+ var zoneOffset = zoneOffsets(qp + 72)
+ if (zoneOffset ne null) zoneOffset
+ else {
+ zoneOffset = ZoneOffset.ofTotalSeconds(if (offsetNeg) -offsetTotal else offsetTotal)
+ zoneOffsets(qp + 72) = zoneOffset
+ zoneOffset
+ }
+ } else ZoneOffset.ofTotalSeconds(if (offsetNeg) -offsetTotal else offsetTotal)
+ }
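// Editor's sketch, not part of this patch: a brute-force check of the arithmetic in
// toZoneOffset above. offsetTotal * 37283 has zero bits 15..24 exactly when offsetTotal
// (at most 64800) is a multiple of 900, because 37283 * 900 == (1 << 25) + 268 and both
// 268 * 72 and 37283 * 899 stay below 1 << 25; the >>> 25 then recovers offsetTotal / 900.
object DivideBy900TrickCheck {
  def main(args: Array[String]): Unit = {
    var offsetTotal = 0
    while (offsetTotal <= 64800) {
      val qp = offsetTotal * 37283
      assert(((qp & 0x1ff8000) == 0) == (offsetTotal % 900 == 0))
      if (offsetTotal % 900 == 0) assert((qp >>> 25) == offsetTotal / 900)
      offsetTotal += 1
    }
    println("divide-by-900 trick verified for 0..64800")
  }
}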
-// A Reader for the contents of a string, taking care of the escaping.
-//
-// `read` can throw extra exceptions on badly formed input.
-private final class EscapedString(trace: List[JsonError], in: OneCharReader) extends java.io.Reader with OneCharReader {
-
- def close(): Unit = in.close()
-
- private[this] var escaped = false
-
- override def read(): Int = {
- val c = in.readChar()
- if (escaped) {
- escaped = false
- (c: @switch) match {
- case '"' | '\\' | '/' => c.toInt
- case 'b' => '\b'.toInt
- case 'f' => '\f'.toInt
- case 'n' => '\n'.toInt
- case 'r' => '\r'.toInt
- case 't' => '\t'.toInt
- case 'u' => nextHex4()
- case _ =>
- throw UnsafeJson(
- JsonError.Message(s"invalid '\\${c.toChar}' in string") :: trace
- )
+ private[this] def sumSeconds(s1: Long, s2: Long, trace: List[JsonError]): Long = {
+ val s = s1 + s2
+ if (((s1 ^ s) & (s2 ^ s)) < 0) durationError(trace)
+ s
+ }
+
+ private[this] def epochDayForYear(year: Int): Long =
+ year * 365L + ((year + 3 >> 2) - {
+ val cp = year * 1374389535L
+ if (year < 0) (cp >> 37) - (cp >> 39) // year / 100 - year / 400
+ else (cp + 136064563965L >> 37) - (cp + 548381424465L >> 39) // (year + 99) / 100 - (year + 399) / 400
+ }.toInt)
+
+ private[this] def dayOfYearForYearMonth(year: Int, month: Int): Int =
+ (month * 1002277 - 988622 >> 15) - // (month * 367 - 362) / 12
+ (if (month <= 2) 0
+ else if (isLeap(year)) 1
+ else 2)
+
+ private[this] def maxDayForMonth(month: Int): Int =
+ if (month != 2) ((month >> 3) ^ (month & 0x1)) + 30
+ else 29
+
+ private[this] def maxDayForYearMonth(year: Int, month: Int): Int =
+ if (month != 2) ((month >> 3) ^ (month & 0x1)) + 30
+ else if (isLeap(year)) 29
+ else 28
+
+ private[this] def isLeap(year: Int): Boolean = (year & 0x3) == 0 && { // (year % 100 != 0 || year % 400 == 0)
+ val cp = year * 1374389535L
+ val cc = year >> 31
+ ((cp ^ cc) & 0x1fc0000000L) != 0 || (((cp >> 37).toInt - cc) & 0x3) == 0
+ }
+
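// Editor's cross-check, not part of this patch: the calendar helpers above re-derived with
// the plain formulas their comments name and compared against java.time for 1900..2100.
// The helpers themselves are private, so this sketch only mirrors their arithmetic.
import java.time.{ LocalDate, Year, YearMonth }

object CalendarHelpersCheck {
  def main(args: Array[String]): Unit = {
    var year = 1900
    while (year <= 2100) {
      // epochDayForYear: days from 0000-01-01 to year-01-01, i.e. toEpochDay shifted by 719528
      val epochDayForYear = year * 365L + ((year + 3) / 4) - ((year + 99) / 100 - (year + 399) / 400)
      assert(epochDayForYear == LocalDate.of(year, 1, 1).toEpochDay + 719528)
      var month = 1
      while (month <= 12) {
        // (month * 367 - 362) / 12 is the zero-based day-of-year of the 1st of the month in a
        // leap year; subtract 1 (leap) or 2 (non-leap) after February, as dayOfYearForYearMonth does.
        val firstOfMonth = (month * 367 - 362) / 12 -
          (if (month <= 2) 0 else if (Year.isLeap(year)) 1 else 2)
        assert(firstOfMonth == LocalDate.of(year, month, 1).getDayOfYear - 1)
        // maxDayForYearMonth's branch-free month-length expression
        assert(YearMonth.of(year, month).lengthOfMonth ==
          (if (month != 2) ((month >> 3) ^ (month & 0x1)) + 30
           else if (Year.isLeap(year)) 29 else 28))
        month += 1
      }
      year += 1
    }
    println("calendar formulas agree with java.time for 1900..2100")
  }
}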
+ @noinline private[this] def uuidError(trace: List[JsonError]): Nothing = error("expected a UUID", trace)
+
+ @noinline private[this] def durationError(trace: List[JsonError]): Nothing = error("expected a Duration", trace)
+
+ @noinline private[this] def instantError(trace: List[JsonError]): Nothing = error("expected an Instant", trace)
+
+ @noinline private[this] def localDateError(trace: List[JsonError]): Nothing = error("expected a LocalDate", trace)
+
+ @noinline private[this] def localDateTimeError(trace: List[JsonError]): Nothing =
+ error("expected a LocalDateTime", trace)
+
+ @noinline private[this] def localTimeError(trace: List[JsonError]): Nothing = error("expected a LocalTime", trace)
+
+ @noinline private[this] def monthDayError(trace: List[JsonError]): Nothing = error("expected a MonthDay", trace)
+
+ @noinline private[this] def offsetDateTimeError(trace: List[JsonError]): Nothing =
+ error("expected an OffsetDateTime", trace)
+
+ @noinline private[this] def offsetTimeError(trace: List[JsonError]): Nothing = error("expected an OffsetTime", trace)
+
+ @noinline private[this] def periodError(trace: List[JsonError]): Nothing = error("expected a Period", trace)
+
+ @noinline private[this] def yearError(trace: List[JsonError]): Nothing = error("expected a Year", trace)
+
+ @noinline private[this] def yearMonthError(trace: List[JsonError]): Nothing = error("expected a YearMonth", trace)
+
+ @noinline private[this] def zonedDateTimeError(trace: List[JsonError]): Nothing =
+ error("expected a ZonedDateTime", trace)
+
+ @noinline private[this] def zoneIdError(trace: List[JsonError]): Nothing = error("expected a ZoneId", trace)
+
+ @noinline private[this] def zoneOffsetError(trace: List[JsonError]): Nothing = error("expected a ZoneOffset", trace)
+
+ private[this] val charArrays = new ThreadLocal[Array[Char]] {
+ override def initialValue(): Array[Char] = new Array[Char](1024)
+ }
+
+ private[this] val hexDigits: Array[Byte] = {
+ val ns = new Array[Byte](256)
+ java.util.Arrays.fill(ns, -1: Byte)
+ ns('0') = 0
+ ns('1') = 1
+ ns('2') = 2
+ ns('3') = 3
+ ns('4') = 4
+ ns('5') = 5
+ ns('6') = 6
+ ns('7') = 7
+ ns('8') = 8
+ ns('9') = 9
+ ns('A') = 10
+ ns('B') = 11
+ ns('C') = 12
+ ns('D') = 13
+ ns('E') = 14
+ ns('F') = 15
+ ns('a') = 10
+ ns('b') = 11
+ ns('c') = 12
+ ns('d') = 13
+ ns('e') = 14
+ ns('f') = 15
+ ns
+ }
+
+ private[this] final val zoneOffsets: Array[ZoneOffset] = new Array(145)
+
+ private[this] final val zoneIds: ConcurrentHashMap[String, ZoneId] = new ConcurrentHashMap(256)
+
+ def char(trace: List[JsonError], in: OneCharReader): Char = {
+ var c = in.nextNonWhitespace()
+ if (c != '"') error("'\"'", c, trace)
+ c = in.readChar()
+ if (
+ c == '"' || {
+ if (c == '\\') c = nextEscaped(trace, in)
+ else if (c < ' ') error("invalid control in string", trace)
+ in.readChar() != '"'
}
- } else if (c == '\\') {
- escaped = true
- read()
- } else if (c == '"') -1 // this is the EOS for the caller
- else if (c < ' ')
- throw UnsafeJson(JsonError.Message("invalid control in string") :: trace)
- else c.toInt
- }
-
- // callers expect to get an EOB so this is rare
- def readChar(): Char = {
- val v = read()
- if (v == -1) throw new UnexpectedEnd
- v.toChar
- }
-
- // consumes 4 hex characters after current
- def nextHex4(): Int = {
- var i: Int = 0
- var accum: Int = 0
+ ) error("expected single character string", trace)
+ c
+ }
+
+ @noinline private[this] def nextEscaped(trace: List[JsonError], in: OneCharReader): Char =
+ (in.readChar(): @switch) match {
+ case '"' => '"'
+ case '\\' => '\\'
+ case '/' => '/'
+ case 'b' => '\b'
+ case 'f' => '\f'
+ case 'n' => '\n'
+ case 'r' => '\r'
+ case 't' => '\t'
+ case 'u' => nextHex4(trace, in)
+ case c => error(c, trace)
+ }
+
+ private[this] def nextHex4(trace: List[JsonError], in: OneCharReader): Char = {
+ var i, accum = 0
while (i < 4) {
- var c: Int = in.read()
- if (c == -1)
- throw UnsafeJson(JsonError.Message("unexpected EOB in string") :: trace)
- c =
- if ('0' <= c && c <= '9') c - '0'
- else if ('A' <= c && c <= 'F') c - 'A' + 10
- else if ('a' <= c && c <= 'f') c - 'a' + 10
- else
- throw UnsafeJson(
- JsonError.Message("invalid charcode in string") :: trace
- )
- accum = accum * 16 + c
+ val c = in.readChar() | 0x20
+ accum = (accum << 4) + c
i += 1
+ if ('0' <= c && c <= '9') accum -= 48
+ else if ('a' <= c && c <= 'f') accum -= 87
+ else error("invalid charcode in string", trace)
+ }
+ accum.toChar
+ }
+
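// Editor's sketch, not part of this patch: the case-folding used in nextHex4 above. OR-ing an
// ASCII character with 0x20 maps 'A'..'F' onto 'a'..'f' and leaves '0'..'9' and 'a'..'f'
// unchanged, so a single lowercase range check suffices; 48 == '0' and 87 == 'a' - 10.
object HexFoldingCheck {
  def hexDigit(ch: Char): Int = {
    val c = ch | 0x20
    if ('0' <= c && c <= '9') c - 48
    else if ('a' <= c && c <= 'f') c - 87
    else sys.error(s"not a hex digit: $ch")
  }

  def main(args: Array[String]): Unit = {
    assert("0123456789abcdefABCDEF".forall(ch => hexDigit(ch) == Character.digit(ch, 16)))
    // four folded digits are shifted into an accumulator, as nextHex4 does for \ u escapes:
    val accum = "00Fa".foldLeft(0)((acc, ch) => (acc << 4) + hexDigit(ch))
    assert(accum.toChar == '\u00fa')
    println("hex folding verified")
  }
}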
+ def dayOfWeek(trace: List[JsonError], in: OneCharReader): DayOfWeek = {
+ var c = in.nextNonWhitespace()
+ if (c == '"') {
+ var bs = dayOfWeekMatrix.initial
+ var i = 0
+ while ({
+ c = in.readChar()
+ c != '"'
+ }) {
+ if (c == '\\') c = nextEscaped(trace, in)
+ bs = dayOfWeekMatrix.update(bs, i, (c & 0xffdf).toChar)
+ i += 1
+ }
+ val dayOfWeek = dayOfWeekMatrix.first(dayOfWeekMatrix.exact(bs, i)) + 1
+ if (dayOfWeek > 0) return DayOfWeek.of(dayOfWeek)
+ }
+ error("expected a DayOfWeek", trace)
+ }
+
+ private[this] val dayOfWeekMatrix = new StringMatrix(DayOfWeek.values.map(_.toString))
+
+ def month(trace: List[JsonError], in: OneCharReader): Month = {
+ var c = in.nextNonWhitespace()
+ if (c == '"') {
+ var bs = monthMatrix.initial
+ var i = 0
+ while ({
+ c = in.readChar()
+ c != '"'
+ }) {
+ if (c == '\\') c = nextEscaped(trace, in)
+ bs = monthMatrix.update(bs, i, (c & 0xffdf).toChar)
+ i += 1
+ }
+ val month = monthMatrix.first(monthMatrix.exact(bs, i)) + 1
+ if (month > 0) return Month.of(month)
}
- accum
+ error("expected a Month", trace)
}
+ private[this] val monthMatrix = new StringMatrix(Month.values.map(_.toString))
+
+ @inline def char(trace: List[JsonError], in: OneCharReader, c: Char): Unit = {
+ val got = in.nextNonWhitespace()
+ if (got != c) error(s"'$c'", got, trace)
+ }
+
+ // FIXME: remove on next major version release
+ @inline def charOnly(trace: List[JsonError], in: OneCharReader, c: Char): Unit = {
+ val got = in.readChar()
+ if (got != c) error(s"'$c'", got, trace)
+ }
+
+ @inline private[this] def isNumber(c: Char): Boolean =
+ (c: @switch) match {
+ case '+' | '-' | '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '.' | 'e' | 'E' => true
+ case _ => false
+ }
+
+ def readChars(trace: List[JsonError], in: OneCharReader, expect: Array[Char], errMsg: String): Unit = {
+ var i: Int = 0
+ while (i < expect.length) {
+ if (in.readChar() != expect(i)) error(s"expected '$errMsg'", trace)
+ i += 1
+ }
+ }
}
// A data structure encoding a simple algorithm for Trie pruning: Given a list
// of strings, and a sequence of incoming characters, find the strings that
// match, by manually maintaining a bitset. Empty strings are not allowed.
-final class StringMatrix(val xs: Array[String], aliases: Array[(String, Int)] = Array.empty) {
- require(xs.forall(_.nonEmpty))
- require(xs.nonEmpty)
- require(xs.length + aliases.length < 64)
- require(aliases.forall(_._1.nonEmpty))
- require(aliases.forall(p => p._2 >= 0 && p._2 < xs.length))
-
- val width = xs.length + aliases.length
- val height: Int = xs.map(_.length).max max (if (aliases.isEmpty) 0 else aliases.map(_._1.length).max)
- val lengths: Array[Int] = xs.map(_.length) ++ aliases.map(_._1.length)
- val initial: Long = (0 until width).foldLeft(0L)((bs, r) => bs | (1L << r))
-
- private val matrix: Array[Int] = {
- val m = Array.fill[Int](width * height)(-1)
- var string: Int = 0
- while (string < width) {
- val s = if (string < xs.length) xs(string) else aliases(string - xs.length)._1
- val len = s.length
- var char: Int = 0
+final class StringMatrix(names: Array[String], aliases: Array[(String, Int)] = Array.empty) {
+ val namesLen: Int = names.length
+ private[this] val width: Int = namesLen + aliases.length
+ val initial: Long = -1L >>> (64 - width)
+ private[this] val lengths: Array[Int] = {
+ require(namesLen > 0 && width <= 64)
+ val ls = new Array[Int](width)
+ var string = 0
+ while (string < namesLen) {
+ val l = names(string).length
+ if (l == 0) require(false)
+ ls(string) = l
+ string += 1
+ }
+ while (string < ls.length) {
+ val l = aliases(string - namesLen)._1.length
+ if (l == 0) require(false)
+ ls(string) = l
+ string += 1
+ }
+ ls
+ }
+ private[this] val height: Int = lengths.max
+ private[this] val matrix: Array[Char] = {
+ val w = width
+ val m = new Array[Char](height * w)
+ var string = 0
+ while (string < w) {
+ val s =
+ if (string < namesLen) names(string)
+ else aliases(string - namesLen)._1
+ val len = s.length
+ var char, base = 0
while (char < len) {
- m(width * char + string) = s.codePointAt(char)
+ m(base + string) = s.charAt(char)
+ base += w
char += 1
}
string += 1
}
m
}
-
- private val resolve: Array[Int] = {
- val r = Array.tabulate[Int](xs.length + aliases.length)(identity)
- aliases.zipWithIndex.foreach { case ((_, pi), i) => r(xs.length + i) = pi }
- r
+ private[this] val resolvers: Array[Byte] = {
+ val rs = new Array[Byte](width)
+ var string = 0
+ while (string < namesLen) {
+ rs(string) = string.toByte
+ string += 1
+ }
+ while (string < rs.length) {
+ val x = aliases(string - namesLen)._2
+ if (x < 0 || x >= namesLen) require(false)
+ rs(string) = x.toByte
+ string += 1
+ }
+ rs
}
// must be called with increasing `char` (starting with bitset obtained from a
// call to 'initial', char = 0)
- def update(bitset: Long, char: Int, c: Int): Long =
- if (char >= height) 0L // too long
- else if (bitset == 0L) 0L // everybody lost
- else {
- var latest: Long = bitset
- val base: Int = width * char
-
- if (bitset == initial) { // special case when it is dense since it is simple
- var string: Int = 0
- while (string < width) {
- if (matrix(base + string) != c)
- latest = latest ^ (1L << string)
+ def update(bitset: Long, char: Int, c: Char): Long =
+ if (char < height) {
+ val w = width
+ val m = matrix
+ val base = char * w
+ var latest = bitset
+ if (initial == bitset) { // special case when it is dense since it is simple
+ var string = 0
+ while (string < w) {
+ if (m(base + string) != c) latest ^= 1L << string
string += 1
}
} else {
- var remaining: Long = bitset
+ var remaining = bitset
while (remaining != 0L) {
- val string: Int = java.lang.Long.numberOfTrailingZeros(remaining)
- val bit: Long = 1L << string
- if (matrix(base + string) != c)
- latest = latest ^ bit
- remaining = remaining ^ bit
+ val string = java.lang.Long.numberOfTrailingZeros(remaining)
+ val bit = 1L << string
+ remaining ^= bit
+ if (m(base + string) != c) latest ^= bit
}
}
-
latest
- }
+ } else 0L // too long
// excludes entries that are not the given exact length
def exact(bitset: Long, length: Int): Long =
- if (length > height) 0L // too long
- else {
- var latest: Long = bitset
- var remaining: Long = bitset
+ if (length <= height) {
+ var remaining, latest = bitset
+ val ls = lengths
while (remaining != 0L) {
- val string: Int = java.lang.Long.numberOfTrailingZeros(remaining)
- val bit: Long = 1L << string
- if (lengths(string) != length)
- latest = latest ^ bit
- remaining = remaining ^ bit
+ val string = java.lang.Long.numberOfTrailingZeros(remaining)
+ val bit = 1L << string
+ remaining ^= bit
+ if (ls(string) != length) latest ^= bit
}
latest
- }
+ } else 0L // too long
def first(bitset: Long): Int =
- if (bitset == 0L) -1
- else resolve(java.lang.Long.numberOfTrailingZeros(bitset)) // never returns 64
+ if (bitset != 0L) resolvers(java.lang.Long.numberOfTrailingZeros(bitset)).toInt // never returns 64
+ else -1
}
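// Editor's illustration, not part of this patch (assumes it compiles where
// zio.json.internal.StringMatrix is visible). It mirrors how the dayOfWeek/month decoders
// above use the bitset API: start from `initial`, call `update` with strictly increasing
// char indices, then narrow with `exact` and take `first`; the & 0xffdf fold upper-cases
// ASCII so matching is case-insensitive.
import zio.json.internal.StringMatrix

object StringMatrixExample {
  def main(args: Array[String]): Unit = {
    val matrix = new StringMatrix(Array("MONDAY", "TUESDAY", "WEDNESDAY"))
    val input  = "tuesday"
    var bs     = matrix.initial
    var i      = 0
    while (i < input.length) {
      bs = matrix.update(bs, i, (input.charAt(i) & 0xffdf).toChar) // fold to upper case
      i += 1
    }
    val index = matrix.first(matrix.exact(bs, i)) // -1 when no name matched exactly
    assert(index == 1, "expected TUESDAY")
    println(s"matched name #$index")
  }
}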
diff --git a/zio-json/shared/src/main/scala/zio/json/internal/numbers.scala b/zio-json/shared/src/main/scala/zio/json/internal/numbers.scala
index c869f113e..84eceb3b2 100644
--- a/zio-json/shared/src/main/scala/zio/json/internal/numbers.scala
+++ b/zio-json/shared/src/main/scala/zio/json/internal/numbers.scala
@@ -15,564 +15,6 @@
*/
package zio.json.internal
-import java.io._
-import scala.util.control.NoStackTrace
-
-/**
- * Total, fast, number parsing.
- *
- * The Java and Scala standard libraries throw exceptions when we attempt to
- * parse an invalid number. Unfortunately, exceptions are very expensive, and
- * untrusted data can be maliciously constructed to DOS a server.
- *
- * This suite of functions mitigates against such attacks by building up the
- * numbers one character at a time, which has been shown through extensive
- * benchmarking to be orders of magnitude faster than exception-throwing stdlib
- * parsers, for valid and invalid inputs. This approach, proposed by alexknvl,
- * was also benchmarked against regexp-based pre-validation.
- *
- * Note that although the behaviour is identical to the Java stdlib when given
- * the canonical form of a primitive (i.e. the .toString) of a number there may
- * be differences in behaviour for non-canonical forms. e.g. the Java stdlib
- * may reject "1.0" when parsed as an `BigInteger` but we may parse it as a
- * `1`, although "1.1" would be rejected. Parsing of `BigDecimal` preserves the
- * trailing zeros on the right but not on the left, e.g. "000.00001000" will be
- * "1.000e-5", which is useful in cases where the trailing zeros denote
- * measurement accuracy.
- *
- * `BigInteger`, `BigDecimal`, `Float` and `Double` have a configurable bit
- * limit on the size of the significand, to avoid OOM style attacks, which is
- * 128 bits by default.
- *
- * Results are contained in a specialisation of Option that avoids boxing.
- */
-// TODO hex radix
-// TODO octal radix
-object SafeNumbers {
- import UnsafeNumbers.UnsafeNumber
-
- def byte(num: String): ByteOption =
- try ByteSome(UnsafeNumbers.byte(num))
- catch { case UnsafeNumber => ByteNone }
-
- def short(num: String): ShortOption =
- try ShortSome(UnsafeNumbers.short(num))
- catch { case UnsafeNumber => ShortNone }
-
- def int(num: String): IntOption =
- try IntSome(UnsafeNumbers.int(num))
- catch { case UnsafeNumber => IntNone }
-
- def long(num: String): LongOption =
- try LongSome(UnsafeNumbers.long(num))
- catch { case UnsafeNumber => LongNone }
-
- def bigInteger(
- num: String,
- max_bits: Int = 128
- ): Option[java.math.BigInteger] =
- try Some(UnsafeNumbers.bigInteger(num, max_bits))
- catch { case UnsafeNumber => None }
-
- def float(num: String, max_bits: Int = 128): FloatOption =
- try FloatSome(UnsafeNumbers.float(num, max_bits))
- catch { case UnsafeNumber => FloatNone }
-
- def double(num: String, max_bits: Int = 128): DoubleOption =
- try DoubleSome(UnsafeNumbers.double(num, max_bits))
- catch { case UnsafeNumber => DoubleNone }
-
- def bigDecimal(
- num: String,
- max_bits: Int = 128
- ): Option[java.math.BigDecimal] =
- try Some(UnsafeNumbers.bigDecimal(num, max_bits))
- catch { case UnsafeNumber => None }
-
- // Based on the amazing work of Raffaello Giulietti
- // "The Schubfach way to render doubles": https://drive.google.com/file/d/1luHhyQF9zKlM8yJ1nebU0OgVYhfC6CBN/view
- // Sources with the license are here: https://github.com/c4f7fcce9cb06515/Schubfach/blob/3c92d3c9b1fead540616c918cdfef432bca53dfa/todec/src/math/DoubleToDecimal.java
- def toString(x: Double): String = {
- val bits = java.lang.Double.doubleToLongBits(x)
- val ieeeExponent = (bits >> 52).toInt & 0x7ff
- val ieeeMantissa = bits & 0xfffffffffffffL
- if (ieeeExponent == 2047) {
- if (x != x) """"NaN""""
- else if (bits < 0) """"-Infinity""""
- else """"Infinity""""
- } else {
- val s = new java.lang.StringBuilder(24)
- if (bits < 0) s.append('-')
- if (x == 0.0f) s.append('0').append('.').append('0')
- else {
- var e = ieeeExponent - 1075
- var m = ieeeMantissa | 0x10000000000000L
- var dv = 0L
- var exp = 0
- if (e == 0) dv = m
- else if (e >= -52 && e < 0 && m << e == 0) dv = m >> -e
- else {
- var expShift, expCorr = 0
- var cblShift = 2
- if (ieeeExponent == 0) {
- e = -1074
- m = ieeeMantissa
- if (ieeeMantissa < 3) {
- m *= 10
- expShift = 1
- }
- } else if (ieeeMantissa == 0 && ieeeExponent > 1) {
- expCorr = 131007
- cblShift = 1
- }
- exp = e * 315653 - expCorr >> 20
- val i = exp + 324 << 1
- val g1 = gs(i)
- val g0 = gs(i + 1)
- val h = (-exp * 108853 >> 15) + e + 2
- val cb = m << 2
- val outm1 = (m.toInt & 0x1) - 1
- val vb = rop(g1, g0, cb << h)
- val vbls = rop(g1, g0, cb - cblShift << h) + outm1
- val vbrd = outm1 - rop(g1, g0, cb + 2 << h)
- val s = vb >> 2
- if (
- s < 100 || {
- dv = s / 10 // FIXME: Use Math.multiplyHigh(s, 1844674407370955168L) instead after dropping JDK 8 support
- val sp40 = dv * 40
- val upin = (vbls - sp40).toInt
- (((sp40 + vbrd).toInt + 40) ^ upin) >= 0 || {
- dv += ~upin >>> 31
- exp += 1
- false
- }
- }
- ) {
- val s4 = s << 2
- val uin = (vbls - s4).toInt
- dv = (~ {
- if ((((s4 + vbrd).toInt + 4) ^ uin) < 0) uin
- else (vb.toInt & 0x3) + (s.toInt & 0x1) - 3
- } >>> 31) + s
- exp -= expShift
- }
- }
- val len = digitCount(dv)
- exp += len - 1
- if (exp < -3 || exp >= 7) {
- val dotOff = s.length + 1
- s.append(dv)
- var i = s.length - 1
- while (i > dotOff && s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s.insert(dotOff, '.').append('E').append(exp)
- } else if (exp < 0) {
- s.append('0').append('.')
- while ({
- exp += 1
- exp != 0
- }) s.append('0')
- s.append(dv)
- var i = s.length - 1
- while (s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s
- } else if (exp + 1 < len) {
- val dotOff = s.length + exp + 1
- s.append(dv)
- var i = s.length - 1
- while (s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s.insert(dotOff, '.')
- } else s.append(dv).append('.').append('0')
- }
- }.toString
- }
-
- def toString(x: Float): String = {
- val bits = java.lang.Float.floatToIntBits(x)
- val ieeeExponent = (bits >> 23) & 0xff
- val ieeeMantissa = bits & 0x7fffff
- if (ieeeExponent == 255) {
- if (x != x) """"NaN""""
- else if (bits < 0) """"-Infinity""""
- else """"Infinity""""
- } else {
- val s = new java.lang.StringBuilder(16)
- if (bits < 0) s.append('-')
- if (x == 0.0f) s.append('0').append('.').append('0')
- else {
- var e = ieeeExponent - 150
- var m = ieeeMantissa | 0x800000
- var dv, exp = 0
- if (e == 0) dv = m
- else if (e >= -23 && e < 0 && m << e == 0) dv = m >> -e
- else {
- var expShift, expCorr = 0
- var cblShift = 2
- if (ieeeExponent == 0) {
- e = -149
- m = ieeeMantissa
- if (ieeeMantissa < 8) {
- m *= 10
- expShift = 1
- }
- } else if (ieeeMantissa == 0 && ieeeExponent > 1) {
- expCorr = 131007
- cblShift = 1
- }
- exp = e * 315653 - expCorr >> 20
- val g1 = gs(exp + 324 << 1) + 1
- val h = (-exp * 108853 >> 15) + e + 1
- val cb = m << 2
- val outm1 = (m & 0x1) - 1
- val vb = rop(g1, cb << h)
- val vbls = rop(g1, cb - cblShift << h) + outm1
- val vbrd = outm1 - rop(g1, cb + 2 << h)
- val s = vb >> 2
- if (
- s < 100 || {
- dv = (s * 3435973837L >>> 35).toInt // divide a positive int by 10
- val sp40 = dv * 40
- val upin = vbls - sp40
- ((sp40 + vbrd + 40) ^ upin) >= 0 || {
- dv += ~upin >>> 31
- exp += 1
- false
- }
- }
- ) {
- val s4 = s << 2
- val uin = vbls - s4
- dv = (~ {
- if (((s4 + vbrd + 4) ^ uin) < 0) uin
- else (vb & 0x3) + (s & 0x1) - 3
- } >>> 31) + s
- exp -= expShift
- }
- }
- val len = digitCount(dv.toLong)
- exp += len - 1
- if (exp < -3 || exp >= 7) {
- val dotOff = s.length + 1
- s.append(dv)
- var i = s.length - 1
- while (i > dotOff && s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s.insert(dotOff, '.').append('E').append(exp)
- } else if (exp < 0) {
- s.append('0').append('.')
- while ({
- exp += 1
- exp != 0
- }) s.append('0')
- s.append(dv)
- var i = s.length - 1
- while (s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s
- } else if (exp + 1 < len) {
- val dotOff = s.length + exp + 1
- s.append(dv)
- var i = s.length - 1
- while (s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s.insert(dotOff, '.')
- } else s.append(dv).append('.').append('0')
- }
- }.toString
- }
-
- private[this] def rop(g1: Long, g0: Long, cp: Long): Long = {
- val x1 = multiplyHigh(g0, cp) // FIXME: Use Math.multiplyHigh after dropping JDK 8 support
- val z = (g1 * cp >>> 1) + x1
- val y1 = multiplyHigh(g1, cp) // FIXME: Use Math.multiplyHigh after dropping JDK 8 support
- (z >>> 63) + y1 | -(z & 0x7fffffffffffffffL) >>> 63
- }
-
- private[this] def rop(g: Long, cp: Int): Int = {
- val x1 =
- ((g & 0xffffffffL) * cp >>> 32) + (g >>> 32) * cp // FIXME: Use Math.multiplyHigh after dropping JDK 8 support
- (x1 >>> 31).toInt | -x1.toInt >>> 31
- }
-
- private[this] def multiplyHigh(x: Long, y: Long): Long = {
- val x2 = x & 0xffffffffL
- val y2 = y & 0xffffffffL
- val b = x2 * y2
- val x1 = x >>> 32
- val y1 = y >>> 32
- val a = x1 * y1
- (((b >>> 32) + (x1 + x2) * (y1 + y2) - b - a) >>> 32) + a
- }
-
- // Adoption of a nice trick form Daniel Lemire's blog that works for numbers up to 10^18:
- // https://lemire.me/blog/2021/06/03/computing-the-number-of-digits-of-an-integer-even-faster/
- private[this] def digitCount(x: Long): Int = (offsets(java.lang.Long.numberOfLeadingZeros(x)) + x >> 58).toInt
-
- private final val offsets = Array(
- 5088146770730811392L, 5088146770730811392L, 5088146770730811392L, 5088146770730811392L, 5088146770730811392L,
- 5088146770730811392L, 5088146770730811392L, 5088146770730811392L, 4889916394579099648L, 4889916394579099648L,
- 4889916394579099648L, 4610686018427387904L, 4610686018427387904L, 4610686018427387904L, 4610686018427387904L,
- 4323355642275676160L, 4323355642275676160L, 4323355642275676160L, 4035215266123964416L, 4035215266123964416L,
- 4035215266123964416L, 3746993889972252672L, 3746993889972252672L, 3746993889972252672L, 3746993889972252672L,
- 3458764413820540928L, 3458764413820540928L, 3458764413820540928L, 3170534127668829184L, 3170534127668829184L,
- 3170534127668829184L, 2882303760517117440L, 2882303760517117440L, 2882303760517117440L, 2882303760517117440L,
- 2594073385265405696L, 2594073385265405696L, 2594073385265405696L, 2305843009203693952L, 2305843009203693952L,
- 2305843009203693952L, 2017612633060982208L, 2017612633060982208L, 2017612633060982208L, 2017612633060982208L,
- 1729382256910170464L, 1729382256910170464L, 1729382256910170464L, 1441151880758548720L, 1441151880758548720L,
- 1441151880758548720L, 1152921504606845976L, 1152921504606845976L, 1152921504606845976L, 1152921504606845976L,
- 864691128455135132L, 864691128455135132L, 864691128455135132L, 576460752303423478L, 576460752303423478L,
- 576460752303423478L, 576460752303423478L, 576460752303423478L, 576460752303423478L, 576460752303423478L
- )
-
- private[this] val gs: Array[Long] = Array(
- 5696189077778435540L, 6557778377634271669L, 9113902524445496865L, 1269073367360058862L, 7291122019556397492L,
- 1015258693888047090L, 5832897615645117993L, 6346230177223303157L, 4666318092516094394L, 8766332956520552849L,
- 7466108948025751031L, 8492109508320019073L, 5972887158420600825L, 4949013199285060097L, 4778309726736480660L,
- 3959210559428048077L, 7645295562778369056L, 6334736895084876923L, 6116236450222695245L, 3223115108696946377L,
- 4892989160178156196L, 2578492086957557102L, 7828782656285049914L, 436238524390181040L, 6263026125028039931L,
- 2193665226883099993L, 5010420900022431944L, 9133629810990300641L, 8016673440035891111L, 9079784475471615541L,
- 6413338752028712889L, 5419153173006337271L, 5130671001622970311L, 6179996945776024979L, 8209073602596752498L,
- 6198646298499729642L, 6567258882077401998L, 8648265853541694037L, 5253807105661921599L, 1384589460720489745L,
- 8406091369059074558L, 5904691951894693915L, 6724873095247259646L, 8413102376257665455L, 5379898476197807717L,
- 4885807493635177203L, 8607837561916492348L, 438594360332462878L, 6886270049533193878L, 4040224303007880625L,
- 5509016039626555102L, 6921528257148214824L, 8814425663402488164L, 3695747581953323071L, 7051540530721990531L,
- 4801272472933613619L, 5641232424577592425L, 1996343570975935733L, 9025971879324147880L, 3194149713561497173L,
- 7220777503459318304L, 2555319770849197738L, 5776622002767454643L, 3888930224050313352L, 4621297602213963714L,
- 6800492993982161005L, 7394076163542341943L, 5346765568258592123L, 5915260930833873554L, 7966761269348784022L,
- 4732208744667098843L, 8218083422849982379L, 7571533991467358150L, 2080887032334240837L, 6057227193173886520L,
- 1664709625867392670L, 4845781754539109216L, 1331767700693914136L, 7753250807262574745L, 7664851543223128102L,
- 6202600645810059796L, 6131881234578502482L, 4962080516648047837L, 3060830580291846824L, 7939328826636876539L,
- 6742003335837910079L, 6351463061309501231L, 7238277076041283225L, 5081170449047600985L, 3945947253462071419L,
- 8129872718476161576L, 6313515605539314269L, 6503898174780929261L, 3206138077060496254L, 5203118539824743409L,
- 720236054277441842L, 8324989663719589454L, 4841726501585817270L, 6659991730975671563L, 5718055608639608977L,
- 5327993384780537250L, 8263793301653597505L, 8524789415648859601L, 3998697245790980200L, 6819831532519087681L,
- 1354283389261828999L, 5455865226015270144L, 8462124340893283845L, 8729384361624432231L, 8005375723316388668L,
- 6983507489299545785L, 4559626171282155773L, 5586805991439636628L, 3647700937025724618L, 8938889586303418605L,
- 3991647091870204227L, 7151111669042734884L, 3193317673496163382L, 5720889335234187907L, 4399328546167885867L,
- 9153422936374700651L, 8883600081239572549L, 7322738349099760521L, 5262205657620702877L, 5858190679279808417L,
- 2365090118725607140L, 4686552543423846733L, 7426095317093351197L, 7498484069478154774L, 813706063123630946L,
- 5998787255582523819L, 2495639257869859918L, 4799029804466019055L, 3841185813666843096L, 7678447687145630488L,
- 6145897301866948954L, 6142758149716504390L, 8606066656235469486L, 4914206519773203512L, 6884853324988375589L,
- 7862730431637125620L, 3637067690497580296L, 6290184345309700496L, 2909654152398064237L, 5032147476247760397L,
- 483048914547496228L, 8051435961996416635L, 2617552670646949126L, 6441148769597133308L, 2094042136517559301L,
- 5152919015677706646L, 5364582523955957764L, 8244670425084330634L, 4893983223587622099L, 6595736340067464507L,
- 5759860986241052841L, 5276589072053971606L, 918539974250931950L, 8442542515286354569L, 7003687180914356604L,
- 6754034012229083655L, 7447624152102440445L, 5403227209783266924L, 5958099321681952356L, 8645163535653227079L,
- 3998935692578258285L, 6916130828522581663L, 5043822961433561789L, 5532904662818065330L, 7724407183888759755L,
- 8852647460508904529L, 3135679457367239799L, 7082117968407123623L, 4353217973264747001L, 5665694374725698898L,
- 7171923193353707924L, 9065110999561118238L, 407030665140201709L, 7252088799648894590L, 4014973346854071690L,
- 5801671039719115672L, 3211978677483257352L, 4641336831775292537L, 8103606164099471367L, 7426138930840468060L,
- 5587072233075333540L, 5940911144672374448L, 4469657786460266832L, 4752728915737899558L, 7265075043910123789L,
- 7604366265180639294L, 556073626030467093L, 6083493012144511435L, 2289533308195328836L, 4866794409715609148L,
- 1831626646556263069L, 7786871055544974637L, 1085928227119065748L, 6229496844435979709L, 6402765803808118083L,
- 4983597475548783767L, 6966887050417449628L, 7973755960878054028L, 3768321651184098759L, 6379004768702443222L,
- 6704006135689189330L, 5103203814961954578L, 1673856093809441141L, 8165126103939127325L, 833495342724150664L,
- 6532100883151301860L, 666796274179320531L, 5225680706521041488L, 533437019343456425L, 8361089130433666380L,
- 8232196860433350926L, 6688871304346933104L, 6585757488346680741L, 5351097043477546483L, 7113280398048299755L,
- 8561755269564074374L, 313202192651548637L, 6849404215651259499L, 2095236161492194072L, 5479523372521007599L,
- 3520863336564710419L, 8767237396033612159L, 99358116390671185L, 7013789916826889727L, 1924160900483492110L,
- 5611031933461511781L, 7073351942499659173L, 8977651093538418850L, 7628014293257544353L, 7182120874830735080L,
- 6102411434606035483L, 5745696699864588064L, 4881929147684828386L, 9193114719783340903L, 2277063414182859933L,
- 7354491775826672722L, 5510999546088198270L, 5883593420661338178L, 719450822128648293L, 4706874736529070542L,
- 4264909472444828957L, 7530999578446512867L, 8668529563282681493L, 6024799662757210294L, 3245474835884234871L,
- 4819839730205768235L, 4441054276078343059L, 7711743568329229176L, 7105686841725348894L, 6169394854663383341L,
- 3839875066009323953L, 4935515883730706673L, 1227225645436504001L, 7896825413969130677L, 118886625327451240L,
- 6317460331175304541L, 5629132522374826477L, 5053968264940243633L, 2658631610528906020L, 8086349223904389813L,
- 2409136169475294470L, 6469079379123511850L, 5616657750322145900L, 5175263503298809480L, 4493326200257716720L,
- 8280421605278095168L, 7189321920412346751L, 6624337284222476135L, 217434314217011916L, 5299469827377980908L,
- 173947451373609533L, 8479151723804769452L, 7657013551681595899L, 6783321379043815562L, 2436262026603366396L,
- 5426657103235052449L, 7483032843395558602L, 8682651365176083919L, 6438829327320028278L, 6946121092140867135L,
- 6995737869226977784L, 5556896873712693708L, 5596590295381582227L, 8891034997940309933L, 7109870065239576402L,
- 7112827998352247947L, 153872830078795637L, 5690262398681798357L, 5657121486175901994L, 9104419837890877372L,
- 1672696748397622544L, 7283535870312701897L, 6872180620830963520L, 5826828696250161518L, 1808395681922860493L,
- 4661462957000129214L, 5136065360280198718L, 7458340731200206743L, 2683681354335452463L, 5966672584960165394L,
- 5836293898210272294L, 4773338067968132315L, 6513709525939172997L, 7637340908749011705L, 1198563204647900987L,
- 6109872726999209364L, 958850563718320789L, 4887898181599367491L, 2611754858345611793L, 7820637090558987986L,
- 489458958611068546L, 6256509672447190388L, 7770264796372675483L, 5005207737957752311L, 682188614985274902L,
- 8008332380732403697L, 6625525006089305327L, 6406665904585922958L, 1611071190129533939L, 5125332723668738366L,
- 4978205766845537474L, 8200532357869981386L, 4275780412210949635L, 6560425886295985109L, 1575949922397804547L,
- 5248340709036788087L, 3105434345289198799L, 8397345134458860939L, 6813369359833673240L, 6717876107567088751L,
- 7295369895237893754L, 5374300886053671001L, 3991621508819359841L, 8598881417685873602L, 2697245599369065423L,
- 6879105134148698881L, 7691819701608117823L, 5503284107318959105L, 4308781353915539097L, 8805254571710334568L,
- 6894050166264862555L, 7044203657368267654L, 9204588947753800367L, 5635362925894614123L, 9208345565573995455L,
- 9016580681431382598L, 3665306460692661759L, 7213264545145106078L, 6621593983296039730L, 5770611636116084862L,
- 8986624001378742108L, 4616489308892867890L, 3499950386361083363L, 7386382894228588624L, 5599920618177733380L,
- 5909106315382870899L, 6324610901913141866L, 4727285052306296719L, 6904363128901468655L, 7563656083690074751L,
- 5512957784129484362L, 6050924866952059801L, 2565691819932632328L, 4840739893561647841L, 207879048575150701L,
- 7745183829698636545L, 5866629699833106606L, 6196147063758909236L, 4693303759866485285L, 4956917651007127389L,
- 1909968600522233067L, 7931068241611403822L, 6745298575577483229L, 6344854593289123058L, 1706890045720076260L,
- 5075883674631298446L, 5054860851317971332L, 8121413879410077514L, 4398428547366843807L, 6497131103528062011L,
- 5363417245264430207L, 5197704882822449609L, 2446059388840589004L, 8316327812515919374L, 7603043836886852730L,
- 6653062250012735499L, 7927109476880437346L, 5322449800010188399L, 8186361988875305038L, 8515919680016301439L,
- 7564155960087622576L, 6812735744013041151L, 7895999175441053223L, 5450188595210432921L, 4472124932981887417L,
- 8720301752336692674L, 3466051078029109543L, 6976241401869354139L, 4617515269794242796L, 5580993121495483311L,
- 5538686623206349399L, 8929588994392773298L, 5172549782388248714L, 7143671195514218638L, 7827388640652509295L,
- 5714936956411374911L, 727887690409141951L, 9143899130258199857L, 6698643526767492606L, 7315119304206559886L,
- 1669566006672083762L, 5852095443365247908L, 8714350434821487656L, 4681676354692198327L, 1437457125744324640L,
- 7490682167507517323L, 4144605808561874585L, 5992545734006013858L, 7005033461591409992L, 4794036587204811087L,
- 70003547160262509L, 7670458539527697739L, 1956680082827375175L, 6136366831622158191L, 3410018473632855302L,
- 4909093465297726553L, 883340371535329080L, 7854549544476362484L, 8792042223940347174L, 6283639635581089987L,
- 8878308186523232901L, 5026911708464871990L, 3413297734476675998L, 8043058733543795184L, 5461276375162681596L,
- 6434446986835036147L, 6213695507501100438L, 5147557589468028918L, 1281607591258970028L, 8236092143148846269L,
- 205897738643396882L, 6588873714519077015L, 2009392598285672668L, 5271098971615261612L, 1607514078628538134L,
- 8433758354584418579L, 4416696933176616176L, 6747006683667534863L, 5378031953912248102L, 5397605346934027890L,
- 7991774377871708805L, 8636168555094444625L, 3563466967739958280L, 6908934844075555700L, 2850773574191966624L,
- 5527147875260444560L, 2280618859353573299L, 8843436600416711296L, 3648990174965717279L, 7074749280333369037L,
- 1074517732601618662L, 5659799424266695229L, 6393637408194160414L, 9055679078826712367L, 4695796630997791177L,
- 7244543263061369894L, 67288490056322619L, 5795634610449095915L, 1898505199416013257L, 4636507688359276732L,
- 1518804159532810606L, 7418412301374842771L, 4274761062623452130L, 5934729841099874217L, 1575134442727806543L,
- 4747783872879899373L, 6794130776295110719L, 7596454196607838997L, 9025934834701221989L, 6077163357286271198L,
- 3531399053019067268L, 4861730685829016958L, 6514468057157164137L, 7778769097326427133L, 8578474484080507458L,
- 6223015277861141707L, 1328756365151540482L, 4978412222288913365L, 6597028314234097870L, 7965459555662261385L,
- 1331873265919780784L, 6372367644529809108L, 1065498612735824627L, 5097894115623847286L, 4541747704930570025L,
- 8156630584998155658L, 3577447513147001717L, 6525304467998524526L, 6551306825259511697L, 5220243574398819621L,
- 3396371052836654196L, 8352389719038111394L, 1744844869796736390L, 6681911775230489115L, 3240550303208344274L,
- 5345529420184391292L, 2592440242566675419L, 8552847072295026067L, 5992578795477635832L, 6842277657836020854L,
- 1104714221640198342L, 5473822126268816683L, 2728445784683113836L, 8758115402030106693L, 2520838848122026975L,
- 7006492321624085354L, 5706019893239531903L, 5605193857299268283L, 6409490321962580684L, 8968310171678829253L,
- 8410510107769173933L, 7174648137343063403L, 1194384864102473662L, 5739718509874450722L, 4644856706023889253L,
- 9183549615799121156L, 53073100154402158L, 7346839692639296924L, 7421156109607342373L, 5877471754111437539L,
- 7781599295056829060L, 4701977403289150031L, 8069953843416418410L, 7523163845262640050L, 9222577334724359132L,
- 6018531076210112040L, 7378061867779487306L, 4814824860968089632L, 5902449494223589845L, 7703719777548943412L,
- 2065221561273923105L, 6162975822039154729L, 7186200471132003969L, 4930380657631323783L, 7593634784276558337L,
- 7888609052210118054L, 1081769210616762369L, 6310887241768094443L, 2710089775864365057L, 5048709793414475554L,
- 5857420635433402369L, 8077935669463160887L, 3837849794580578305L, 6462348535570528709L, 8604303057777328129L,
- 5169878828456422967L, 8728116853592817665L, 8271806125530276748L, 6586289336264687617L, 6617444900424221398L,
- 8958380283753660417L, 5293955920339377119L, 1632681004890062849L, 8470329472543003390L, 6301638422566010881L,
- 6776263578034402712L, 5041310738052808705L, 5421010862427522170L, 343699775700336641L, 8673617379884035472L,
- 549919641120538625L, 6938893903907228377L, 5973958935009296385L, 5551115123125782702L, 1089818333265526785L,
- 8881784197001252323L, 3588383740595798017L, 7105427357601001858L, 6560055807218548737L, 5684341886080801486L,
- 8937393460516749313L, 9094947017729282379L, 1387108685230112769L, 7275957614183425903L, 2954361355555045377L,
- 5820766091346740722L, 6052837899185946625L, 4656612873077392578L, 1152921504606846977L, 7450580596923828125L, 1L,
- 5960464477539062500L, 1L, 4768371582031250000L, 1L, 7629394531250000000L, 1L, 6103515625000000000L, 1L,
- 4882812500000000000L, 1L, 7812500000000000000L, 1L, 6250000000000000000L, 1L, 5000000000000000000L, 1L,
- 8000000000000000000L, 1L, 6400000000000000000L, 1L, 5120000000000000000L, 1L, 8192000000000000000L, 1L,
- 6553600000000000000L, 1L, 5242880000000000000L, 1L, 8388608000000000000L, 1L, 6710886400000000000L, 1L,
- 5368709120000000000L, 1L, 8589934592000000000L, 1L, 6871947673600000000L, 1L, 5497558138880000000L, 1L,
- 8796093022208000000L, 1L, 7036874417766400000L, 1L, 5629499534213120000L, 1L, 9007199254740992000L, 1L,
- 7205759403792793600L, 1L, 5764607523034234880L, 1L, 4611686018427387904L, 1L, 7378697629483820646L,
- 3689348814741910324L, 5902958103587056517L, 1106804644422573097L, 4722366482869645213L, 6419466937650923963L,
- 7555786372591432341L, 8426472692870523179L, 6044629098073145873L, 4896503746925463381L, 4835703278458516698L,
- 7606551812282281028L, 7737125245533626718L, 1102436455425918676L, 6189700196426901374L, 4571297979082645264L,
- 4951760157141521099L, 5501712790637071373L, 7922816251426433759L, 3268717242906448711L, 6338253001141147007L,
- 4459648201696114131L, 5070602400912917605L, 9101741783469756789L, 8112963841460668169L, 5339414816696835055L,
- 6490371073168534535L, 6116206260728423206L, 5192296858534827628L, 4892965008582738565L, 8307674973655724205L,
- 5984069606361426541L, 6646139978924579364L, 4787255685089141233L, 5316911983139663491L, 5674478955442268148L,
- 8507059173023461586L, 5389817513965718714L, 6805647338418769269L, 2467179603801619810L, 5444517870735015415L,
- 3818418090412251009L, 8711228593176024664L, 6109468944659601615L, 6968982874540819731L, 6732249563098636453L,
- 5575186299632655785L, 3541125243107954001L, 8920298079412249256L, 5665800388972726402L, 7136238463529799405L,
- 2687965903807225960L, 5708990770823839524L, 2150372723045780768L, 9134385233318143238L, 7129945171615159552L,
- 7307508186654514591L, 169932915179262157L, 5846006549323611672L, 7514643961627230372L, 4676805239458889338L,
- 2322366354559873974L, 7482888383134222941L, 1871111759924843197L, 5986310706507378352L, 8875587037423695204L,
- 4789048565205902682L, 3411120815197045840L, 7662477704329444291L, 7302467711686228506L, 6129982163463555433L,
- 3997299761978027643L, 4903985730770844346L, 6887188624324332438L, 7846377169233350954L, 7330152984177021577L,
- 6277101735386680763L, 7708796794712572423L, 5021681388309344611L, 633014213657192454L, 8034690221294951377L,
- 6546845963964373411L, 6427752177035961102L, 1548127956429588405L, 5142201741628768881L, 6772525587256536209L,
- 8227522786606030210L, 7146692124868547611L, 6582018229284824168L, 5717353699894838089L, 5265614583427859334L,
- 8263231774657780795L, 8424983333484574935L, 7687147617339583786L, 6739986666787659948L, 6149718093871667029L,
- 5391989333430127958L, 8609123289839243947L, 8627182933488204734L, 2706550819517059345L, 6901746346790563787L,
- 4009915062984602637L, 5521397077432451029L, 8741955272500547595L, 8834235323891921647L, 8453105213888010667L,
- 7067388259113537318L, 3073135356368498210L, 5653910607290829854L, 6147857099836708891L, 9046256971665327767L,
- 4302548137625868741L, 7237005577332262213L, 8976061732213560478L, 5789604461865809771L, 1646826163657982898L,
- 4631683569492647816L, 8696158560410206965L, 7410693711188236507L, 1001132845059645012L, 5928554968950589205L,
- 6334929498160581494L, 4742843975160471364L, 5067943598528465196L, 7588550360256754183L, 2574686535532678828L,
- 6070840288205403346L, 5749098043168053386L, 4856672230564322677L, 2754604027163487547L, 7770675568902916283L,
- 6252040850832535236L, 6216540455122333026L, 8690981495407938512L, 4973232364097866421L, 5108110788955395648L,
- 7957171782556586274L, 4483628447586722714L, 6365737426045269019L, 5431577165440333333L, 5092589940836215215L,
- 6189936139723221828L, 8148143905337944345L, 680525786702379117L, 6518515124270355476L, 544420629361903293L,
- 5214812099416284380L, 7814234132973343281L, 8343699359066055009L, 3279402575902573442L, 6674959487252844007L,
- 4468196468093013915L, 5339967589802275205L, 9108580396587276617L, 8543948143683640329L, 5350356597684866779L,
- 6835158514946912263L, 6124959685518848585L, 5468126811957529810L, 8589316563156989191L, 8749002899132047697L,
- 4519534464196406897L, 6999202319305638157L, 9149650793469991003L, 5599361855444510526L, 3630371820034082479L,
- 8958978968711216842L, 2119246097312621643L, 7167183174968973473L, 7229420099962962799L, 5733746539975178779L,
- 249512857857504755L, 9173994463960286046L, 4088569387313917931L, 7339195571168228837L, 1426181102480179183L,
- 5871356456934583069L, 6674968104097008831L, 4697085165547666455L, 7184648890648562227L, 7515336264876266329L,
- 2272066188182923754L, 6012269011901013063L, 3662327357917294165L, 4809815209520810450L, 6619210701075745655L,
- 7695704335233296721L, 1367365084866417240L, 6156563468186637376L, 8472589697376954439L, 4925250774549309901L,
- 4933397350530608390L, 7880401239278895842L, 4204086946107063100L, 6304320991423116673L, 8897292778998515965L,
- 5043456793138493339L, 1583811001085947287L, 8069530869021589342L, 6223446416479425982L, 6455624695217271474L,
- 1289408318441630463L, 5164499756173817179L, 2876201062124259532L, 8263199609878107486L, 8291270514140725574L,
- 6610559687902485989L, 4788342003941625298L, 5288447750321988791L, 5675348010524255400L, 8461516400515182066L,
- 5391208002096898316L, 6769213120412145653L, 2468291994306563491L, 5415370496329716522L, 5663982410187161116L,
- 8664592794127546436L, 1683674226815637140L, 6931674235302037148L, 8725637010936330358L, 5545339388241629719L,
- 1446486386636198802L, 8872543021186607550L, 6003727033359828406L, 7098034416949286040L, 4802981626687862725L,
- 5678427533559428832L, 3842385301350290180L, 9085484053695086131L, 7992490889531419449L, 7268387242956068905L,
- 4549318304254180398L, 5814709794364855124L, 3639454643403344318L, 4651767835491884099L, 4756238122093630616L,
- 7442828536787014559L, 2075957773236943501L, 5954262829429611647L, 3505440625960509963L, 4763410263543689317L,
- 8338375722881273455L, 7621456421669902908L, 5962703527126216881L, 6097165137335922326L, 8459511636442883828L,
- 4877732109868737861L, 4922934901783351901L, 7804371375789980578L, 4187347028111452718L, 6243497100631984462L,
- 7039226437231072498L, 4994797680505587570L, 1942032335042947675L, 7991676288808940112L, 3107251736068716280L,
- 6393341031047152089L, 8019824610967838509L, 5114672824837721671L, 8260534096145225969L, 8183476519740354675L,
- 304133702235675419L, 6546781215792283740L, 243306961788540335L, 5237424972633826992L, 194645569430832268L,
- 8379879956214123187L, 2156107318460286790L, 6703903964971298549L, 7258909076881094917L, 5363123171977038839L,
- 7651801668875831096L, 8580997075163262143L, 6708859448088464268L, 6864797660130609714L, 9056436373212681737L,
- 5491838128104487771L, 9089823505941100552L, 8786941004967180435L, 1630996757909074751L, 7029552803973744348L,
- 1304797406327259801L, 5623642243178995478L, 4733186739803718164L, 8997827589086392765L, 5728424376314993901L,
- 7198262071269114212L, 4582739501051995121L, 5758609657015291369L, 9200214822954461581L, 9213775451224466191L,
- 9186320494614273045L, 7371020360979572953L, 5504381988320463275L, 5896816288783658362L, 8092854405398280943L,
- 4717453031026926690L, 2784934709576714431L, 7547924849643082704L, 4455895535322743090L, 6038339879714466163L,
- 5409390835629149634L, 4830671903771572930L, 8016861483245230030L, 7729075046034516689L, 3603606336337592240L,
- 6183260036827613351L, 4727559476441028954L, 4946608029462090681L, 1937373173781868001L, 7914572847139345089L,
- 8633820300163854287L, 6331658277711476071L, 8751730647502038591L, 5065326622169180857L, 5156710110630675711L,
- 8104522595470689372L, 872038547525260492L, 6483618076376551497L, 6231654060133073878L, 5186894461101241198L,
- 1295974433364548779L, 8299031137761985917L, 228884686012322885L, 6639224910209588733L, 5717130970922723793L,
- 5311379928167670986L, 8263053591480089358L, 8498207885068273579L, 308164894771456841L, 6798566308054618863L,
- 2091206323188120634L, 5438853046443695090L, 5362313873292406831L, 8702164874309912144L, 8579702197267850929L,
- 6961731899447929715L, 8708436165185235905L, 5569385519558343772L, 6966748932148188724L, 8911016831293350036L,
- 3768100661953281312L, 7128813465034680029L, 1169806122191669888L, 5703050772027744023L, 2780519305124291072L,
- 9124881235244390437L, 2604156480827910553L, 7299904988195512349L, 7617348406775193928L, 5839923990556409879L,
- 7938553132791110304L, 4671939192445127903L, 8195516913603843405L, 7475102707912204646L, 2044780617540418478L,
- 5980082166329763716L, 9014522123516155429L, 4784065733063810973L, 5366943291441969181L, 7654505172902097557L,
- 6742434858936195528L, 6123604138321678046L, 1704599072407046100L, 4898883310657342436L, 8742376887409457526L,
- 7838213297051747899L, 1075082168258445910L, 6270570637641398319L, 2704740141977711890L, 5016456510113118655L,
- 4008466520953124674L, 8026330416180989848L, 6413546433524999478L, 6421064332944791878L, 8820185961561909905L,
- 5136851466355833503L, 1522125547136662440L, 8218962346169333605L, 590726468047704741L, 6575169876935466884L,
- 472581174438163793L, 5260135901548373507L, 2222739346921486196L, 8416217442477397611L, 5401057362445333075L,
- 6732973953981918089L, 2476171482585311299L, 5386379163185534471L, 3825611593439204201L, 8618206661096855154L,
- 2431629734760816398L, 6894565328877484123L, 3789978195179608280L, 5515652263101987298L, 6721331370885596947L,
- 8825043620963179677L, 8909455786045999954L, 7060034896770543742L, 3438215814094889640L, 5648027917416434993L,
- 8284595873388777197L, 9036844667866295990L, 2187306953196312545L, 7229475734293036792L, 1749845562557050036L,
- 5783580587434429433L, 6933899672158505514L, 4626864469947543547L, 13096515613938926L, 7402983151916069675L,
- 1865628832353257443L, 5922386521532855740L, 1492503065882605955L, 4737909217226284592L, 1194002452706084764L,
- 7580654747562055347L, 3755078331700690783L, 6064523798049644277L, 8538085887473418112L, 4851619038439715422L,
- 3141119895236824166L, 7762590461503544675L, 6870466239749873827L, 6210072369202835740L, 5496372991799899062L,
- 4968057895362268592L, 4397098393439919250L, 7948892632579629747L, 8880031836874825961L, 6359114106063703798L,
- 3414676654757950445L, 5087291284850963038L, 6421090138548270680L, 8139666055761540861L, 8429069814306277926L,
- 6511732844609232689L, 4898581444074067179L, 5209386275687386151L, 5763539562630208905L, 8335018041099817842L,
- 5532314485466423924L, 6668014432879854274L, 736502773631228816L, 5334411546303883419L, 2433876626275938215L,
- 8535058474086213470L, 7583551416783411467L, 6828046779268970776L, 6066841133426729173L, 5462437423415176621L,
- 3008798499370428177L, 8739899877464282594L, 1124728784250774760L, 6991919901971426075L, 2744457434771574970L,
- 5593535921577140860L, 2195565947817259976L, 8949657474523425376L, 3512905516507615961L, 7159725979618740301L,
- 965650005835137607L, 5727780783694992240L, 8151217634151930732L, 9164449253911987585L, 3818576177788313364L,
- 7331559403129590068L, 3054860942230650691L, 5865247522503672054L, 6133237568526430876L, 4692198018002937643L,
- 6751264462192099863L, 7507516828804700229L, 8957348732136404618L, 6006013463043760183L, 9010553393080078856L,
- 4804810770435008147L, 1674419492351197600L, 7687697232696013035L, 4523745595132871322L, 6150157786156810428L,
- 3618996476106297057L, 4920126228925448342L, 6584545995626947969L, 7872201966280717348L, 3156575963519296104L,
- 6297761573024573878L, 6214609585557347207L, 5038209258419659102L, 8661036483187788089L, 8061134813471454564L,
- 6478960743616640295L, 6448907850777163651L, 7027843002264267398L, 5159126280621730921L, 3777599994440458757L,
- 8254602048994769474L, 2354811176362823687L, 6603681639195815579L, 3728523348461214111L, 5282945311356652463L,
- 4827493086139926451L, 8452712498170643941L, 5879314530452927160L, 6762169998536515153L, 2858777216991386566L,
- 5409735998829212122L, 5976370588335019576L, 8655577598126739396L, 2183495311852210675L, 6924462078501391516L,
- 9125493878965589187L, 5539569662801113213L, 5455720695801516188L, 8863311460481781141L, 6884478705911470739L,
- 7090649168385424913L, 3662908557358221429L, 5672519334708339930L, 6619675660628487467L, 9076030935533343889L,
- 1368109020150804139L, 7260824748426675111L, 2939161623491598473L, 5808659798741340089L, 506654891422323617L,
- 4646927838993072071L, 2249998320508814055L, 7435084542388915313L, 9134020534926967972L, 5948067633911132251L,
- 1773193205828708893L, 4758454107128905800L, 8797252194146787761L, 7613526571406249281L, 4852231473780084609L,
- 6090821257124999425L, 2037110771653112526L, 4872657005699999540L, 1629688617322490021L, 7796251209119999264L,
- 2607501787715984033L, 6237000967295999411L, 3930675837543742388L, 4989600773836799529L, 1299866262664038749L,
- 7983361238138879246L, 5769134835004372321L, 6386688990511103397L, 2770633460632542696L, 5109351192408882717L,
- 7750529990618899641L, 8174961907854212348L, 5022150355506418780L, 6539969526283369878L, 7707069099147045347L,
- 5231975621026695903L, 631632057204770793L, 8371160993642713444L, 8389308921011453915L, 6696928794914170755L,
- 8556121544180118293L, 5357543035931336604L, 6844897235344094635L, 8572068857490138567L, 5417812354437685931L,
- 6857655085992110854L, 644901068808238421L, 5486124068793688683L, 2360595262417545899L, 8777798510069901893L,
- 1932278012497118276L, 7022238808055921514L, 5235171224739604944L, 5617791046444737211L, 6032811387162639117L,
- 8988465674311579538L, 5963149404718312264L, 7190772539449263630L, 8459868338516560134L, 5752618031559410904L,
- 6767894670813248108L, 9204188850495057447L, 5294608251188331487L
- )
-}
-
// specialised Options to avoid boxing. Prefer .isEmpty guarded access to .value
// for higher performance: pattern matching is slightly slower.
@@ -580,10 +22,12 @@ sealed abstract class ByteOption {
def isEmpty: Boolean
def value: Byte
}
+
case object ByteNone extends ByteOption {
def isEmpty = true
def value: Byte = throw new java.util.NoSuchElementException
}
+
case class ByteSome(value: Byte) extends ByteOption {
def isEmpty = false
}
@@ -592,10 +36,12 @@ sealed abstract class ShortOption {
def isEmpty: Boolean
def value: Short
}
+
case object ShortNone extends ShortOption {
def isEmpty = true
def value: Short = throw new java.util.NoSuchElementException
}
+
case class ShortSome(value: Short) extends ShortOption {
def isEmpty = false
}
@@ -604,10 +50,12 @@ sealed abstract class IntOption {
def isEmpty: Boolean
def value: Int
}
+
case object IntNone extends IntOption {
def isEmpty = true
def value: Int = throw new java.util.NoSuchElementException
}
+
case class IntSome(value: Int) extends IntOption {
def isEmpty = false
}
@@ -616,10 +64,12 @@ sealed abstract class LongOption {
def isEmpty: Boolean
def value: Long
}
+
case object LongNone extends LongOption {
def isEmpty = true
def value: Long = throw new java.util.NoSuchElementException
}
+
case class LongSome(value: Long) extends LongOption {
def isEmpty = false
}
@@ -628,10 +78,12 @@ sealed abstract class FloatOption {
def isEmpty: Boolean
def value: Float
}
+
case object FloatNone extends FloatOption {
def isEmpty = true
def value: Float = throw new java.util.NoSuchElementException
}
+
case class FloatSome(value: Float) extends FloatOption {
def isEmpty = false
}
@@ -640,350 +92,12 @@ sealed abstract class DoubleOption {
def isEmpty: Boolean
def value: Double
}
+
case object DoubleNone extends DoubleOption {
def isEmpty = true
def value: Double = throw new java.util.NoSuchElementException
}
+
case class DoubleSome(value: Double) extends DoubleOption {
def isEmpty = false
}
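// Editorial sketch, not part of this diff: the isEmpty-guarded access pattern recommended by the
// comment above, shown for IntOption (the other primitive Options work the same way).
// `firstPositive` is a hypothetical helper used only for illustration.
object IntOptionExample {
  def firstPositive(xs: Array[Int]): IntOption = {
    var i = 0
    while (i < xs.length) {
      if (xs(i) > 0) return IntSome(xs(i))
      i += 1
    }
    IntNone
  }

  def main(args: Array[String]): Unit = {
    val res = firstPositive(Array(-1, 0, 42))
    // guarded access instead of pattern matching; no boxing on the happy path
    println(if (res.isEmpty) "none" else res.value.toString)
  }
}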
-
-// The underlying implementation uses an exception that has no stack trace for
-// the failure case, which is 20x faster than retaining stack traces. Therefore,
-// we require no boxing of the results on the happy path. This slows down the
-// unhappy path a little bit, but it's still on the same order of magnitude as
-// the happy path.
-//
-// This API should only be used by people who know what they are doing. Note
-// that Reader implementations consume one character beyond the number that is
-// parsed, because there is no terminator character.
-object UnsafeNumbers {
-
- // should never escape into user code
- case object UnsafeNumber
- extends Exception(
- "if you see this a dev made a mistake using UnsafeNumbers"
- )
- with NoStackTrace
-
- def byte(num: String): Byte =
- byte_(new FastStringReader(num), true)
- def byte_(in: Reader, consume: Boolean): Byte =
- long__(in, Byte.MinValue, Byte.MaxValue, consume).toByte
-
- def short(num: String): Short =
- short_(new FastStringReader(num), true)
- def short_(in: Reader, consume: Boolean): Short =
- long__(in, Short.MinValue, Short.MaxValue, consume).toShort
-
- def int(num: String): Int =
- int_(new FastStringReader(num), true)
- def int_(in: Reader, consume: Boolean): Int =
- long__(in, Int.MinValue, Int.MaxValue, consume).toInt
-
- def long(num: String): Long =
- long_(new FastStringReader(num), true)
- def long_(in: Reader, consume: Boolean): Long =
- long__(in, Long.MinValue, Long.MaxValue, consume)
-
- def bigInteger(num: String, max_bits: Int): java.math.BigInteger =
- bigInteger_(new FastStringReader(num), true, max_bits)
- def bigInteger_(
- in: Reader,
- consume: Boolean,
- max_bits: Int
- ): java.math.BigInteger = {
- var current: Int = in.read()
- var negative = false
-
- if (current == '-') {
- negative = true
- current = in.read()
- } else if (current == '+')
- current = in.read()
- if (current == -1) throw UnsafeNumber
-
- bigDecimal__(in, consume, negative, current, true, max_bits).unscaledValue
- }
-
- // measured faster than Character.isDigit
- @inline private[this] def isDigit(i: Int): Boolean =
- '0' <= i && i <= '9'
-
- // is it worth keeping this custom long__ instead of using bigInteger since it
- // is approximately double the performance.
- def long__(in: Reader, lower: Long, upper: Long, consume: Boolean): Long = {
- var current: Int = 0
-
- current = in.read()
- if (current == -1) throw UnsafeNumber
- var negative = false
- if (current == '-') {
- negative = true
- current = in.read()
- if (current == -1) throw UnsafeNumber
- } else if (current == '+') {
- current = in.read()
- if (current == -1) throw UnsafeNumber
- }
-
- if (!isDigit(current))
- throw UnsafeNumber
-
- var accum: Long = 0L
- while ({
- {
- val c = current - '0'
- if (accum <= longunderflow)
- if (accum < longunderflow)
- throw UnsafeNumber
- else if (accum == longunderflow && c == 9)
- throw UnsafeNumber
- // count down, not up, because it is larger
- accum = accum * 10 - c // should never underflow
- current = in.read()
- }; current != -1 && isDigit(current)
- }) ()
-
- if (consume && current != -1) throw UnsafeNumber
-
- if (negative)
- if (accum < lower || upper < accum) throw UnsafeNumber
- else accum
- else if (accum == Long.MinValue)
- throw UnsafeNumber
- else {
- accum = -accum
- if (accum < lower || upper < accum) throw UnsafeNumber
- else accum
- }
- }
-
- def float(num: String, max_bits: Int): Float =
- float_(new FastStringReader(num), true, max_bits)
-
- def float_(in: Reader, consume: Boolean, max_bits: Int): Float = {
- var current: Int = in.read()
- var negative = false
-
- def readAll(s: String): Unit = {
- var i = 0
- val len = s.length
-
- while (i < len) {
- current = in.read()
- if (current != s(i)) throw UnsafeNumber
- i += 1
- }
-
- current = in.read() // to be consistent read the terminator
-
- if (consume && current != -1)
- throw UnsafeNumber
- }
-
- if (current == 'N') {
- readAll("aN")
- return Float.NaN
- }
-
- if (current == '-') {
- negative = true
- current = in.read()
- } else if (current == '+') {
- current = in.read()
- }
-
- if (current == 'I') {
- readAll("nfinity")
-
- if (negative) return Float.NegativeInfinity
- else return Float.PositiveInfinity
- }
-
- if (current == -1)
- throw UnsafeNumber
-
- val res = bigDecimal__(in, consume, negative = negative, initial = current, int_only = false, max_bits = max_bits)
-
- if (negative && res.unscaledValue == java.math.BigInteger.ZERO) -0.0f
- else res.floatValue
- }
-
- def double(num: String, max_bits: Int): Double =
- double_(new FastStringReader(num), true, max_bits)
-
- def double_(in: Reader, consume: Boolean, max_bits: Int): Double = {
- var current: Int = in.read()
- var negative = false
-
- def readall(s: String): Unit = {
- var i = 0
- val len = s.length
- while (i < len) {
- current = in.read()
- if (current != s(i)) throw UnsafeNumber
- i += 1
- }
- current = in.read() // to be consistent read the terminator
- if (consume && current != -1) throw UnsafeNumber
- }
-
- if (current == 'N') {
- readall("aN")
- return Double.NaN
- }
-
- if (current == '-') {
- negative = true
- current = in.read()
- } else if (current == '+')
- current = in.read()
-
- if (current == 'I') {
- readall("nfinity")
- if (negative) return Double.NegativeInfinity
- else return Double.PositiveInfinity
- }
-
- if (current == -1) throw UnsafeNumber
-
- // we could avoid going via BigDecimal if we wanted to do something like
- // https://github.com/plokhotnyuk/jsoniter-scala/blob/56ff2a60e28aa27bd4788caf3b1557a558c00fa1/jsoniter-scala-core/jvm/src/main/scala/com/github/plokhotnyuk/jsoniter_scala/core/JsonReader.scala#L1395-L1425
- // based on
- // https://www.reddit.com/r/rust/comments/a6j5j1/making_rust_float_parsing_fast_and_correct
- //
- // the fallback of .doubleValue tends to call out to parseDouble which
- // ultimately uses strtod from the system libraries and they may loop until
- // the answer converges
- // https://github.com/rust-lang/rust/pull/27307/files#diff-fe6c36003393c49bf7e5c413458d6d9cR43-R84
- val res = bigDecimal__(in, consume, negative, current, false, max_bits)
- // BigDecimal doesn't have a negative zero, so we need to apply manually
- if (negative && res.unscaledValue == java.math.BigInteger.ZERO) -0.0
- // TODO implement Algorithm M or Bigcomp and avoid going via BigDecimal
- else res.doubleValue
- }
-
- def bigDecimal(num: String, max_bits: Int): java.math.BigDecimal =
- bigDecimal_(new FastStringReader(num), true, max_bits)
- def bigDecimal_(
- in: Reader,
- consume: Boolean,
- max_bits: Int
- ): java.math.BigDecimal = {
- var current: Int = in.read()
- var negative = false
-
- if (current == '-') {
- negative = true
- current = in.read()
- } else if (current == '+')
- current = in.read()
- if (current == -1) throw UnsafeNumber
-
- bigDecimal__(in, consume, negative, current, false, max_bits)
- }
-
- def bigDecimal__(
- in: Reader,
- consume: Boolean,
- negative: Boolean,
- initial: Int,
- int_only: Boolean,
- max_bits: Int
- ): java.math.BigDecimal = {
- var current: Int = initial
- // record the significand as Long until it overflows, then swap to BigInteger
- var sig: Long = -1 // -1 means it hasn't been seen yet
- var sig_ : java.math.BigInteger = null // non-null wins over sig
- var dot: Int = 0 // counts from the right
- var exp: Int = 0 // implied
-
- def advance(): Boolean = {
- current = in.read()
- current != -1
- }
-
- // skip trailing zero on the left
- while (current == '0') {
- sig = 0
- if (!advance())
- return java.math.BigDecimal.ZERO
- }
-
- def push_sig(): Unit = {
- val c = current - '0'
- // would be nice if there was a fused instruction...
- if (sig_ != null) {
- sig_ = sig_
- .multiply(java.math.BigInteger.TEN)
- .add(bigIntegers(c))
- // arbitrary limit on BigInteger size to avoid OOM attacks
- if (sig_.bitLength >= max_bits)
- throw UnsafeNumber
- } else if (sig >= longoverflow)
- sig_ = java.math.BigInteger
- .valueOf(sig)
- .multiply(java.math.BigInteger.TEN)
- .add(bigIntegers(c))
- else if (sig < 0) sig = c.toLong
- else sig = sig * 10 + c
- }
-
- def significand() =
- if (sig <= 0) java.math.BigDecimal.ZERO
- else {
- val res =
- if (sig_ != null)
- new java.math.BigDecimal(sig_)
- else
- new java.math.BigDecimal(sig)
- if (negative) res.negate else res
- }
-
- while (isDigit(current)) {
- push_sig()
- if (!advance())
- return significand()
- }
-
- if (int_only) {
- if (consume && current != -1)
- throw UnsafeNumber
- return significand()
- }
-
- if (current == '.') {
- if (sig < 0) sig = 0 // e.g. ".1" is shorthand for "0.1"
- if (!advance())
- return significand()
- while (isDigit(current)) {
- dot += 1
- if (sig > 0 || current != '0')
- push_sig()
- // overflowed...
- if (dot < 0) throw UnsafeNumber
- advance()
- }
- }
-
- if (sig < 0) throw UnsafeNumber // no significand
-
- if (current == 'E' || current == 'e')
- exp = int_(in, consume)
- else if (consume && current != -1)
- throw UnsafeNumber
-
- val scale = if (dot < 1) exp else exp - dot
- val res = significand()
- if (scale != 0)
- res.scaleByPowerOfTen(scale)
- else
- res
- }
- // note that bigDecimal does not have a negative zero
- private[this] val bigIntegers: Array[java.math.BigInteger] =
- (0L to 9L).map(java.math.BigInteger.valueOf).toArray
- private[this] val longunderflow: Long = Long.MinValue / 10L
- private[this] val longoverflow: Long = Long.MaxValue / 10L
-}
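// Editorial sketch, not part of this diff: the no-stack-trace failure pattern that the removed
// UnsafeNumbers object describes above (a singleton exception mixing in NoStackTrace, thrown on
// the unhappy path and caught by the caller). All names below are illustrative only.
import scala.util.control.NoStackTrace

object NoStackTraceExample {
  case object ParseFailed extends Exception("invalid number") with NoStackTrace

  def parsePositiveInt(s: String): Int = {
    if (s.isEmpty) throw ParseFailed
    var i   = 0
    var acc = 0
    while (i < s.length) {
      val c = s.charAt(i)
      if (c < '0' || c > '9') throw ParseFailed // cheap: no stack trace is captured
      acc = acc * 10 + (c - '0')                // overflow deliberately ignored in this sketch
      i += 1
    }
    acc
  }

  def parseOrElse(s: String, default: Int): Int =
    try parsePositiveInt(s)
    catch { case ParseFailed => default }
}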
diff --git a/zio-json/shared/src/main/scala/zio/json/internal/readers.scala b/zio-json/shared/src/main/scala/zio/json/internal/readers.scala
index 6d81eb43a..b12228aab 100644
--- a/zio-json/shared/src/main/scala/zio/json/internal/readers.scala
+++ b/zio-json/shared/src/main/scala/zio/json/internal/readers.scala
@@ -71,13 +71,13 @@ private[zio] final class RewindTwice
extends Exception(
"RecordingReader's rewind was called twice"
)
+ with NoStackTrace
/**
* A Reader that can retract and replay the last char that it read.
*
- * This is essential when parsing contents that do not have a terminator
- * character, e.g. numbers, whilst preserving the non-significant character for
- * further processing.
+ * This is essential when parsing contents that do not have a terminator character, e.g. numbers, whilst preserving the
+ * non-significant character for further processing.
*/
sealed trait RetractReader extends OneCharReader {
@@ -86,8 +86,8 @@ sealed trait RetractReader extends OneCharReader {
}
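// Editorial sketch, not part of this diff: how a RetractReader is consumed when a token has no
// terminator character. Digits are read until a non-digit appears, then the non-digit is retracted
// so the next parser still sees it. FastStringReader is package-private, so this assumes code
// living inside zio.json.internal; `readDigits` is illustrative only.
def readDigits(in: RetractReader): Long = {
  var acc = 0L
  var c   = in.read()
  while (c >= '0' && c <= '9') {
    acc = acc * 10 + (c - '0')
    c = in.read()
  }
  if (c != -1) in.retract() // hand the terminator (e.g. ',') back to the caller
  acc
}

val in = new FastStringReader("123,")
assert(readDigits(in) == 123L)
assert(in.readChar() == ',') // the retracted char is replayed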
final class FastCharSequence(s: Array[Char]) extends CharSequence {
- def length: Int = s.length
- def charAt(i: Int): Char = s(i)
+ def length: Int = s.length
+ def charAt(i: Int): Char = s(i)
def subSequence(start: Int, end: Int): CharSequence =
new FastCharSequence(Arrays.copyOfRange(s, start, end))
}
@@ -95,28 +95,43 @@ final class FastCharSequence(s: Array[Char]) extends CharSequence {
// java.io.StringReader uses a lock, which reduces perf by x2, this also allows
// fast retraction and access to raw char arrays (which are faster than Strings)
private[zio] final class FastStringReader(s: CharSequence) extends RetractReader with PlaybackReader {
- private[this] var i: Int = 0
- def offset(): Int = i
- private val len: Int = s.length
- def close(): Unit = ()
+ private[this] var i: Int = 0
+ private[this] val len: Int = s.length
+
+ def offset(): Int = i
+
+ def close(): Unit = ()
+
override def read(): Int = {
- i += 1
- if (i > len) -1
- else history(i - 1).toInt // -1 is faster than assigning a temp value
+ val i = this.i
+ if (i < len) {
+ this.i = i + 1
+ return s.charAt(i).toInt
+ }
+ -1
}
+
override def readChar(): Char = {
- i += 1
- if (i > len) throw new UnexpectedEnd
- else history(i - 1)
+ val i = this.i
+ if (i < len) {
+ this.i = i + 1
+ return s.charAt(i)
+ }
+ throw new UnexpectedEnd
}
+
override def nextNonWhitespace(): Char = {
- while ({
- {
- i += 1
- if (i > len) throw new UnexpectedEnd
- }; isWhitespace(history(i - 1))
- }) ()
- history(i - 1)
+ var i = this.i
+ while (i < len) {
+ val c = s.charAt(i)
+ i += 1
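+      // ORing with 0x4 maps '\t' (0x09) onto '\r' (0x0D), so the single comparison below rejects both tab and carriage return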
+ if (c != ' ' && c != '\n' && (c | 0x4) != '\r') {
+ this.i = i
+ return c
+ }
+ }
+ this.i = i
+ throw new UnexpectedEnd
}
def retract(): Unit = i -= 1
@@ -150,20 +165,17 @@ final class WithRetractReader(in: java.io.Reader) extends RetractReader with Aut
}
/**
- * Records the contents of an underlying Reader and allows rewinding back to
- * the beginning once. If rewound and reading continues past the
- * recording, the recording no longer continues.
+ * Records the contents of an underlying Reader and allows rewinding back to the beginning once. If rewound and reading
+ * continues past the recording, the recording no longer continues.
*
- * To avoid feature interaction edge cases, `retract` is not allowed as the
- * first action nor is `retract` allowed to happen immediately before or after
- * a `rewind`.
+ * To avoid feature interaction edge cases, `retract` is not allowed as the first action nor is `retract` allowed to
+ * happen immediately before or after a `rewind`.
*/
private[zio] sealed trait RecordingReader extends RetractReader {
def rewind(): Unit
}
private[zio] object RecordingReader {
- def apply(in: OneCharReader): RecordingReader =
- new WithRecordingReader(in, 64)
+ @inline def apply(in: OneCharReader): RecordingReader = new WithRecordingReader(in, 64)
}
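// Editorial sketch, not part of this diff: the record/rewind/replay cycle RecordingReader offers.
// A decoder can read ahead, rewind exactly once, and re-consume the same characters. Both
// RecordingReader and FastStringReader are package-private, so this assumes code living inside
// zio.json.internal.
val rec   = RecordingReader(new FastStringReader("true"))
val first = rec.readChar()       // 't' is read from the underlying reader and recorded
rec.rewind()                     // allowed exactly once; a second rewind throws RewindTwice
assert(rec.readChar() == first)  // 't' is replayed from the tape
assert(rec.readChar() == 'r')    // the recording is exhausted, reading continues downstream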
// used to optimise RecordingReader
@@ -182,66 +194,70 @@ private[zio] sealed trait PlaybackReader extends OneCharReader {
private[zio] final class WithRecordingReader(in: OneCharReader, initial: Int)
extends RecordingReader
with PlaybackReader {
- private[this] var tape: Array[Char] = Array.ofDim(Math.max(initial, 1))
- private[this] var eob: Int = -1
+ private[this] var state: Int = 0 // -1: neither recording nor replaying, 0: recording, 1: replaying
+ private[this] var tape: Array[Char] = new Array(Math.max(initial, 1))
+ private[this] var reading: Int = 0
private[this] var writing: Int = 0
- private[this] var reading: Int = -1
def close(): Unit = in.close()
override def read(): Int =
- try readChar().toInt
- catch {
- case _: UnexpectedEnd =>
- eob = reading
- -1
+ if (state < 0) in.read()
+ else if (state > 0) {
+ var reading = this.reading
+ val c = tape(reading).toInt
+ reading += 1
+ this.reading = reading
+      if (reading == writing) state = -1 // caught up, stop replaying
+ c
+ } else {
+ val writing = this.writing
+ if (writing == tape.length) tape = Arrays.copyOf(tape, writing << 1)
+ val c = in.read()
+ if (c >= 0) {
+ tape(writing) = c.toChar
+ this.writing = writing + 1
+ }
+ c
}
+
override def readChar(): Char =
- if (reading != -1) {
- if (reading == eob) throw new UnexpectedEnd
- val v = tape(reading)
+ if (state < 0) in.readChar()
+ else if (state > 0) {
+ var reading = this.reading
+ val c = tape(reading)
reading += 1
- if (reading >= writing) {
- reading = -1 // caught up
- writing = -1 // stop recording
- }
- v
+ this.reading = reading
+      if (reading == writing) state = -1 // caught up, stop replaying
+ c
} else {
- val v = in.readChar()
- if (writing != -1) {
- tape(writing) = v
- writing += 1
- if (writing == tape.length)
- tape = Arrays.copyOf(tape, tape.length * 2)
- }
- v
+ val writing = this.writing
+ if (writing == tape.length) tape = Arrays.copyOf(tape, writing << 1)
+ val c = in.readChar()
+ tape(writing) = c
+ this.writing = writing + 1
+ c
}
def rewind(): Unit =
- if (writing != -1)
- reading = 0
+ if (state == 0) state = 1 // start replaying
else throw new RewindTwice
def retract(): Unit =
- if (reading == -1) {
+ if (state > 0) reading -= 1
+ else {
in match {
case rr: RetractReader =>
rr.retract()
- if (writing != -1) {
- writing -= 1 // factor in retracted delegate
- }
-
+ if (state == 0) writing -= 1 // factor in retracted delegate
case _ =>
- reading = writing - 1
+ throw new UnsupportedOperationException("underlying reader does not support retract")
}
- } else
- reading -= 1
+ }
def offset(): Int =
- if (reading == -1)
- writing
- else
- reading
+ if (state > 0) reading
+ else writing
def history(idx: Int): Char = tape(idx)
}
diff --git a/zio-json/shared/src/main/scala/zio/json/internal/writers.scala b/zio-json/shared/src/main/scala/zio/json/internal/writers.scala
index 482e384c7..b4130ba08 100644
--- a/zio-json/shared/src/main/scala/zio/json/internal/writers.scala
+++ b/zio-json/shared/src/main/scala/zio/json/internal/writers.scala
@@ -25,6 +25,63 @@ import java.util.Arrays
trait Write {
def write(c: Char): Unit
def write(s: String): Unit
+ def write(cs: Array[Char], from: Int, to: Int): Unit = {
+ var i = from
+ while (i < to) {
+ write(cs(i))
+ i += 1
+ }
+ }
+ @inline def write(c1: Char, c2: Char): Unit = {
+ write(c1)
+ write(c2)
+ }
+ @inline def write(c1: Char, c2: Char, c3: Char): Unit = {
+ write(c1)
+ write(c2)
+ write(c3)
+ }
+ @inline def write(c1: Char, c2: Char, c3: Char, c4: Char): Unit = {
+ write(c1)
+ write(c2)
+ write(c3)
+ write(c4)
+ }
+ @inline def write(c1: Char, c2: Char, c3: Char, c4: Char, c5: Char): Unit = {
+ write(c1)
+ write(c2)
+ write(c3)
+ write(c4)
+ write(c5)
+ }
+ @inline def write(s: Short): Unit = {
+ write((s & 0xff).toChar)
+ write((s >> 8).toChar)
+ }
+ @inline def write(s1: Short, s2: Short): Unit = {
+ write((s1 & 0xff).toChar)
+ write((s1 >> 8).toChar)
+ write((s2 & 0xff).toChar)
+ write((s2 >> 8).toChar)
+ }
+ @inline def write(s1: Short, s2: Short, s3: Short): Unit = {
+ write((s1 & 0xff).toChar)
+ write((s1 >> 8).toChar)
+ write((s2 & 0xff).toChar)
+ write((s2 >> 8).toChar)
+ write((s3 & 0xff).toChar)
+ write((s3 >> 8).toChar)
+ }
+ @inline def write(s1: Short, s2: Short, s3: Short, s4: Short): Unit = {
+ write((s1 & 0xff).toChar)
+ write((s1 >> 8).toChar)
+ write((s2 & 0xff).toChar)
+ write((s2 >> 8).toChar)
+ write((s3 & 0xff).toChar)
+ write((s3 >> 8).toChar)
+ write((s4 & 0xff).toChar)
+ write((s4 >> 8).toChar)
+ }
}
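// Editorial sketch, not part of this diff: what the new Short overloads of Write do. Each Short
// packs two chars, low byte first, so write(s: Short) emits them in order. The `pack` helper and
// the StringWrite sink are illustrative only; the presumed purpose (precomputed digit-pair tables
// for fast number rendering) is an assumption, not something this diff states.
object WriteShortExample {
  def pack(c1: Char, c2: Char): Short = ((c1 & 0xff) | ((c2 & 0xff) << 8)).toShort

  final class StringWrite extends Write {
    private[this] val sb = new java.lang.StringBuilder
    def write(c: Char): Unit   = { sb.append(c); () }
    def write(s: String): Unit = { sb.append(s); () }
    override def toString: String = sb.toString
  }

  def main(args: Array[String]): Unit = {
    val out = new StringWrite
    out.write(pack('4', '2')) // resolves to write(s: Short) and emits '4' then '2'
    assert(out.toString == "42")
  }
}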
// wrapper to implement the legacy Java API
@@ -33,24 +90,14 @@ final class WriteWriter(out: java.io.Writer) extends Write {
def write(c: Char): Unit = out.write(c.toInt)
}
-final class FastStringWrite(initial: Int) extends Write {
- private[this] val sb: java.lang.StringBuilder = new java.lang.StringBuilder(initial)
-
- def write(s: String): Unit = sb.append(s): Unit
-
- def write(c: Char): Unit = sb.append(c): Unit
-
- def buffer: CharSequence = sb
-}
-
-// like StringBuilder but doesn't have any encoding or range checks
+// FIXME: remove in the next major version
private[zio] final class FastStringBuilder(initial: Int) {
private[this] var chars: Array[Char] = new Array[Char](initial)
private[this] var i: Int = 0
+ @inline
def append(c: Char): Unit = {
- if (i == chars.length)
- chars = Arrays.copyOf(chars, chars.length * 2)
+ if (i == chars.length) chars = Arrays.copyOf(chars, chars.length << 1)
chars(i) = c
i += 1
}
diff --git a/zio-json/shared/src/main/scala/zio/json/javatime/parsers.scala b/zio-json/shared/src/main/scala/zio/json/javatime/parsers.scala
index 026849d35..83904a81f 100644
--- a/zio-json/shared/src/main/scala/zio/json/javatime/parsers.scala
+++ b/zio-json/shared/src/main/scala/zio/json/javatime/parsers.scala
@@ -15,25 +15,8 @@
*/
package zio.json.javatime
-import java.time.{
- DateTimeException,
- Duration,
- Instant,
- LocalDate,
- LocalDateTime,
- LocalTime,
- MonthDay,
- OffsetDateTime,
- OffsetTime,
- Period,
- Year,
- YearMonth,
- ZoneId,
- ZoneOffset,
- ZonedDateTime
-}
+import java.time._
import java.util.concurrent.ConcurrentHashMap
-import scala.annotation.switch
import scala.util.control.NoStackTrace
private[json] object parsers {
@@ -45,40 +28,38 @@ private[json] object parsers {
var pos = 0
var seconds = 0L
var nanos, state = 0
- if (pos >= len) durationError(pos)
+ if (pos >= len) durationError()
var ch = input.charAt(pos)
pos += 1
val isNeg = ch == '-'
if (isNeg) {
- if (pos >= len) durationError(pos)
+ if (pos >= len) durationError()
ch = input.charAt(pos)
pos += 1
}
- if (ch != 'P') durationOrPeriodStartError(isNeg, pos - 1)
- if (pos >= len) durationError(pos)
+ if (ch != 'P' || pos >= len) durationError()
ch = input.charAt(pos)
pos += 1
while ({
if (state == 0) {
if (ch == 'T') {
- if (pos >= len) durationError(pos)
+ if (pos >= len) durationError()
ch = input.charAt(pos)
pos += 1
state = 1
}
} else if (state == 1) {
- if (ch != 'T') charsError('T', '"', pos - 1)
- if (pos >= len) durationError(pos)
+ if (ch != 'T' || pos >= len) durationError()
ch = input.charAt(pos)
pos += 1
- } else if (state == 4 && pos >= len) durationError(pos - 1)
+ } else if (state == 4 && pos >= len) durationError()
val isNegX = ch == '-'
if (isNegX) {
- if (pos >= len) durationError(pos)
+ if (pos >= len) durationError()
ch = input.charAt(pos)
pos += 1
}
- if (ch < '0' || ch > '9') durationOrPeriodDigitError(isNegX, state <= 1, pos - 1)
+ if (ch < '0' || ch > '9') durationError()
var x: Long = ('0' - ch).toLong
while (
(pos < len) && {
@@ -91,31 +72,28 @@ private[json] object parsers {
x = x * 10 + ('0' - ch)
x > 0
}
- ) durationError(pos)
+ ) durationError()
pos += 1
}
if (!(isNeg ^ isNegX)) {
- if (x == -9223372036854775808L) durationError(pos)
+ if (x == -9223372036854775808L) durationError()
x = -x
}
if (ch == 'D' && state <= 0) {
- if (x < -106751991167300L || x > 106751991167300L)
- durationError(pos) // -106751991167300L == Long.MinValue / 86400
+ if (x < -106751991167300L || x > 106751991167300L) durationError()
seconds = x * 86400
state = 1
} else if (ch == 'H' && state <= 1) {
- if (x < -2562047788015215L || x > 2562047788015215L)
- durationError(pos) // -2562047788015215L == Long.MinValue / 3600
- seconds = sumSeconds(x * 3600, seconds, pos)
+ if (x < -2562047788015215L || x > 2562047788015215L) durationError()
+ seconds = sumSeconds(x * 3600, seconds)
state = 2
} else if (ch == 'M' && state <= 2) {
- if (x < -153722867280912930L || x > 153722867280912930L)
- durationError(pos) // -153722867280912930L == Long.MinValue / 60
- seconds = sumSeconds(x * 60, seconds, pos)
+ if (x < -153722867280912930L || x > 153722867280912930L) durationError()
+ seconds = sumSeconds(x * 60, seconds)
state = 3
} else if (ch == '.') {
pos += 1
- seconds = sumSeconds(x, seconds, pos)
+ seconds = sumSeconds(x, seconds)
var nanoDigitWeight = 100000000
while (
(pos < len) && {
@@ -127,13 +105,13 @@ private[json] object parsers {
nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
pos += 1
}
- if (ch != 'S') nanoError(nanoDigitWeight, 'S', pos)
+ if (ch != 'S') durationError()
if (isNeg ^ isNegX) nanos = -nanos
state = 4
} else if (ch == 'S') {
- seconds = sumSeconds(x, seconds, pos)
+ seconds = sumSeconds(x, seconds)
state = 4
- } else durationError(state, pos)
+ } else durationError()
pos += 1
(pos < len) && {
ch = input.charAt(pos)
@@ -145,126 +123,97 @@ private[json] object parsers {
}
def unsafeParseInstant(input: String): Instant = {
- val len = input.length
- var pos = 0
- val year = {
- if (pos + 4 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val ch3 = input.charAt(pos + 3)
- val ch4 = input.charAt(pos + 4)
- if (ch0 >= '0' && ch0 <= '9') {
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 != '-') charError('-', pos + 4)
- pos += 5
- ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
- } else {
- val yearNeg = ch0 == '-' || (ch0 != '+' && charsOrDigitError('-', '+', pos))
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 < '0' || ch4 > '9') digitError(pos + 4)
+ val len = input.length
+ var pos, year, month, day = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
pos += 5
- var year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
- var yearDigits = 4
- var ch: Char = '0'
- while ({
- if (pos >= len) instantError(pos)
- ch = input.charAt(pos)
- pos += 1
- ch >= '0' && ch <= '9' && yearDigits < 10
- }) {
- year =
- if (year > 100000000) 2147483647
- else year * 10 + (ch - '0')
- yearDigits += 1
- }
- if (yearDigits == 10 && year > 1000000000) yearError(pos - 2)
- if (yearNeg) {
- if (year == 0) yearError(pos - 2)
- year = -year
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && instantError())
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < len && {
+ ch = input.charAt(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 10
+ }
+ ) {
+ year =
+ if (year > 100000000) 2147483647
+ else year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearDigits == 10 && year > 1000000000 || yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
}
- if (ch != '-') yearError(yearNeg, yearDigits, pos - 1)
- year
+ } || pos + 5 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ val ch5 = input.charAt(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
}
- }
- val month = {
- if (pos + 2 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (month < 1 || month > 12) monthError(pos + 1)
- if (ch2 != '-') charError('-', pos + 2)
- pos += 3
- month
- }
- val day = {
- if (pos + 2 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val day = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (day == 0 || (day > 28 && day > maxDayForYearMonth(year, month))) dayError(pos + 1)
- if (ch2 != 'T') charError('T', pos + 2)
- pos += 3
- day
- }
+ ) instantError()
val epochDay =
epochDayForYear(year) + (dayOfYearForYearMonth(year, month) + day - 719529) // 719528 == days 0000 to 1970
- var epochSecond = {
- if (pos + 2 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (hour > 23) hourError(pos + 1)
- if (ch2 != ':') charError(':', pos + 2)
- pos += 3
- hour * 3600
- }
- epochSecond += {
- if (pos + 1 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') minuteError(pos + 1)
- pos += 2
- (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
- }
- var nanoDigitWeight = -1
- var nano = 0
- var ch = (0: Char)
+ var epochSecond = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ pos += 5
+ val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ epochSecond = hour * 3600 + (ch3 * 10 + ch4 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ }
+ ) instantError()
+ var nano = 0
+ var ch = '0'
if (pos < len) {
ch = input.charAt(pos)
pos += 1
if (ch == ':') {
- nanoDigitWeight = -2
- epochSecond += {
- if (pos + 1 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') secondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ epochSecond += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) instantError()
if (pos < len) {
ch = input.charAt(pos)
pos += 1
if (ch == '.') {
- nanoDigitWeight = 100000000
+ var nanoDigitWeight = 100000000
while (
pos < len && {
ch = input.charAt(pos)
@@ -281,18 +230,16 @@ private[json] object parsers {
}
var offsetTotal = 0
if (ch != 'Z') {
- val offsetNeg = ch == '-' || (ch != '+' && timezoneSignError(nanoDigitWeight, pos - 1))
- offsetTotal = {
- if (pos + 1 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val offsetHour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (offsetHour > 18) timezoneOffsetHourError(pos + 1)
- pos += 2
- offsetHour * 3600
- }
+ val offsetNeg = ch == '-' || (ch != '+' && instantError())
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
+ }
+ ) instantError()
if (
pos < len && {
ch = input.charAt(pos)
@@ -300,16 +247,15 @@ private[json] object parsers {
ch == ':'
}
) {
- offsetTotal += {
- if (pos + 1 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetMinuteError(pos + 1)
- pos += 2
- (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
- }
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) instantError()
if (
pos < len && {
ch = input.charAt(pos)
@@ -317,429 +263,348 @@ private[json] object parsers {
ch == ':'
}
) {
- offsetTotal += {
- if (pos + 1 >= len) instantError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetSecondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) instantError()
}
}
- if (offsetTotal > 64800) zoneOffsetError(pos) // 64800 == 18 * 60 * 60
+ if (offsetTotal > 64800) instantError() // 64800 == 18 * 60 * 60
if (offsetNeg) offsetTotal = -offsetTotal
}
- if (pos != len) instantError(pos)
- Instant.ofEpochSecond(
- epochDay * 86400 + (epochSecond - offsetTotal),
- nano.toLong
- ) // 86400 == seconds per day
+ if (pos != len) instantError()
+ Instant.ofEpochSecond(epochDay * 86400 + (epochSecond - offsetTotal), nano.toLong)
}
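// Editorial note, not part of this diff: the "528 == '0' * 11" and "53328 == '0' * 1111" constants
// used above fall out of expanding the digit characters. With '0' == 48 and ch == '0' + d:
//   ch0 * 10 + ch1                           == (10 * d0 + d1)   + 48 * 11   == value + 528
//   ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3  == four-digit value + 48 * 1111 == value + 53328
// so subtracting the constant recovers the numeric value directly, e.g.:
def twoDigits(ch0: Char, ch1: Char): Int = ch0 * 10 + ch1 - 528 // twoDigits('0', '7') == 7
assert(twoDigits('0', '7') == 7)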
def unsafeParseLocalDate(input: String): LocalDate = {
- val len = input.length
- var pos = 0
- val year = {
- if (pos + 4 >= len) localDateError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val ch3 = input.charAt(pos + 3)
- val ch4 = input.charAt(pos + 4)
- if (ch0 >= '0' && ch0 <= '9') {
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 != '-') charError('-', pos + 4)
- pos += 5
- ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
- } else {
- val yearNeg = ch0 == '-' || (ch0 != '+' && charsOrDigitError('-', '+', pos))
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 < '0' || ch4 > '9') digitError(pos + 4)
+ val len = input.length
+ var pos, year, month, day = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
pos += 5
- var year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
- var yearDigits = 4
- var ch: Char = '0'
- while ({
- if (pos >= len) localDateError(pos)
- ch = input.charAt(pos)
- pos += 1
- ch >= '0' && ch <= '9' && yearDigits < 9
- }) {
- year = year * 10 + (ch - '0')
- yearDigits += 1
- }
- if (yearNeg) {
- if (year == 0) yearError(pos - 2)
- year = -year
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && localDateError())
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < len && {
+ ch = input.charAt(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
}
- if (ch != '-') yearError(yearNeg, yearDigits, pos - 1)
- year
+ } || pos + 5 != len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ pos += 5
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
}
- }
- val month = {
- if (pos + 2 >= len) localDateError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (month < 1 || month > 12) monthError(pos + 1)
- if (ch2 != '-') charError('-', pos + 2)
- pos += 3
- month
- }
- val day = {
- if (pos + 1 >= len) localDateError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val day = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (day == 0 || (day > 28 && day > maxDayForYearMonth(year, month))) dayError(pos + 1)
- pos += 2
- day
- }
- if (pos != len) localDateError(pos)
+ ) localDateError()
LocalDate.of(year, month, day)
}
def unsafeParseLocalDateTime(input: String): LocalDateTime = {
- val len = input.length
- var pos = 0
- val year = {
- if (pos + 4 >= len) localDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val ch3 = input.charAt(pos + 3)
- val ch4 = input.charAt(pos + 4)
- if (ch0 >= '0' && ch0 <= '9') {
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 != '-') charError('-', pos + 4)
- pos += 5
- ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
- } else {
- val yearNeg = ch0 == '-' || (ch0 != '+' && charsOrDigitError('-', '+', pos))
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 < '0' || ch4 > '9') digitError(pos + 4)
+ val len = input.length
+ var pos, year, month, day = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
pos += 5
- var year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
- var yearDigits = 4
- var ch: Char = '0'
- while ({
- if (pos >= len) localDateTimeError(pos)
- ch = input.charAt(pos)
- pos += 1
- ch >= '0' && ch <= '9' && yearDigits < 9
- }) {
- year = year * 10 + (ch - '0')
- yearDigits += 1
- }
- if (yearNeg) {
- if (year == 0) yearError(pos - 2)
- year = -year
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && localDateTimeError())
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < len && {
+ ch = input.charAt(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
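+ // below: a leading '-' negates the year ("-0000" is rejected) and the year must be terminated by the '-' date separator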
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
}
- if (ch != '-') yearError(yearNeg, yearDigits, pos - 1)
- year
+ } || pos + 5 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ val ch5 = input.charAt(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || day == 0 || month < 1 || month > 12 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
}
- }
- val month = {
- if (pos + 2 >= len) localDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (month < 1 || month > 12) monthError(pos + 1)
- if (ch2 != '-') charError('-', pos + 2)
- pos += 3
- month
- }
- val day = {
- if (pos + 2 >= len) localDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val day = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (day == 0 || (day > 28 && day > maxDayForYearMonth(year, month))) dayError(pos + 1)
- if (ch2 != 'T') charError('T', pos + 2)
- pos += 3
- day
- }
- val hour = {
- if (pos + 2 >= len) localDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (hour > 23) hourError(pos + 1)
- if (ch2 != ':') charError(':', pos + 2)
- pos += 3
- hour
- }
- val minute = {
- if (pos + 1 >= len) localDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') minuteError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
- var second, nano = 0
- if (pos < len) {
- if (input.charAt(pos) != ':') charError(':', pos)
- pos += 1
- second = {
- if (pos + 1 >= len) localDateTimeError(pos)
+ ) localDateTimeError()
+ var hour, minute = 0
+ if (
+ pos + 4 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') secondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
}
+ ) localDateTimeError()
+ var second, nano = 0
+ if (pos < len) {
+ if (
+ input.charAt(pos) != ':' || {
+ pos += 1
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ }
+ ) localDateTimeError()
if (pos < len) {
- if (input.charAt(pos) != '.') charError('.', pos)
- pos += 1
- var nanoDigitWeight = 100000000
- var ch = '0'
- while (
- pos < len && {
- ch = input.charAt(pos)
+ if (
+ input.charAt(pos) != '.' || {
pos += 1
- ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ var nanoDigitWeight = 100000000
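+ // the first fractional digit is worth 100000000 ns; each further digit is worth 10x less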
+ var ch = '0'
+ while (
+ pos < len && {
+ ch = input.charAt(pos)
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }
+ ) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
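+ // 3435973837L == (1L << 35) / 10 + 1, so the multiply-and-shift above is an exact division by 10 for non-negative Int values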
+ pos += 1
+ }
+ pos != len
}
- ) {
- nano += (ch - '0') * nanoDigitWeight
- nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
- }
- if (pos != len || ch < '0' || ch > '9') localDateTimeError(pos - 1)
+ ) localDateTimeError()
}
}
LocalDateTime.of(year, month, day, hour, minute, second, nano)
}
def unsafeParseLocalTime(input: String): LocalTime = {
- val len = input.length
- var pos = 0
- val hour = {
- if (pos + 2 >= len) localTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (hour > 23) hourError(pos + 1)
- if (ch2 != ':') charError(':', pos + 2)
- pos += 3
- hour
- }
- val minute = {
- if (pos + 1 >= len) localTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') minuteError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
- var second, nano = 0
- if (pos < len) {
- if (input.charAt(pos) != ':') charError(':', pos)
- pos += 1
- second = {
- if (pos + 1 >= len) localTimeError(pos)
+ val len = input.length
+ var pos, hour, minute = 0
+ if (
+ pos + 4 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') secondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
}
+ ) localTimeError()
+ var second, nano = 0
+ if (pos < len) {
+ if (
+ input.charAt(pos) != ':' || {
+ pos += 1
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ }
+ ) localTimeError()
if (pos < len) {
- if (input.charAt(pos) != '.') charError('.', pos)
- pos += 1
- var nanoDigitWeight = 100000000
- var ch = '0'
- while (
- pos < len && {
- ch = input.charAt(pos)
+ if (
+ input.charAt(pos) != '.' || {
pos += 1
- ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ var nanoDigitWeight = 100000000
+ var ch = '0'
+ while (
+ pos < len && {
+ ch = input.charAt(pos)
+ ch >= '0' && ch <= '9' && nanoDigitWeight != 0
+ }
+ ) {
+ nano += (ch - '0') * nanoDigitWeight
+ nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
+ pos += 1
+ }
+ pos != len
}
- ) {
- nano += (ch - '0') * nanoDigitWeight
- nanoDigitWeight = (nanoDigitWeight * 3435973837L >> 35).toInt // divide a positive int by 10
- }
- if (pos != len || ch < '0' || ch > '9') localTimeError(pos - 1)
+ ) localTimeError()
}
}
LocalTime.of(hour, minute, second, nano)
}
def unsafeParseMonthDay(input: String): MonthDay = {
- if (input.length != 7) error("illegal month day", 0)
- val ch0 = input.charAt(0)
- val ch1 = input.charAt(1)
- val ch2 = input.charAt(2)
- val ch3 = input.charAt(3)
- val ch4 = input.charAt(4)
- val ch5 = input.charAt(5)
- val ch6 = input.charAt(6)
- val month = ch2 * 10 + ch3 - 528 // 528 == '0' * 11
- val day = ch5 * 10 + ch6 - 528 // 528 == '0' * 11
- if (ch0 != '-') charError('-', 0)
- if (ch1 != '-') charError('-', 1)
- if (ch2 < '0' || ch2 > '9') digitError(2)
- if (ch3 < '0' || ch3 > '9') digitError(3)
- if (month < 1 || month > 12) monthError(3)
- if (ch4 != '-') charError('-', 4)
- if (ch5 < '0' || ch5 > '9') digitError(5)
- if (ch6 < '0' || ch6 > '9') digitError(6)
- if (day == 0 || (day > 28 && day > maxDayForMonth(month))) dayError(6)
+ var month, day = 0
+ if (
+ input.length != 7 || {
+ val ch0 = input.charAt(0)
+ val ch1 = input.charAt(1)
+ val ch2 = input.charAt(2)
+ val ch3 = input.charAt(3)
+ val ch4 = input.charAt(4)
+ val ch5 = input.charAt(5)
+ val ch6 = input.charAt(6)
+ month = ch2 * 10 + ch3 - 528 // 528 == '0' * 11
+ day = ch5 * 10 + ch6 - 528 // 528 == '0' * 11
+ ch0 != '-' || ch1 != '-' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || ch4 != '-' ||
+ ch5 < '0' || ch5 > '9' || ch6 < '0' || ch6 > '9' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForMonth(month))
+ }
+ ) monthDayError()
MonthDay.of(month, day)
}
def unsafeParseOffsetDateTime(input: String): OffsetDateTime = {
- val len = input.length
- var pos = 0
- val year = {
- if (pos + 4 >= len) offsetDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val ch3 = input.charAt(pos + 3)
- val ch4 = input.charAt(pos + 4)
- if (ch0 >= '0' && ch0 <= '9') {
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 != '-') charError('-', pos + 4)
- pos += 5
- ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
- } else {
- val yearNeg = ch0 == '-' || (ch0 != '+' && charsOrDigitError('-', '+', pos))
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 < '0' || ch4 > '9') digitError(pos + 4)
+ val len = input.length
+ var pos, year, month, day = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
pos += 5
- var year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
- var yearDigits = 4
- var ch: Char = '0'
- while ({
- if (pos >= len) offsetDateTimeError(pos)
- ch = input.charAt(pos)
- pos += 1
- ch >= '0' && ch <= '9' && yearDigits < 9
- }) {
- year = year * 10 + (ch - '0')
- yearDigits += 1
- }
- if (yearNeg) {
- if (year == 0) yearError(pos - 2)
- year = -year
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ val yearNeg = ch0 == '-' || (ch0 != '+' && offsetDateTimeError())
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while (
+ pos < len && {
+ ch = input.charAt(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
}
- if (ch != '-') yearError(yearNeg, yearDigits, pos - 1)
- year
- }
- }
- val month = {
- if (pos + 2 >= len) offsetDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (month < 1 || month > 12) monthError(pos + 1)
- if (ch2 != '-') charError('-', pos + 2)
- pos += 3
- month
- }
- val day = {
- if (pos + 2 >= len) offsetDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val day = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (day == 0 || (day > 28 && day > maxDayForYearMonth(year, month))) dayError(pos + 1)
- if (ch2 != 'T') charError('T', pos + 2)
- pos += 3
- day
- }
- val hour = {
- if (pos + 2 >= len) offsetDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (hour > 23) hourError(pos + 1)
- if (ch2 != ':') charError(':', pos + 2)
- pos += 3
- hour
- }
- val minute = {
- if (pos + 1 >= len) offsetDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') minuteError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
- var second, nano = 0
- var nanoDigitWeight = -1
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
- var ch = input.charAt(pos)
- pos += 1
- if (ch == ':') {
- nanoDigitWeight = -2
- second = {
- if (pos + 1 >= len) offsetDateTimeError(pos)
+ } || pos + 5 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') secondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ val ch5 = input.charAt(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
}
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
+ ) offsetDateTimeError()
+ var hour, minute = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ } || pos >= len
+ ) offsetDateTimeError()
+ var second, nano = 0
+ var ch = input.charAt(pos)
+ pos += 1
+ if (ch == ':') {
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ } || pos >= len
+ ) offsetDateTimeError()
ch = input.charAt(pos)
pos += 1
if (ch == '.') {
- nanoDigitWeight = 100000000
+ var nanoDigitWeight = 100000000
while ({
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
+ if (pos >= len) offsetDateTimeError()
ch = input.charAt(pos)
pos += 1
ch >= '0' && ch <= '9' && nanoDigitWeight != 0
@@ -752,111 +617,88 @@ private[json] object parsers {
val zoneOffset =
if (ch == 'Z') ZoneOffset.UTC
else {
- val offsetNeg = ch == '-' || (ch != '+' && timezoneSignError(nanoDigitWeight, pos - 1))
- val offsetHour = {
- if (pos + 1 >= len) offsetDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val offsetHour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (offsetHour > 18) timezoneOffsetHourError(pos + 1)
- pos += 2
- offsetHour
- }
- var offsetMinute, offsetSecond = 0
+ val offsetNeg = ch == '-' || (ch != '+' && offsetDateTimeError())
+ var offsetTotal = 0
if (
- pos < len && {
- ch = input.charAt(pos)
- pos += 1
- ch == ':'
- }
- ) {
- offsetMinute = {
- if (pos + 1 >= len) offsetDateTimeError(pos)
+ pos + 1 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetMinuteError(pos + 1)
pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
}
+ ) offsetDateTimeError()
+ if (pos < len) {
if (
- pos < len && {
- ch = input.charAt(pos)
+ input.charAt(pos) != ':' || {
pos += 1
- ch == ':'
- }
- ) {
- offsetSecond = {
- if (pos + 1 >= len) offsetDateTimeError(pos)
+ pos + 1 >= len
+ } || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetSecondError(pos + 1)
pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
}
+ ) offsetDateTimeError()
+ if (pos < len) {
+ if (
+ input.charAt(pos) != ':' || {
+ pos += 1
+ pos + 1 >= len
+ } || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) offsetDateTimeError()
}
}
- toZoneOffset(offsetNeg, offsetHour, offsetMinute, offsetSecond, pos)
+ if (offsetTotal > 64800) offsetDateTimeError() // 64800 == 18 * 60 * 60
+ toZoneOffset(offsetNeg, offsetTotal)
}
- if (pos != len) offsetDateTimeError(pos)
+ if (pos != len) offsetDateTimeError()
OffsetDateTime.of(year, month, day, hour, minute, second, nano, zoneOffset)
}
def unsafeParseOffsetTime(input: String): OffsetTime = {
- val len = input.length
- var pos = 0
- val hour = {
- if (pos + 2 >= len) offsetTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (hour > 23) hourError(pos + 1)
- if (ch2 != ':') charError(':', pos + 2)
- pos += 3
- hour
- }
- val minute = {
- if (pos + 1 >= len) offsetTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') minuteError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
- var second, nano = 0
- var nanoDigitWeight = -1
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
- var ch = input.charAt(pos)
- pos += 1
- if (ch == ':') {
- nanoDigitWeight = -2
- second = {
- if (pos + 1 >= len) offsetTimeError(pos)
+ val len = input.length
+ var pos, hour, minute = 0
+ if (
+ pos + 4 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') secondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ } || pos >= len
+ ) offsetTimeError()
+ var second, nano = 0
+ var ch = input.charAt(pos)
+ pos += 1
+ if (ch == ':') {
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ } || pos >= len
+ ) offsetTimeError()
ch = input.charAt(pos)
pos += 1
if (ch == '.') {
- nanoDigitWeight = 100000000
+ var nanoDigitWeight = 100000000
while ({
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
+ if (pos >= len) offsetTimeError()
ch = input.charAt(pos)
pos += 1
ch >= '0' && ch <= '9' && nanoDigitWeight != 0
@@ -869,88 +711,76 @@ private[json] object parsers {
val zoneOffset =
if (ch == 'Z') ZoneOffset.UTC
else {
- val offsetNeg = ch == '-' || (ch != '+' && timezoneSignError(nanoDigitWeight, pos - 1))
- nanoDigitWeight = -3
- val offsetHour = {
- if (pos + 1 >= len) offsetTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val offsetHour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (offsetHour > 18) timezoneOffsetHourError(pos + 1)
- pos += 2
- offsetHour
- }
- var offsetMinute, offsetSecond = 0
+ val offsetNeg = ch == '-' || (ch != '+' && offsetTimeError())
+ var offsetTotal = 0
if (
- pos < len && {
- ch = input.charAt(pos)
- pos += 1
- ch == ':'
- }
- ) {
- offsetMinute = {
- if (pos + 1 >= len) offsetTimeError(pos)
+ pos + 1 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetMinuteError(pos + 1)
pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
}
+ ) offsetTimeError()
+ if (pos < len) {
if (
- pos < len && {
- ch = input.charAt(pos)
+ input.charAt(pos) != ':' || {
pos += 1
- ch == ':'
- }
- ) {
- nanoDigitWeight = -4
- offsetSecond = {
- if (pos + 1 >= len) offsetTimeError(pos)
+ pos + 1 >= len
+ } || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetSecondError(pos + 1)
pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
}
+ ) offsetTimeError()
+ if (pos < len) {
+ if (
+ input.charAt(pos) != ':' || {
+ pos += 1
+ pos + 1 >= len
+ } || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) offsetTimeError()
}
}
- toZoneOffset(offsetNeg, offsetHour, offsetMinute, offsetSecond, pos)
+ if (offsetTotal > 64800) offsetTimeError() // 64800 == 18 * 60 * 60
+ toZoneOffset(offsetNeg, offsetTotal)
}
- if (pos != len) offsetTimeError(pos)
+ if (pos != len) offsetTimeError()
OffsetTime.of(hour, minute, second, nano, zoneOffset)
}
def unsafeParsePeriod(input: String): Period = {
val len = input.length
var pos, state, years, months, days = 0
- if (pos >= len) periodError(pos)
+ if (pos >= len) periodError()
var ch = input.charAt(pos)
pos += 1
val isNeg = ch == '-'
if (isNeg) {
- if (pos >= len) periodError(pos)
+ if (pos >= len) periodError()
ch = input.charAt(pos)
pos += 1
}
- if (ch != 'P') durationOrPeriodStartError(isNeg, pos - 1)
- if (pos >= len) periodError(pos)
+ if (ch != 'P' || pos >= len) periodError()
ch = input.charAt(pos)
pos += 1
while ({
- if (state == 4 && pos >= len) periodError(pos - 1)
+ if (state == 4 && pos >= len) periodError()
val isNegX = ch == '-'
if (isNegX) {
- if (pos >= len) periodError(pos)
+ if (pos >= len) periodError()
ch = input.charAt(pos)
pos += 1
}
- if (ch < '0' || ch > '9') durationOrPeriodDigitError(isNegX, state <= 1, pos - 1)
+ if (ch < '0' || ch > '9') periodError()
var x: Int = '0' - ch
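+ // the number is accumulated as a negative value so that Int.MinValue can be parsed without overflow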
while (
(pos < len) && {
@@ -963,11 +793,11 @@ private[json] object parsers {
x = x * 10 + ('0' - ch)
x > 0
}
- ) periodError(pos)
+ ) periodError()
pos += 1
}
if (!(isNeg ^ isNegX)) {
- if (x == -2147483648) periodError(pos)
+ if (x == -2147483648) periodError()
x = -x
}
if (ch == 'Y' && state <= 0) {
@@ -977,15 +807,15 @@ private[json] object parsers {
months = x
state = 2
} else if (ch == 'W' && state <= 2) {
- if (x < -306783378 || x > 306783378) periodError(pos)
+ if (x < -306783378 || x > 306783378) periodError()
days = x * 7
state = 3
} else if (ch == 'D') {
val ds = x.toLong + days
- if (ds != ds.toInt) periodError(pos)
+ if (ds != ds.toInt) periodError()
days = ds.toInt
state = 4
- } else periodError(state, pos)
+ } else periodError()
pos += 1
(pos < len) && {
ch = input.charAt(pos)
@@ -997,230 +827,175 @@ private[json] object parsers {
}
def unsafeParseYear(input: String): Year = {
- val len = input.length
- var pos = 0
- val year = {
- if (pos + 3 >= len) yearError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val ch3 = input.charAt(pos + 3)
- if (ch0 >= '0' && ch0 <= '9') {
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (len != 4) yearError(pos + 4)
- pos += 4
- ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
- } else {
- val yearNeg = ch0 == '-' || (ch0 != '+' && charsOrDigitError('-', '+', pos))
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
+ val len = input.length
+ var pos, year = 0
+ if (
+ pos + 3 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
pos += 4
- var year = ch1 * 100 + ch2 * 10 + ch3 - 5328 // 53328 == '0' * 111
- var yearDigits = 3
- var ch: Char = '0'
- while (
- pos < len && {
- ch = input.charAt(pos)
- ch >= '0' && ch <= '9' && yearDigits < 9
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ pos != len
+ } else {
+ val yearNeg = ch0 == '-' || (ch0 != '+' && yearError())
+ year = ch1 * 100 + ch2 * 10 + ch3 - 5328 // 5328 == '0' * 111
+ var yearDigits = 3
+ var ch = '0'
+ while (
+ pos < len && {
+ ch = input.charAt(pos)
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }
+ ) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ pos += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || pos != len
}
- ) {
- year = year * 10 + (ch - '0')
- yearDigits += 1
- pos += 1
- }
- if (yearNeg) {
- if (year == 0) yearError(pos - 1)
- year = -year
}
- if (pos != len || ch < '0' || ch > '9') {
- if (yearDigits == 9) yearError(pos)
- digitError(pos)
- }
- year
}
- }
+ ) yearError()
Year.of(year)
}
def unsafeParseYearMonth(input: String): YearMonth = {
- val len = input.length
- var pos = 0
- val year = {
- if (pos + 4 >= len) yearMonthError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val ch3 = input.charAt(pos + 3)
- val ch4 = input.charAt(pos + 4)
- if (ch0 >= '0' && ch0 <= '9') {
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 != '-') charError('-', pos + 4)
- pos += 5
- ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
- } else {
- val yearNeg = ch0 == '-' || (ch0 != '+' && charsOrDigitError('-', '+', pos))
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 < '0' || ch4 > '9') digitError(pos + 4)
+ val len = input.length
+ var pos, year, month = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
pos += 5
- var year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
- var yearDigits = 4
- var ch: Char = '0'
- while ({
- if (pos >= len) yearMonthError(pos)
- ch = input.charAt(pos)
- pos += 1
- ch >= '0' && ch <= '9' && yearDigits < 9
- }) {
- year = year * 10 + (ch - '0')
- yearDigits += 1
- }
- if (yearNeg) {
- if (year == 0) yearError(pos - 2)
- year = -year
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ val yearNeg = ch0 == '-' || (ch0 != '+' && yearMonthError())
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while ({
+ if (pos >= len) yearMonthError()
+ ch = input.charAt(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
}
- if (ch != '-') yearError(yearNeg, yearDigits, pos - 1)
- year
+ } || pos + 2 != len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || month < 1 || month > 12
}
- }
- val month = {
- if (pos + 1 >= len) yearMonthError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (month < 1 || month > 12) monthError(pos + 1)
- pos += 2
- month
- }
- if (pos != len) yearMonthError(pos)
+ ) yearMonthError()
YearMonth.of(year, month)
}
def unsafeParseZonedDateTime(input: String): ZonedDateTime = {
- val len = input.length
- var pos = 0
- val year = {
- if (pos + 4 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val ch3 = input.charAt(pos + 3)
- val ch4 = input.charAt(pos + 4)
- if (ch0 >= '0' && ch0 <= '9') {
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 != '-') charError('-', pos + 4)
- pos += 5
- ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
- } else {
- val yearNeg = ch0 == '-' || (ch0 != '+' && charsOrDigitError('-', '+', pos))
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch2 < '0' || ch2 > '9') digitError(pos + 2)
- if (ch3 < '0' || ch3 > '9') digitError(pos + 3)
- if (ch4 < '0' || ch4 > '9') digitError(pos + 4)
+ val len = input.length
+ var pos, year, month, day, hour, minute = 0
+ if (
+ pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
pos += 5
- var year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
- var yearDigits = 4
- var ch: Char = '0'
- while ({
- if (pos >= len) zonedDateTimeError(pos)
- ch = input.charAt(pos)
- pos += 1
- ch >= '0' && ch <= '9' && yearDigits < 9
- }) {
- year = year * 10 + (ch - '0')
- yearDigits += 1
- }
- if (yearNeg) {
- if (year == 0) yearError(pos - 2)
- year = -year
+ ch1 < '0' || ch1 > '9' || ch2 < '0' || ch2 > '9' || ch3 < '0' || ch3 > '9' || {
+ if (ch0 >= '0' && ch0 <= '9') {
+ year = ch0 * 1000 + ch1 * 100 + ch2 * 10 + ch3 - 53328 // 53328 == '0' * 1111
+ ch4 != '-'
+ } else {
+ val yearNeg = ch0 == '-' || (ch0 != '+' && zonedDateTimeError())
+ year = ch1 * 1000 + ch2 * 100 + ch3 * 10 + ch4 - 53328 // 53328 == '0' * 1111
+ ch4 < '0' || ch4 > '9' || {
+ var yearDigits = 4
+ var ch = '0'
+ while ({
+ if (pos >= len) zonedDateTimeError()
+ ch = input.charAt(pos)
+ pos += 1
+ ch >= '0' && ch <= '9' && yearDigits < 9
+ }) {
+ year = year * 10 + (ch - '0')
+ yearDigits += 1
+ }
+ yearNeg && {
+ year = -year
+ year == 0
+ } || ch != '-'
+ }
+ }
}
- if (ch != '-') yearError(yearNeg, yearDigits, pos - 1)
- year
- }
- }
- val month = {
- if (pos + 2 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (month < 1 || month > 12) monthError(pos + 1)
- if (ch2 != '-') charError('-', pos + 2)
- pos += 3
- month
- }
- val day = {
- if (pos + 2 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val day = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (day == 0 || (day > 28 && day > maxDayForYearMonth(year, month))) dayError(pos + 1)
- if (ch2 != 'T') charError('T', pos + 2)
- pos += 3
- day
- }
- val hour = {
- if (pos + 2 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val ch2 = input.charAt(pos + 2)
- val hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (hour > 23) hourError(pos + 1)
- if (ch2 != ':') charError(':', pos + 2)
- pos += 3
- hour
- }
- val minute = {
- if (pos + 1 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') minuteError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
- var second, nano = 0
- var nanoDigitWeight = -1
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
- var ch = input.charAt(pos)
- pos += 1
- if (ch == ':') {
- nanoDigitWeight = -2
- second = {
- if (pos + 1 >= len) zonedDateTimeError(pos)
+ } || pos + 5 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') secondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ val ch5 = input.charAt(pos + 5)
+ pos += 6
+ month = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ day = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != '-' || ch3 < '0' || ch3 > '9' ||
+ ch4 < '0' || ch4 > '9' || ch5 != 'T' || month < 1 || month > 12 || day == 0 ||
+ (day > 28 && day > maxDayForYearMonth(year, month))
+ } || pos + 4 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ val ch2 = input.charAt(pos + 2)
+ val ch3 = input.charAt(pos + 3)
+ val ch4 = input.charAt(pos + 4)
+ pos += 5
+ hour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ minute = ch3 * 10 + ch4 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch2 != ':' ||
+ ch3 < '0' || ch3 > '9' || ch4 < '0' || ch4 > '9' || ch3 > '5' || hour > 23
+ } || pos >= len
+ ) zonedDateTimeError()
+ var second, nano = 0
+ var ch = input.charAt(pos)
+ pos += 1
+ if (ch == ':') {
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ second = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ } || pos >= len
+ ) zonedDateTimeError()
ch = input.charAt(pos)
pos += 1
if (ch == '.') {
- nanoDigitWeight = 100000000
+ var nanoDigitWeight = 100000000
while ({
- if (pos >= len) timezoneSignError(nanoDigitWeight, pos)
+ if (pos >= len) zonedDateTimeError()
ch = input.charAt(pos)
pos += 1
ch >= '0' && ch <= '9' && nanoDigitWeight != 0
@@ -1231,98 +1006,93 @@ private[json] object parsers {
}
}
val localDateTime = LocalDateTime.of(year, month, day, hour, minute, second, nano)
- val zoneOffset =
+ val zoneOffset =
if (ch == 'Z') {
if (pos < len) {
ch = input.charAt(pos)
- if (ch != '[') charError('[', pos)
+ if (ch != '[') zonedDateTimeError()
pos += 1
}
ZoneOffset.UTC
} else {
- val offsetNeg = ch == '-' || (ch != '+' && timezoneSignError(nanoDigitWeight, pos - 1))
- nanoDigitWeight = -3
- val offsetHour = {
- if (pos + 1 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val offsetHour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (offsetHour > 18) timezoneOffsetHourError(pos + 1)
- pos += 2
- offsetHour
- }
- var offsetMinute, offsetSecond = 0
+ val offsetNeg = ch == '-' || (ch != '+' && zonedDateTimeError())
+ var offsetTotal = 0
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
+ }
+ ) zonedDateTimeError()
if (
pos < len && {
ch = input.charAt(pos)
pos += 1
- ch == ':' || ch != '[' && charError('[', pos - 1)
+ ch == ':' || ch != '[' && zonedDateTimeError()
}
) {
- offsetMinute = {
- if (pos + 1 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetMinuteError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) zonedDateTimeError()
if (
pos < len && {
ch = input.charAt(pos)
pos += 1
- ch == ':' || ch != '[' && charError('[', pos - 1)
+ ch == ':' || ch != '[' && zonedDateTimeError()
}
) {
- nanoDigitWeight = -4
- offsetSecond = {
- if (pos + 1 >= len) zonedDateTimeError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetSecondError(pos + 1)
- pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- }
+ if (
+ pos + 1 >= len || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) zonedDateTimeError()
if (pos < len) {
ch = input.charAt(pos)
- if (ch != '[') charError('[', pos)
+ if (ch != '[') zonedDateTimeError()
pos += 1
}
}
}
- toZoneOffset(offsetNeg, offsetHour, offsetMinute, offsetSecond, pos)
+ if (offsetTotal > 64800) zonedDateTimeError() // 64800 == 18 * 60 * 60
+ toZoneOffset(offsetNeg, offsetTotal)
}
if (ch == '[') {
- val zone =
- try {
- val from = pos
- while ({
- if (pos >= len) zonedDateTimeError(pos)
- ch = input.charAt(pos)
- ch != ']'
- }) pos += 1
- val key = input.substring(from, pos)
- var zoneId = zoneIds.get(key)
- if (
- (zoneId eq null) && {
- zoneId = ZoneId.of(key)
- !zoneId.isInstanceOf[ZoneOffset] || zoneId.asInstanceOf[ZoneOffset].getTotalSeconds % 900 == 0
- }
- ) zoneIds.put(key, zoneId)
- zoneId
- } catch {
- case _: DateTimeException => zonedDateTimeError(pos - 1)
+ var zoneId: ZoneId = null
+ val from = pos
+ while ({
+ if (pos >= len) zonedDateTimeError()
+ ch = input.charAt(pos)
+ ch != ']'
+ }) pos += 1
+ val key = input.substring(from, pos)
+ zoneId = zoneIds.get(key)
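+ // on a cache miss the zone is parsed and cached, unless it is a ZoneOffset whose total
+ // seconds are not a multiple of 900 (15 minutes)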
+ if (
+ (zoneId eq null) && {
+ try zoneId = ZoneId.of(key)
+ catch {
+ case _: DateTimeException => zonedDateTimeError()
+ }
+ !zoneId.isInstanceOf[ZoneOffset] || zoneId.asInstanceOf[ZoneOffset].getTotalSeconds % 900 == 0
}
- pos += 1
- if (pos != len) zonedDateTimeError(pos)
- ZonedDateTime.ofInstant(localDateTime, zoneOffset, zone)
- } else ZonedDateTime.ofLocal(localDateTime, zoneOffset, null)
+ ) zoneIds.put(key, zoneId)
+ if (pos + 1 != len) zonedDateTimeError()
+ ZonedDateTime.ofInstant(localDateTime, zoneOffset, zoneId)
+ } else {
+ if (pos != len) zonedDateTimeError()
+ ZonedDateTime.ofLocal(localDateTime, zoneOffset, null)
+ }
}
def unsafeParseZoneId(input: String): ZoneId =
@@ -1336,103 +1106,81 @@ private[json] object parsers {
) zoneIds.put(input, zoneId)
zoneId
} catch {
- case _: DateTimeException => error("illegal zone id", 0)
+ case _: DateTimeException => zoneIdError()
}
def unsafeParseZoneOffset(input: String): ZoneOffset = {
- val len = input.length
- var pos, nanoDigitWeight = 0
- if (pos >= len) zoneOffsetError(pos)
- var ch = input.charAt(pos)
+ val len = input.length
+ var pos = 0
+ if (pos >= len) zoneOffsetError()
+ val ch = input.charAt(pos)
pos += 1
- if (ch == 'Z') ZoneOffset.UTC
- else {
- val offsetNeg = ch == '-' || (ch != '+' && timezoneSignError(nanoDigitWeight, pos - 1))
- nanoDigitWeight = -3
- val offsetHour = {
- if (pos + 1 >= len) zoneOffsetError(pos)
- val ch0 = input.charAt(pos)
- val ch1 = input.charAt(pos + 1)
- val offsetHour = ch0 * 10 + ch1 - 528 // 528 == '0' * 11
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (offsetHour > 18) timezoneOffsetHourError(pos + 1)
- pos += 2
- offsetHour
- }
- var offsetMinute, offsetSecond = 0
+ if (ch == 'Z') {
+ if (pos != len) zoneOffsetError()
+ ZoneOffset.UTC
+ } else {
+ val offsetNeg = ch == '-' || (ch != '+' && zoneOffsetError())
+ var offsetTotal = 0
if (
- pos < len && {
- ch = input.charAt(pos)
- pos += 1
- ch == ':'
- }
- ) {
- offsetMinute = {
- if (pos + 1 >= len) zoneOffsetError(pos)
+ pos + 1 >= len || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetMinuteError(pos + 1)
+ offsetTotal = (ch0 * 10 + ch1 - 528) * 3600 // 528 == '0' * 11
pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9'
}
+ ) zoneOffsetError()
+ if (pos < len) {
if (
- pos < len && {
- ch = input.charAt(pos)
+ input.charAt(pos) != ':' || {
pos += 1
- ch == ':'
- }
- ) {
- nanoDigitWeight = -4
- offsetSecond = {
- if (pos + 1 >= len) zoneOffsetError(pos)
+ pos + 1 >= len
+ } || {
val ch0 = input.charAt(pos)
val ch1 = input.charAt(pos + 1)
- if (ch0 < '0' || ch0 > '9') digitError(pos)
- if (ch1 < '0' || ch1 > '9') digitError(pos + 1)
- if (ch0 > '5') timezoneOffsetSecondError(pos + 1)
pos += 2
- ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ offsetTotal += (ch0 * 10 + ch1 - 528) * 60 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
}
+ ) zoneOffsetError()
+ if (pos < len) {
+ if (
+ input.charAt(pos) != ':' || {
+ pos += 1
+ pos + 1 >= len
+ } || {
+ val ch0 = input.charAt(pos)
+ val ch1 = input.charAt(pos + 1)
+ pos += 2
+ offsetTotal += ch0 * 10 + ch1 - 528 // 528 == '0' * 11
+ ch0 < '0' || ch0 > '9' || ch1 < '0' || ch1 > '9' || ch0 > '5'
+ }
+ ) zoneOffsetError()
}
}
- if (pos != len) zoneOffsetError(pos)
- toZoneOffset(offsetNeg, offsetHour, offsetMinute, offsetSecond, pos)
+ if (offsetTotal > 64800 || pos != len) zoneOffsetError() // 64800 == 18 * 60 * 60
+ toZoneOffset(offsetNeg, offsetTotal)
}
}
- private[this] def toZoneOffset(
- offsetNeg: Boolean,
- offsetHour: Int,
- offsetMinute: Int,
- offsetSecond: Int,
- pos: Int
- ): ZoneOffset = {
- var offsetTotal = offsetHour * 3600 + offsetMinute * 60 + offsetSecond
- var qp = offsetTotal * 37283
- if (offsetTotal > 64800) zoneOffsetError(pos) // 64800 == 18 * 60 * 60
- if ((qp & 0x1ff8000) == 0) { // check if offsetTotal divisible by 900
- qp >>>= 25 // divide offsetTotal by 900
+ private[this] def toZoneOffset(offsetNeg: Boolean, offsetTotal: Int): ZoneOffset = {
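+ // 37283 == (1 << 25) / 900 + 1: for offsetTotal <= 64800 the multiply-and-shift divides by 900,
+ // and bits 15-24 of the product are all zero exactly when offsetTotal is a multiple of 900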
+ var qp = offsetTotal * 37283
+ if ((qp & 0x1ff8000) == 0) { // check if offsetTotal divisible by 900
+ qp >>>= 25 // divide offsetTotal by 900
if (offsetNeg) qp = -qp
var zoneOffset = zoneOffsets(qp + 72)
if (zoneOffset ne null) zoneOffset
else {
- if (offsetNeg) offsetTotal = -offsetTotal
- zoneOffset = ZoneOffset.ofTotalSeconds(offsetTotal)
+ zoneOffset = ZoneOffset.ofTotalSeconds(if (offsetNeg) -offsetTotal else offsetTotal)
zoneOffsets(qp + 72) = zoneOffset
zoneOffset
}
- } else {
- if (offsetNeg) offsetTotal = -offsetTotal
- ZoneOffset.ofTotalSeconds(offsetTotal)
- }
+ } else ZoneOffset.ofTotalSeconds(if (offsetNeg) -offsetTotal else offsetTotal)
}
- private[this] def sumSeconds(s1: Long, s2: Long, pos: Int): Long = {
+ private[this] def sumSeconds(s1: Long, s2: Long): Long = {
val s = s1 + s2
- if (((s1 ^ s) & (s2 ^ s)) < 0) durationError(pos)
+ if (((s1 ^ s) & (s2 ^ s)) < 0) durationError()
s
}
@@ -1464,114 +1212,33 @@ private[json] object parsers {
((cp ^ cc) & 0x1fc0000000L) != 0 || (((cp >> 37).toInt - cc) & 0x3) == 0
}
- private[this] def nanoError(nanoDigitWeight: Int, ch: Char, pos: Int): Nothing = {
- if (nanoDigitWeight == 0) charError(ch, pos)
- charOrDigitError(ch, pos)
- }
-
- private[this] def durationOrPeriodStartError(isNeg: Boolean, pos: Int) =
- error(
- if (isNeg) "expected 'P'"
- else "expected 'P' or '-'",
- pos
- )
-
- private[this] def durationOrPeriodDigitError(isNegX: Boolean, isNumReq: Boolean, pos: Int): Nothing =
- error(
- if (isNegX) "expected digit"
- else if (isNumReq) "expected '-' or digit"
- else "expected '\"' or '-' or digit",
- pos
- )
-
- private[this] def durationError(state: Int, pos: Int): Nothing =
- error(
- (state: @switch) match {
- case 0 => "expected 'D' or digit"
- case 1 => "expected 'H' or 'M' or 'S or '.' or digit"
- case 2 => "expected 'M' or 'S or '.' or digit"
- case 3 => "expected 'S or '.' or digit"
- },
- pos
- )
-
- private[this] def durationError(pos: Int) = error("illegal duration", pos)
-
- private[this] def timezoneSignError(nanoDigitWeight: Int, pos: Int) =
- error(
- if (nanoDigitWeight == -2) "expected '.' or '+' or '-' or 'Z'"
- else if (nanoDigitWeight == -1) "expected ':' or '+' or '-' or 'Z'"
- else if (nanoDigitWeight == 0) "expected '+' or '-' or 'Z'"
- else "expected digit or '+' or '-' or 'Z'",
- pos
- )
-
- private[this] def instantError(pos: Int) = error("illegal instant", pos)
-
- private[this] def localDateError(pos: Int) = error("illegal local date", pos)
-
- private[this] def localDateTimeError(pos: Int) = error("illegal local date time", pos)
-
- private[this] def localTimeError(pos: Int) = error("illegal local time", pos)
-
- private[this] def offsetDateTimeError(pos: Int) = error("illegal offset date time", pos)
-
- private[this] def offsetTimeError(pos: Int) = error("illegal offset time", pos)
-
- private[this] def periodError(state: Int, pos: Int): Nothing =
- error(
- (state: @switch) match {
- case 0 => "expected 'Y' or 'M' or 'W' or 'D' or digit"
- case 1 => "expected 'M' or 'W' or 'D' or digit"
- case 2 => "expected 'W' or 'D' or digit"
- case 3 => "expected 'D' or digit"
- },
- pos
- )
-
- private[this] def periodError(pos: Int) = error("illegal period", pos)
-
- private[this] def yearMonthError(pos: Int) = error("illegal year month", pos)
-
- private[this] def zonedDateTimeError(pos: Int) = error("illegal zoned date time", pos)
-
- private[this] def zoneOffsetError(pos: Int) = error("illegal zone offset", pos)
-
- private[this] def yearError(yearNeg: Boolean, yearDigits: Int, pos: Int) = {
- if (!yearNeg && yearDigits == 4) digitError(pos)
- if (yearDigits == 9) charError('-', pos)
- charOrDigitError('-', pos)
- }
-
- private[this] def yearError(pos: Int) = error("illegal year", pos)
+ @noinline private[this] def durationError() = error("expected a Duration")
- private[this] def monthError(pos: Int) = error("illegal month", pos)
+ @noinline private[this] def instantError() = error("expected an Instant")
- private[this] def dayError(pos: Int) = error("illegal day", pos)
+ @noinline private[this] def localDateError() = error("expected a LocalDate")
- private[this] def hourError(pos: Int) = error("illegal hour", pos)
+ @noinline private[this] def localDateTimeError() = error("expected a LocalDateTime")
- private[this] def minuteError(pos: Int) = error("illegal minute", pos)
+ @noinline private[this] def localTimeError() = error("expected a LocalTime")
- private[this] def secondError(pos: Int) = error("illegal second", pos)
+ @noinline private[this] def offsetDateTimeError() = error("expected an OffsetDateTime")
- private[this] def timezoneOffsetHourError(pos: Int) = error("illegal timezone offset hour", pos)
+ @noinline private[this] def offsetTimeError() = error("expected an OffsetTime")
- private[this] def timezoneOffsetMinuteError(pos: Int) = error("illegal timezone offset minute", pos)
+ @noinline private[this] def periodError() = error("expected a Period")
- private[this] def timezoneOffsetSecondError(pos: Int) = error("illegal timezone offset second", pos)
+ @noinline private[this] def monthDayError() = error("expected a MonthDay")
- private[this] def digitError(pos: Int) = error("expected digit", pos)
+ @noinline private[this] def yearMonthError() = error("expected a YearMonth")
- private[this] def charsOrDigitError(ch1: Char, ch2: Char, pos: Int) =
- error(s"expected '$ch1' or '$ch2' or digit", pos)
+ @noinline private[this] def zonedDateTimeError() = error("expected a ZonedDateTime")
- private[this] def charsError(ch1: Char, ch2: Char, pos: Int) = error(s"expected '$ch1' or '$ch2'", pos)
+ @noinline private[this] def zoneOffsetError() = error("expected a ZoneOffset")
- private[this] def charOrDigitError(ch1: Char, pos: Int) = error(s"expected '$ch1' or digit", pos)
+ @noinline private[this] def zoneIdError() = error("expected a ZoneId")
- private[this] def charError(ch: Char, pos: Int) = error(s"expected '$ch'", pos)
+ @noinline private[this] def yearError() = error("expected a Year")
- private[this] def error(msg: String, pos: Int) =
- throw new DateTimeException(msg + " at index " + pos) with NoStackTrace
+ private[this] def error(msg: String): Nothing = throw new DateTimeException(msg) with NoStackTrace
}
diff --git a/zio-json/shared/src/main/scala/zio/json/javatime/serializers.scala b/zio-json/shared/src/main/scala/zio/json/javatime/serializers.scala
index 9fa4365b7..8b739cfb4 100644
--- a/zio-json/shared/src/main/scala/zio/json/javatime/serializers.scala
+++ b/zio-json/shared/src/main/scala/zio/json/javatime/serializers.scala
@@ -15,15 +15,22 @@
*/
package zio.json.javatime
+import zio.json.internal.{ FastStringWrite, SafeNumbers, Write }
+
import java.time._
private[json] object serializers {
def toString(x: Duration): String = {
- val s = new java.lang.StringBuilder(16)
- s.append('P').append('T')
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: Duration, out: Write): Unit = {
+ out.write('P', 'T')
val totalSecs = x.getSeconds
var nano = x.getNano
- if ((totalSecs | nano) == 0) s.append('0').append('S')
+ if ((totalSecs | nano) == 0) out.write('0', 'S')
else {
var effectiveTotalSecs = totalSecs
if (totalSecs < 0 && nano > 0) effectiveTotalSecs += 1
@@ -31,35 +38,40 @@ private[json] object serializers {
val secsOfHour = (effectiveTotalSecs - hours * 3600).toInt
val minutes = secsOfHour / 60
val seconds = secsOfHour - minutes * 60
- if (hours != 0) s.append(hours).append('H')
- if (minutes != 0) s.append(minutes).append('M')
+ if (hours != 0) {
+ SafeNumbers.write(hours, out)
+ out.write('H')
+ }
+ if (minutes != 0) {
+ SafeNumbers.write(minutes, out)
+ out.write('M')
+ }
if ((seconds | nano) != 0) {
- if (totalSecs < 0 && seconds == 0) s.append('-').append('0')
- else s.append(seconds)
+ if (totalSecs < 0 && seconds == 0) out.write('-', '0')
+ else SafeNumbers.write(seconds, out)
if (nano != 0) {
if (totalSecs < 0) nano = 1000000000 - nano
- val dotPos = s.length
- s.append(nano + 1000000000)
- var i = s.length - 1
- while (s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s.setCharAt(dotPos, '.')
+ SafeNumbers.writeNano(nano, out)
}
- s.append('S')
+ out.write('S')
}
}
- s.toString
}
def toString(x: Instant): String = {
- val s = new java.lang.StringBuilder(32)
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: Instant, out: Write): Unit = {
val epochSecond = x.getEpochSecond
- val epochDay =
+ val epochDay =
(if (epochSecond >= 0) epochSecond
else epochSecond - 86399) / 86400 // 86400 == seconds per day
- val secsOfDay = (epochSecond - epochDay * 86400).toInt
- var marchZeroDay = epochDay + 719468 // 719468 == 719528 - 60 == days 0000 to 1970 - days 1st Jan to 1st Mar
- var adjustYear = 0
+ val secsOfDay = (epochSecond - epochDay * 86400).toInt
+ var marchZeroDay = epochDay + 719468 // 719468 == 719528 - 60 == days 0000 to 1970 - days 1st Jan to 1st Mar
+ var adjustYear = 0
if (marchZeroDay < 0) { // adjust negative years to positive for calculation
val adjust400YearCycles = to400YearCycle(marchZeroDay + 1) - 1
adjustYear = adjust400YearCycles * 400
@@ -73,7 +85,7 @@ private[json] object serializers {
}
val marchMonth = (marchDayOfYear * 17135 + 6854) >> 19 // (marchDayOfYear * 5 + 2) / 153
year += (marchMonth * 3277 >> 15) + adjustYear // year += marchMonth / 10 + adjustYear (reset any negative year and convert march-based values back to january-based)
- val month = marchMonth +
+ val month = marchMonth +
(if (marchMonth < 10) 3
else -9)
val day =
@@ -82,187 +94,242 @@ private[json] object serializers {
val secsOfHour = secsOfDay - hour * 3600
val minute = secsOfHour * 17477 >> 20 // divide a small positive int by 60
val second = secsOfHour - minute * 60
- appendYear(year, s)
- append2Digits(month, s.append('-'))
- append2Digits(day, s.append('-'))
- append2Digits(hour, s.append('T'))
- append2Digits(minute, s.append(':'))
- append2Digits(second, s.append(':'))
+ writeYear(year, out)
+ out.write('-')
+ SafeNumbers.write2Digits(month, out)
+ out.write('-')
+ SafeNumbers.write2Digits(day, out)
+ out.write('T')
+ SafeNumbers.write2Digits(hour, out)
+ out.write(':')
+ SafeNumbers.write2Digits(minute, out)
+ out.write(':')
+ SafeNumbers.write2Digits(second, out)
val nano = x.getNano
if (nano != 0) {
- s.append('.')
+ out.write('.')
val q1 = nano / 1000000
val r1 = nano - q1 * 1000000
- append3Digits(q1, s)
+ SafeNumbers.write3Digits(q1, out)
if (r1 != 0) {
val q2 = r1 / 1000
val r2 = r1 - q2 * 1000
- append3Digits(q2, s)
- if (r2 != 0) append3Digits(r2, s)
+ SafeNumbers.write3Digits(q2, out)
+ if (r2 != 0) SafeNumbers.write3Digits(r2, out)
}
}
- s.append('Z').toString
+ out.write('Z')
}
def toString(x: LocalDate): String = {
- val s = new java.lang.StringBuilder(16)
- appendLocalDate(x, s)
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: LocalDate, out: Write): Unit = {
+ writeYear(x.getYear, out)
+ out.write('-')
+ SafeNumbers.write2Digits(x.getMonthValue, out)
+ out.write('-')
+ SafeNumbers.write2Digits(x.getDayOfMonth, out)
}
def toString(x: LocalDateTime): String = {
- val s = new java.lang.StringBuilder(32)
- appendLocalDate(x.toLocalDate, s)
- appendLocalTime(x.toLocalTime, s.append('T'))
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: LocalDateTime, out: Write): Unit = {
+ write(x.toLocalDate, out)
+ out.write('T')
+ write(x.toLocalTime, out)
}
def toString(x: LocalTime): String = {
- val s = new java.lang.StringBuilder(24)
- appendLocalTime(x, s)
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: LocalTime, out: Write): Unit = {
+ SafeNumbers.write2Digits(x.getHour, out)
+ out.write(':')
+ SafeNumbers.write2Digits(x.getMinute, out)
+ out.write(':')
+ SafeNumbers.write2Digits(x.getSecond, out)
+ val nano = x.getNano
+ if (nano != 0) SafeNumbers.writeNano(nano, out)
}
def toString(x: MonthDay): String = {
- val s = new java.lang.StringBuilder(8)
- append2Digits(x.getMonthValue, s.append('-').append('-'))
- append2Digits(x.getDayOfMonth, s.append('-'))
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: MonthDay, out: Write): Unit = {
+ out.write('-', '-')
+ SafeNumbers.write2Digits(x.getMonthValue, out)
+ out.write('-')
+ SafeNumbers.write2Digits(x.getDayOfMonth, out)
}
def toString(x: OffsetDateTime): String = {
- val s = new java.lang.StringBuilder(48)
- appendLocalDate(x.toLocalDate, s)
- appendLocalTime(x.toLocalTime, s.append('T'))
- appendZoneOffset(x.getOffset, s)
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: OffsetDateTime, out: Write): Unit = {
+ write(x.toLocalDate, out)
+ out.write('T')
+ write(x.toLocalTime, out)
+ write(x.getOffset, out)
}
def toString(x: OffsetTime): String = {
- val s = new java.lang.StringBuilder(32)
- appendLocalTime(x.toLocalTime, s)
- appendZoneOffset(x.getOffset, s)
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: OffsetTime, out: Write): Unit = {
+ write(x.toLocalTime, out)
+ write(x.getOffset, out)
}
def toString(x: Period): String = {
- val s = new java.lang.StringBuilder(16)
- s.append('P')
- if (x.isZero) s.append('0').append('D')
+ val out = writes.get
+ write(x, out)
+ out.toString
+ }
+
+ def write(x: Period, out: Write): Unit = {
+ out.write('P')
+ if (x.isZero) out.write('0', 'D')
else {
val years = x.getYears
val months = x.getMonths
val days = x.getDays
- if (years != 0) s.append(years).append('Y')
- if (months != 0) s.append(months).append('M')
- if (days != 0) s.append(days).append('D')
+ if (years != 0) {
+ SafeNumbers.write(years, out)
+ out.write('Y')
+ }
+ if (months != 0) {
+ SafeNumbers.write(months, out)
+ out.write('M')
+ }
+ if (days != 0) {
+ SafeNumbers.write(days, out)
+ out.write('D')
+ }
}
- s.toString
}
def toString(x: Year): String = {
- val s = new java.lang.StringBuilder(16)
- appendYear(x.getValue, s)
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
}
+ @inline def write(x: Year, out: Write): Unit = writeYear(x.getValue, out)
+
def toString(x: YearMonth): String = {
- val s = new java.lang.StringBuilder(16)
- appendYear(x.getYear, s)
- append2Digits(x.getMonthValue, s.append('-'))
- s.toString
+ val out = writes.get
+ write(x, out)
+ out.toString
}
- def toString(x: ZonedDateTime): String = {
- val s = new java.lang.StringBuilder(48)
- appendLocalDate(x.toLocalDate, s)
- appendLocalTime(x.toLocalTime, s.append('T'))
- appendZoneOffset(x.getOffset, s)
- val zone = x.getZone
- if (!zone.isInstanceOf[ZoneOffset]) s.append('[').append(zone.getId).append(']')
- s.toString
+ def write(x: YearMonth, out: Write): Unit = {
+ writeYear(x.getYear, out)
+ out.write('-')
+ SafeNumbers.write2Digits(x.getMonthValue, out)
}
- def toString(x: ZoneId): String = x.getId
-
- def toString(x: ZoneOffset): String = {
- val s = new java.lang.StringBuilder(16)
- appendZoneOffset(x, s)
- s.toString
+ def toString(x: ZonedDateTime): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
}
- private[this] def appendLocalDate(x: LocalDate, s: java.lang.StringBuilder): Unit = {
- appendYear(x.getYear, s)
- append2Digits(x.getMonthValue, s.append('-'))
- append2Digits(x.getDayOfMonth, s.append('-'))
+ def write(x: ZonedDateTime, out: Write): Unit = {
+ write(x.toLocalDate, out)
+ out.write('T')
+ write(x.toLocalTime, out)
+ write(x.getOffset, out)
+ val zone = x.getZone
+ if (!zone.isInstanceOf[ZoneOffset]) {
+ out.write('[')
+ out.write(zone.getId)
+ out.write(']')
+ }
}
- private[this] def appendLocalTime(x: LocalTime, s: java.lang.StringBuilder): Unit = {
- append2Digits(x.getHour, s)
- append2Digits(x.getMinute, s.append(':'))
- append2Digits(x.getSecond, s.append(':'))
- val nano = x.getNano
- if (nano != 0) {
- val dotPos = s.length
- s.append(nano + 1000000000)
- var i = s.length - 1
- while (s.charAt(i) == '0') i -= 1
- s.setLength(i + 1)
- s.setCharAt(dotPos, '.')
- }
+ @inline def toString(x: ZoneId): String = x.getId
+
+ @inline def write(x: ZoneId, out: Write): Unit = out.write(x.getId)
+
+ def toString(x: ZoneOffset): String = {
+ val out = writes.get
+ write(x, out)
+ out.toString
}
- private[this] def appendZoneOffset(x: ZoneOffset, s: java.lang.StringBuilder): Unit = {
+ def write(x: ZoneOffset, out: Write): Unit = {
val totalSeconds = x.getTotalSeconds
- if (totalSeconds == 0) s.append('Z'): Unit
+ if (totalSeconds == 0) out.write('Z'): Unit
else {
val q0 =
if (totalSeconds > 0) {
- s.append('+')
+ out.write('+')
totalSeconds
} else {
- s.append('-')
+ out.write('-')
-totalSeconds
}
val q1 = q0 * 37283 >>> 27 // divide a small positive int by 3600
val r1 = q0 - q1 * 3600
- append2Digits(q1, s)
- s.append(':')
+ SafeNumbers.write2Digits(q1, out)
+ out.write(':')
val q2 = r1 * 17477 >> 20 // divide a small positive int by 60
val r2 = r1 - q2 * 60
- append2Digits(q2, s)
- if (r2 != 0) append2Digits(r2, s.append(':'))
+ SafeNumbers.write2Digits(q2, out)
+ if (r2 != 0) {
+ out.write(':')
+ SafeNumbers.write2Digits(r2, out)
+ }
}
}
- private[this] def appendYear(x: Int, s: java.lang.StringBuilder): Unit =
+ private[this] def writeYear(x: Int, out: Write): Unit =
if (x >= 0) {
- if (x < 10000) append4Digits(x, s)
- else s.append('+').append(x): Unit
- } else if (x > -10000) append4Digits(-x, s.append('-'))
- else s.append(x): Unit
-
- private[this] def append4Digits(x: Int, s: java.lang.StringBuilder): Unit = {
- val q = x * 5243 >> 19 // divide a 4-digit positive int by 100
- append2Digits(q, s)
- append2Digits(x - q * 100, s)
- }
-
- private[this] def append3Digits(x: Int, s: java.lang.StringBuilder): Unit = {
- val q = x * 1311 >> 17 // divide a 3-digit positive int by 100
- append2Digits(x - q * 100, s.append((q + '0').toChar))
- }
-
- private[this] def append2Digits(x: Int, s: java.lang.StringBuilder): Unit = {
- val q = x * 103 >> 10 // divide a 2-digit positive int by 10
- s.append((q + '0').toChar).append((x + '0' - q * 10).toChar): Unit
- }
+ if (x < 10000) SafeNumbers.write4Digits(x, out)
+ else {
+ out.write('+')
+ SafeNumbers.write(x, out): Unit
+ }
+ } else if (x > -10000) {
+ out.write('-')
+ SafeNumbers.write4Digits(-x, out)
+ } else SafeNumbers.write(x, out): Unit
- private[this] def to400YearCycle(day: Long): Int =
+ @inline private[this] def to400YearCycle(day: Long): Int =
(day / 146097).toInt // 146097 == number of days in a 400 year cycle
- private[this] def toMarchDayOfYear(marchZeroDay: Long, year: Int): Int = {
+ @inline private[this] def toMarchDayOfYear(marchZeroDay: Long, year: Int): Int = {
val century = year / 100
(marchZeroDay - year * 365L).toInt - (year >> 2) + century - (century >> 2)
}
+
+ private[this] val writes = new ThreadLocal[FastStringWrite] {
+ override def initialValue(): FastStringWrite = new FastStringWrite(64)
+
+ override def get: FastStringWrite = {
+ val w = super.get
+ w.reset()
+ w
+ }
+ }
}
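Note on the hunk above: the `toString` overloads now reuse a thread-local `FastStringWrite` buffer instead of allocating a fresh `StringBuilder` per call, and the writer keeps the reciprocal-multiplication trick from the removed `append4Digits`/`append3Digits`/`append2Digits` helpers: for a small, bounded, non-negative `x`, an integer division is replaced by a multiply and a shift (for example `x * 17477 >> 20` for `x / 60` and `x * 37283 >>> 27` for `x / 3600`). These identities only hold on bounded ranges, so the standalone sketch below (not part of the patch; the range bounds are assumptions taken from the call sites) checks them exhaustively.

    // Standalone sanity check for the reciprocal-multiplication divisions used above.
    // Each identity is only claimed on the bounded range the encoder feeds it.
    object DivisionTrickCheck {
      def main(args: Array[String]): Unit = {
        val div60   = (0 until 3600).forall(x => (x * 17477 >> 20) == x / 60)      // seconds within an hour
        val div3600 = (0 to 18 * 3600).forall(x => (x * 37283 >>> 27) == x / 3600) // zone offsets up to 18:00
        val div100a = (0 until 10000).forall(x => (x * 5243 >> 19) == x / 100)     // 4-digit ints
        val div100b = (0 until 1000).forall(x => (x * 1311 >> 17) == x / 100)      // 3-digit ints
        val div10   = (0 until 100).forall(x => (x * 103 >> 10) == x / 10)         // 2-digit ints
        println(Seq(div60, div3600, div100a, div100b, div10).forall(identity))     // prints true
      }
    }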
diff --git a/zio-json/shared/src/main/scala/zio/json/package.scala b/zio-json/shared/src/main/scala/zio/json/package.scala
index b9747b164..deca80500 100644
--- a/zio-json/shared/src/main/scala/zio/json/package.scala
+++ b/zio-json/shared/src/main/scala/zio/json/package.scala
@@ -19,12 +19,12 @@ import zio.json.ast.Json
package object json extends JsonPackagePlatformSpecific {
implicit final class EncoderOps[A](private val a: A) extends AnyVal {
- def toJson(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, None).toString
+ @inline def toJson(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, None).toString
// Jon Pretty's better looking brother, but a bit slower
- def toJsonPretty(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, Some(0)).toString
+ @inline def toJsonPretty(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, Some(0)).toString
- def toJsonAST(implicit encoder: JsonEncoder[A]): Either[String, Json] = encoder.toJsonAST(a)
+ @inline def toJsonAST(implicit encoder: JsonEncoder[A]): Either[String, Json] = encoder.toJsonAST(a)
}
implicit final class DecoderOps(private val json: CharSequence) extends AnyVal {
@@ -32,14 +32,12 @@ package object json extends JsonPackagePlatformSpecific {
/**
* Attempts to decode the raw JSON string as an `A`.
*
- * On failure a human readable message is returned using a jq friendly
- * format. For example the error
- * `.rows[0].elements[0].distance.value(missing)"` tells us the location of a
- * missing field named "value". We can use part of the error message in the
- * `jq` command line tool for further inspection, e.g.
+ * On failure a human readable message is returned using a jq friendly format. For example the error
+ * `.rows[0].elements[0].distance.value(missing)` tells us the location of a missing field named "value". We can
+ * use part of the error message in the `jq` command line tool for further inspection, e.g.
*
* {{{jq '.rows[0].elements[0].distance' input.json}}}
*/
- def fromJson[A](implicit decoder: JsonDecoder[A]): Either[String, A] = decoder.decodeJson(json)
+ @inline def fromJson[A](implicit decoder: JsonDecoder[A]): Either[String, A] = decoder.decodeJson(json)
}
}
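A minimal usage sketch of the extension methods above, assuming a hypothetical `Element` case class with a derived `JsonCodec` (the same pattern the test files later in this patch rely on):

    import zio.json._

    // Hypothetical model, only to illustrate the toJson / fromJson extension syntax.
    final case class Element(distance: Int)
    object Element {
      implicit val codec: JsonCodec[Element] = DeriveJsonCodec.gen
    }

    object ExtensionOpsExample {
      def main(args: Array[String]): Unit = {
        val json = Element(42).toJson                         // {"distance":42}
        val back = json.fromJson[Element]                     // Right(Element(42))
        val bad  = """{"distance":"far"}""".fromJson[Element] // Left(...) with a jq-style path such as .distance(...)
        println((json, back, bad))
      }
    }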
diff --git a/zio-json/shared/src/main/scala/zio/json/uuid/UUIDParser.scala b/zio-json/shared/src/main/scala/zio/json/uuid/UUIDParser.scala
index b976fe265..372a160f2 100644
--- a/zio-json/shared/src/main/scala/zio/json/uuid/UUIDParser.scala
+++ b/zio-json/shared/src/main/scala/zio/json/uuid/UUIDParser.scala
@@ -15,15 +15,13 @@
*/
package zio.json.uuid
-import scala.annotation.nowarn
+import java.util.UUID
+import scala.util.control.NoStackTrace
-// A port of https://github.com/openjdk/jdk/commit/ebadfaeb2e1cc7b5ce5f101cd8a539bc5478cf5b with optimizations applied
private[json] object UUIDParser {
- // Converts characters to their numeric representation (for example 'E' or 'e' becomes 0XE)
- private[this] val CharToNumeric: Array[Byte] = {
- // by filling in -1's we prevent from trying to parse invalid characters
- val ns = Array.fill[Byte](256)(-1)
-
+ private[this] val hexDigits: Array[Byte] = {
+ val ns = new Array[Byte](256)
+ java.util.Arrays.fill(ns, -1: Byte)
ns('0') = 0
ns('1') = 1
ns('2') = 2
@@ -34,103 +32,92 @@ private[json] object UUIDParser {
ns('7') = 7
ns('8') = 8
ns('9') = 9
-
ns('A') = 10
ns('B') = 11
ns('C') = 12
ns('D') = 13
ns('E') = 14
ns('F') = 15
-
ns('a') = 10
ns('b') = 11
ns('c') = 12
ns('d') = 13
ns('e') = 14
ns('f') = 15
-
ns
}
- def unsafeParse(input: String): java.util.UUID =
+ def unsafeParse(input: String): java.util.UUID = {
if (
- input.length != 36 || {
+ input.length == 36 && {
val ch1 = input.charAt(8)
val ch2 = input.charAt(13)
val ch3 = input.charAt(18)
val ch4 = input.charAt(23)
- ch1 != '-' || ch2 != '-' || ch3 != '-' || ch4 != '-'
+ ch1 == '-' && ch2 == '-' && ch3 == '-' && ch4 == '-'
+ }
+ ) {
+ val ds = hexDigits
+ val msb1 = uuidNibble(ds, input, 0)
+ val msb2 = uuidNibble(ds, input, 4)
+ val msb3 = uuidNibble(ds, input, 9)
+ val msb4 = uuidNibble(ds, input, 14)
+ val lsb1 = uuidNibble(ds, input, 19)
+ val lsb2 = uuidNibble(ds, input, 24)
+ val lsb3 = uuidNibble(ds, input, 28)
+ val lsb4 = uuidNibble(ds, input, 32)
+ if ((msb1 | msb2 | msb3 | msb4 | lsb1 | lsb2 | lsb3 | lsb4) >= 0) {
+ return new UUID(
+ msb1.toLong << 48 | msb2.toLong << 32 | msb3.toLong << 16 | msb4,
+ lsb1.toLong << 48 | lsb2.toLong << 32 | lsb3.toLong << 16 | lsb4
+ )
}
- ) unsafeParseExtended(input)
- else {
- val ch2n = CharToNumeric
- val msb1 = parseNibbles(ch2n, input, 0)
- val msb2 = parseNibbles(ch2n, input, 4)
- val msb3 = parseNibbles(ch2n, input, 9)
- val msb4 = parseNibbles(ch2n, input, 14)
- val lsb1 = parseNibbles(ch2n, input, 19)
- val lsb2 = parseNibbles(ch2n, input, 24)
- val lsb3 = parseNibbles(ch2n, input, 28)
- val lsb4 = parseNibbles(ch2n, input, 32)
- if ((msb1 | msb2 | msb3 | msb4 | lsb1 | lsb2 | lsb3 | lsb4) < 0) invalidUUIDError(input)
- new java.util.UUID(msb1 << 48 | msb2 << 32 | msb3 << 16 | msb4, lsb1 << 48 | lsb2 << 32 | lsb3 << 16 | lsb4)
+ } else if (input.length <= 36) {
+ return uuidExtended(input)
}
-
- // A nibble is 4 bits
- @nowarn("msg=implicit numeric widening")
- private[this] def parseNibbles(ch2n: Array[Byte], input: String, position: Int): Long = {
- val ch1 = input.charAt(position)
- val ch2 = input.charAt(position + 1)
- val ch3 = input.charAt(position + 2)
- val ch4 = input.charAt(position + 3)
- if ((ch1 | ch2 | ch3 | ch4) > 0xff) -1L
- else ch2n(ch1) << 12 | ch2n(ch2) << 8 | ch2n(ch3) << 4 | ch2n(ch4)
+ uuidError()
}
- private[this] def unsafeParseExtended(input: String): java.util.UUID = {
- val len = input.length
- if (len > 36) throw new IllegalArgumentException("UUID string too large")
- val dash1 = input.indexOf('-', 0)
- val dash2 = input.indexOf('-', dash1 + 1)
- val dash3 = input.indexOf('-', dash2 + 1)
- val dash4 = input.indexOf('-', dash3 + 1)
- val dash5 = input.indexOf('-', dash4 + 1)
-
- // For any valid input, dash1 through dash4 will be positive and dash5 will be negative,
- // but it's enough to check dash4 and dash5:
- // - if dash1 is -1, dash4 will be -1
- // - if dash1 is positive but dash2 is -1, dash4 will be -1
- // - if dash1 and dash2 is positive, dash3 will be -1, dash4 will be positive, but so will dash5
- if (dash4 < 0 || dash5 >= 0) invalidUUIDError(input)
+ private[this] def uuidNibble(ds: Array[Byte], input: String, offset: Int): Int = {
+ val ch1 = input.charAt(offset).toInt
+ val ch2 = input.charAt(offset + 1).toInt
+ val ch3 = input.charAt(offset + 2).toInt
+ val ch4 = input.charAt(offset + 3).toInt
+ if ((ch1 | ch2 | ch3 | ch4) > 0xff) -1
+ else ds(ch1) << 12 | ds(ch2) << 8 | ds(ch3) << 4 | ds(ch4)
+ }
- val ch2n = CharToNumeric
- val section1 = parseSection(ch2n, input, 0, dash1, 0xfffffff00000000L)
- val section2 = parseSection(ch2n, input, dash1 + 1, dash2, 0xfffffffffff0000L)
- val section3 = parseSection(ch2n, input, dash2 + 1, dash3, 0xfffffffffff0000L)
- val section4 = parseSection(ch2n, input, dash3 + 1, dash4, 0xfffffffffff0000L)
- val section5 = parseSection(ch2n, input, dash4 + 1, len, 0xfff000000000000L)
- new java.util.UUID((section1 << 32) | (section2 << 16) | section3, (section4 << 48) | section5)
+ private[this] def uuidExtended(input: String): UUID = {
+ val dash1 = input.indexOf('-', 1)
+ val dash2 = input.indexOf('-', dash1 + 2)
+ val dash3 = input.indexOf('-', dash2 + 2)
+ val dash4 = input.indexOf('-', dash3 + 2)
+ if (dash4 >= 0) {
+ val ds = hexDigits
+ val section1 = uuidSection(ds, input, 0, dash1, 0xffffffff00000000L)
+ val section2 = uuidSection(ds, input, dash1 + 1, dash2, 0xffffffffffff0000L)
+ val section3 = uuidSection(ds, input, dash2 + 1, dash3, 0xffffffffffff0000L)
+ val section4 = uuidSection(ds, input, dash3 + 1, dash4, 0xffffffffffff0000L)
+ val section5 = uuidSection(ds, input, dash4 + 1, input.length, 0xffff000000000000L)
+ return new UUID((section1 << 32) | (section2 << 16) | section3, (section4 << 48) | section5)
+ }
+ uuidError()
}
- @nowarn("msg=implicit numeric widening")
- private[this] def parseSection(
- ch2n: Array[Byte],
- input: String,
- beginIndex: Int,
- endIndex: Int,
- zeroMask: Long
- ): Long = {
- if (beginIndex >= endIndex || beginIndex + 16 < endIndex) invalidUUIDError(input)
- var result = 0L
- var i = beginIndex
- while (i < endIndex) {
- result = (result << 4) | ch2n(input.charAt(i))
- i += 1
+ private[this] def uuidSection(ds: Array[Byte], input: String, from: Int, to: Int, mask: Long): Long = {
+ if (from < to && from + 16 >= to) {
+ var result = 0L
+ var i = from
+ while (i < to) {
+ val c = input.charAt(i).toInt
+ if (c > 0xff) uuidError()
+ result = (result << 4) | ds(c)
+ i += 1
+ }
+ if ((result & mask) == 0L) return result
}
- if ((result & zeroMask) != 0) invalidUUIDError(input)
- result
+ uuidError()
}
- private[this] def invalidUUIDError(input: String): IllegalArgumentException =
- throw new IllegalArgumentException(input)
+ @noinline private[this] def uuidError(): Nothing = throw new IllegalArgumentException with NoStackTrace
}
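The fast path above rests on one idea: a 256-entry table maps hex characters to their value and every other character to -1, so four characters can be combined with shifts and the whole 36-character UUID validated with a single sign check. A standalone sketch of that building block (not the library code itself, names are illustrative):

    // Hex-nibble lookup sketch: invalid characters yield -1, so a negative
    // combined result signals "not a valid 4-digit hex group".
    object NibbleSketch {
      private val hex: Array[Byte] = {
        val ns = Array.fill[Byte](256)(-1)
        ('0' to '9').foreach(c => ns(c) = (c - '0').toByte)
        ('a' to 'f').foreach(c => ns(c) = (c - 'a' + 10).toByte)
        ('A' to 'F').foreach(c => ns(c) = (c - 'A' + 10).toByte)
        ns
      }

      // Parses 4 hex chars starting at `offset`; a negative result means "invalid".
      def nibble4(s: String, offset: Int): Int = {
        val c1 = s.charAt(offset).toInt
        val c2 = s.charAt(offset + 1).toInt
        val c3 = s.charAt(offset + 2).toInt
        val c4 = s.charAt(offset + 3).toInt
        if ((c1 | c2 | c3 | c4) > 0xff) -1 // non-ASCII characters cannot be hex digits
        else hex(c1) << 12 | hex(c2) << 8 | hex(c3) << 4 | hex(c4)
      }

      def main(args: Array[String]): Unit = {
        println(nibble4("cafe", 0).toHexString) // cafe
        println(nibble4("zzzz", 0))             // negative => invalid
      }
    }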
diff --git a/zio-json/shared/src/test/scala-2.13/zio/json/CodecVersionSpecificSpec.scala b/zio-json/shared/src/test/scala-2.13/zio/json/CodecVersionSpecificSpec.scala
new file mode 100644
index 000000000..d4ca8d932
--- /dev/null
+++ b/zio-json/shared/src/test/scala-2.13/zio/json/CodecVersionSpecificSpec.scala
@@ -0,0 +1,17 @@
+package zio.json
+
+import zio.test.Assertion._
+import zio.test._
+
+import scala.collection.immutable
+
+object CodecVersionSpecificSpec extends ZIOSpecDefault {
+ val spec: Spec[Environment, Any] =
+ suite("CodecVersionSpecific")(
+ test("ArraySeq") {
+ val jsonStr = """["5XL","2XL","XL"]"""
+ val expected = immutable.ArraySeq("5XL", "2XL", "XL")
+ assert(jsonStr.fromJson[immutable.ArraySeq[String]])(isRight(equalTo(expected)))
+ }
+ )
+}
diff --git a/zio-json/shared/src/test/scala-2.13/zio/json/DecoderVersionSpecificSpec.scala b/zio-json/shared/src/test/scala-2.13/zio/json/DecoderVersionSpecificSpec.scala
new file mode 100644
index 000000000..c9af9f4ba
--- /dev/null
+++ b/zio-json/shared/src/test/scala-2.13/zio/json/DecoderVersionSpecificSpec.scala
@@ -0,0 +1,28 @@
+package zio.json
+
+import zio.json.ast.Json
+import zio.test.Assertion._
+import zio.test._
+
+import scala.collection.immutable
+
+object DecoderVersionSpecificSpec extends ZIOSpecDefault {
+
+ val spec: Spec[Environment, Any] =
+ suite("DecoderVersionSpecific")(
+ suite("fromJson")(
+ test("ArraySeq") {
+ val jsonStr = """["5XL","2XL","XL"]"""
+ val expected = immutable.ArraySeq("5XL", "2XL", "XL")
+ assert(jsonStr.fromJson[immutable.ArraySeq[String]])(isRight(equalTo(expected)))
+ }
+ ),
+ suite("fromJsonAST")(
+ test("ArraySeq") {
+ val json = Json.Arr(Json.Str("5XL"), Json.Str("2XL"), Json.Str("XL"))
+ val expected = immutable.ArraySeq("5XL", "2XL", "XL")
+ assert(json.as[Seq[String]])(isRight(equalTo(expected)))
+ }
+ )
+ )
+}
diff --git a/zio-json/shared/src/test/scala-2.13/zio/json/EncoderVesionSpecificSpec.scala b/zio-json/shared/src/test/scala-2.13/zio/json/EncoderVesionSpecificSpec.scala
new file mode 100644
index 000000000..aaabfb137
--- /dev/null
+++ b/zio-json/shared/src/test/scala-2.13/zio/json/EncoderVesionSpecificSpec.scala
@@ -0,0 +1,30 @@
+package zio.json
+
+import zio.json.ast.Json
+import zio.test.Assertion._
+import zio.test._
+
+import scala.collection.immutable
+
+object EncoderVesionSpecificSpec extends ZIOSpecDefault {
+
+ val spec: Spec[Environment, Any] =
+ suite("EncoderVesionSpecific")(
+ suite("toJson")(
+ test("collections") {
+ assert(immutable.ArraySeq[Int]().toJson)(equalTo("[]")) &&
+ assert(immutable.ArraySeq(1, 2, 3).toJson)(equalTo("[1,2,3]")) &&
+ assert(immutable.ArraySeq[String]().toJsonPretty)(equalTo("[]")) &&
+ assert(immutable.ArraySeq("foo", "bar").toJsonPretty)(equalTo("[\n \"foo\",\n \"bar\"\n]"))
+ }
+ ),
+ suite("toJsonAST")(
+ test("collections") {
+ val arrEmpty = Json.Arr()
+ val arr123 = Json.Arr(Json.Num(1), Json.Num(2), Json.Num(3))
+ assert(immutable.ArraySeq[Int]().toJsonAST)(isRight(equalTo(arrEmpty))) &&
+ assert(immutable.ArraySeq(1, 2, 3).toJsonAST)(isRight(equalTo(arr123)))
+ }
+ )
+ )
+}
diff --git a/zio-json/shared/src/test/scala-2.x/zio/json/ConfigurableDeriveCodecSpec.scala b/zio-json/shared/src/test/scala-2.x/zio/json/ConfigurableDeriveCodecSpec.scala
deleted file mode 100644
index b61ff8485..000000000
--- a/zio-json/shared/src/test/scala-2.x/zio/json/ConfigurableDeriveCodecSpec.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-package zio.json
-
-import zio.json.JsonCodecConfiguration.SumTypeHandling.DiscriminatorField
-import zio.json.ast.Json
-import zio.test._
-
-object ConfigurableDeriveCodecSpec extends ZIOSpecDefault {
- case class ClassWithFields(someField: Int, someOtherField: String)
-
- sealed trait ST
-
- object ST {
- case object CaseObj extends ST
- case class CaseClass(i: Int) extends ST
- }
-
- def spec = suite("ConfigurableDeriveCodecSpec")(
- suite("defaults")(
- suite("string")(
- test("should not map field names by default") {
- val expectedStr = """{"someField":1,"someOtherField":"a"}"""
- val expectedObj = ClassWithFields(1, "a")
-
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedStr.fromJson[ClassWithFields].toOption.get == expectedObj,
- expectedObj.toJson == expectedStr
- )
- },
- test("should not use discriminator by default") {
- val expectedStr = """{"CaseObj":{}}"""
- val expectedObj: ST = ST.CaseObj
-
- implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedStr.fromJson[ST].toOption.get == expectedObj,
- expectedObj.toJson == expectedStr
- )
- },
- test("should allow extra fields by default") {
- val jsonStr = """{"someField":1,"someOtherField":"a","extra":123}"""
- val expectedObj = ClassWithFields(1, "a")
-
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- jsonStr.fromJson[ClassWithFields].toOption.get == expectedObj
- )
- }
- ),
- suite("AST")(
- test("should not map field names by default") {
- val expectedAST = Json.Obj("someField" -> Json.Num(1), "someOtherField" -> Json.Str("a"))
- val expectedObj = ClassWithFields(1, "a")
-
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedAST.as[ClassWithFields].toOption.get == expectedObj,
- expectedObj.toJsonAST.toOption.get == expectedAST
- )
- },
- test("should not use discriminator by default") {
- val expectedAST = Json.Obj("CaseObj" -> Json.Obj())
- val expectedObj: ST = ST.CaseObj
-
- implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedAST.as[ST].toOption.get == expectedObj,
- expectedObj.toJsonAST.toOption.get == expectedAST
- )
- },
- test("should allow extra fields by default") {
- val jsonAST = Json.Obj("someField" -> Json.Num(1), "someOtherField" -> Json.Str("a"), "extra" -> Json.Num(1))
- val expectedObj = ClassWithFields(1, "a")
-
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- jsonAST.as[ClassWithFields].toOption.get == expectedObj
- )
- }
- )
- ),
- suite("overrides")(
- suite("string")(
- test("should override field name mapping") {
- val expectedStr = """{"some_field":1,"some_other_field":"a"}"""
- val expectedObj = ClassWithFields(1, "a")
-
- implicit val config: JsonCodecConfiguration =
- JsonCodecConfiguration(fieldNameMapping = SnakeCase)
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedStr.fromJson[ClassWithFields].toOption.get == expectedObj,
- expectedObj.toJson == expectedStr
- )
- },
- test("should specify discriminator") {
- val expectedStr = """{"$type":"CaseClass","i":1}"""
- val expectedObj: ST = ST.CaseClass(i = 1)
-
- implicit val config: JsonCodecConfiguration =
- JsonCodecConfiguration(sumTypeHandling = DiscriminatorField("$type"))
- implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedStr.fromJson[ST].toOption.get == expectedObj,
- expectedObj.toJson == expectedStr
- )
- },
- test("should prevent extra fields") {
- val jsonStr = """{"someField":1,"someOtherField":"a","extra":123}"""
-
- implicit val config: JsonCodecConfiguration =
- JsonCodecConfiguration(allowExtraFields = false)
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- jsonStr.fromJson[ClassWithFields].isLeft
- )
- }
- ),
- suite("AST")(
- test("should override field name mapping") {
- val expectedAST = Json.Obj("some_field" -> Json.Num(1), "some_other_field" -> Json.Str("a"))
- val expectedObj = ClassWithFields(1, "a")
-
- implicit val config: JsonCodecConfiguration =
- JsonCodecConfiguration(fieldNameMapping = SnakeCase)
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedAST.as[ClassWithFields].toOption.get == expectedObj,
- expectedObj.toJsonAST.toOption.get == expectedAST
- )
- },
- test("should specify discriminator") {
- val expectedAST = Json.Obj("$type" -> Json.Str("CaseClass"), "i" -> Json.Num(1))
- val expectedObj: ST = ST.CaseClass(i = 1)
-
- implicit val config: JsonCodecConfiguration =
- JsonCodecConfiguration(sumTypeHandling = DiscriminatorField("$type"))
- implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
-
- assertTrue(
- expectedAST.as[ST].toOption.get == expectedObj,
- expectedObj.toJsonAST.toOption.get == expectedAST
- )
- },
- test("should prevent extra fields") {
- val jsonAST = Json.Obj("someField" -> Json.Num(1), "someOtherField" -> Json.Str("a"), "extra" -> Json.Num(1))
-
- implicit val config: JsonCodecConfiguration =
- JsonCodecConfiguration(allowExtraFields = false)
- implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
-
- assertTrue(
- jsonAST.as[ClassWithFields].isLeft
- )
- }
- )
- )
- )
-}
diff --git a/zio-json/shared/src/test/scala-3/zio/json/CodecVersionSpecificSpec.scala b/zio-json/shared/src/test/scala-3/zio/json/CodecVersionSpecificSpec.scala
new file mode 100644
index 000000000..ab76fe5bc
--- /dev/null
+++ b/zio-json/shared/src/test/scala-3/zio/json/CodecVersionSpecificSpec.scala
@@ -0,0 +1,87 @@
+package zio.json
+
+import zio.test.Assertion._
+import zio.test._
+
+import scala.collection.immutable
+
+object CodecVersionSpecificSpec extends ZIOSpecDefault {
+ val spec: Spec[Environment, Any] =
+ suite("CodecVersionSpecific")(
+ test("ArraySeq") {
+ val jsonStr = """["5XL","2XL","XL"]"""
+ val expected = immutable.ArraySeq("5XL", "2XL", "XL")
+ assert(jsonStr.fromJson[immutable.ArraySeq[String]])(isRight(equalTo(expected)))
+ },
+ test("Derives for a product type") {
+ assertZIO(typeCheck {
+ """
+ case class Foo(bar: String) derives JsonCodec
+
+ Foo("bar").toJson.fromJson[Foo]
+ """
+ })(isRight(anything))
+ },
+ test("Derives for a sum type") {
+ assertZIO(typeCheck {
+ """
+ enum Foo derives JsonCodec:
+ case Bar
+ case Baz(baz: String)
+ case Qux(foo: Foo)
+
+ (Foo.Qux(Foo.Bar): Foo).toJson.fromJson[Foo]
+ """
+ })(isRight(anything))
+ },
+ test("Derives and encodes for a union of string-based literals") {
+ case class Foo(aOrB: "A" | "B", optA: Option["A"]) derives JsonCodec
+
+ assertTrue(Foo("A", Some("A")).toJson.fromJson[Foo] == Right(Foo("A", Some("A"))))
+ },
+ test("Custom codec for union of standard types using an internal API") {
+ import zio.json.internal._
+
+ type Value = Null | String | Int | Boolean
+
+ final case class MyDomain(v: Value)
+
+ object MyDomain:
+ given JsonCodec[MyDomain] = new JsonCodec[MyDomain](
+ (a: MyDomain, indent: Option[Int], out: Write) =>
+ a.v match {
+ case i: Int => SafeNumbers.write(i, out)
+ case b: Boolean => out.write(if (b) "true" else "false")
+ case s: String => JsonEncoder.string.unsafeEncode(s, indent, out)
+ case null => out.write("null")
+ },
+ (trace: List[JsonError], in: RetractReader) =>
+ new MyDomain({
+ val c = in.nextNonWhitespace()
+ if (c == '"') {
+ in.retract()
+ Lexer.string(trace, in).toString
+ } else if (c == 't' && in.readChar() == 'r' && in.readChar() == 'u' && in.readChar() == 'e') {
+ true
+ } else if (
+ c == 'f' && in.readChar() == 'a' && in.readChar() == 'l' && in.readChar() == 's' && in
+ .readChar() == 'e'
+ ) {
+ false
+ } else if (c == 'n' && in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') {
+ null
+ } else {
+ in.retract()
+ Lexer.int(trace, in)
+ }
+ })
+ )
+
+ assertTrue(
+ List(MyDomain("xxx"), MyDomain(777), MyDomain(true), MyDomain(false), MyDomain(null)).toJson
+ .fromJson[List[MyDomain]] ==
+ Right(List(MyDomain("xxx"), MyDomain(777), MyDomain(true), MyDomain(false), MyDomain(null)))
+ )
+ }
+ )
+}
diff --git a/zio-json/shared/src/test/scala-3/zio/json/DecoderVersionSpecificSpec.scala b/zio-json/shared/src/test/scala-3/zio/json/DecoderVersionSpecificSpec.scala
new file mode 100644
index 000000000..367cdb97a
--- /dev/null
+++ b/zio-json/shared/src/test/scala-3/zio/json/DecoderVersionSpecificSpec.scala
@@ -0,0 +1,135 @@
+package zio.json
+
+import zio.json.ast.Json
+import zio.test.Assertion._
+import zio.test._
+
+import scala.collection.immutable
+
+object DecoderVersionSpecificSpec extends ZIOSpecDefault {
+
+ val spec: Spec[Environment, Any] =
+ suite("DecoderVersionSpecific")(
+ suite("fromJson")(
+ test("ArraySeq") {
+ val jsonStr = """["5XL","2XL","XL"]"""
+ val expected = immutable.ArraySeq("5XL", "2XL", "XL")
+
+ assert(jsonStr.fromJson[immutable.ArraySeq[String]])(isRight(equalTo(expected)))
+ },
+ test("Derives for a product type") {
+ case class Foo(bar: String) derives JsonDecoder
+
+ assertTrue("{\"bar\": \"hello\"}".fromJson[Foo] == Right(Foo("hello")))
+ },
+ test("Derives for a sum enum Enumeration type") {
+ @jsonHintNames(SnakeCase)
+ enum Foo derives JsonDecoder:
+ case Bar
+ case Baz
+ case Qux
+
+ assertTrue("\"qux\"".fromJson[Foo] == Right(Foo.Qux)) &&
+ assertTrue("\"bar\"".fromJson[Foo] == Right(Foo.Bar))
+ },
+ test("Derives for a sum enum Enumeration type with enumValuesAsStrings = false") {
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = false)
+
+ enum Foo derives JsonDecoder:
+ case Bar
+ case Baz
+ case Qux
+
+ assertTrue("{\"Qux\":{}}".fromJson[Foo] == Right(Foo.Qux)) &&
+ assertTrue("{\"Bar\":{}}".fromJson[Foo] == Right(Foo.Bar))
+ },
+ test("Derives for a sum sealed trait Enumeration type") {
+ sealed trait Foo derives JsonDecoder
+ object Foo:
+ @jsonHint("Barrr")
+ case object Bar extends Foo
+ case object Baz extends Foo
+ case object Qux extends Foo
+
+ assertTrue("\"Qux\"".fromJson[Foo] == Right(Foo.Qux)) &&
+ assertTrue("\"Barrr\"".fromJson[Foo] == Right(Foo.Bar))
+ },
+ test("Derives for a sum sealed trait Enumeration type with enumValuesAsStrings = false") {
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = false)
+
+ sealed trait Foo derives JsonDecoder
+ object Foo:
+ @jsonHint("Barrr")
+ case object Bar extends Foo
+ case object Baz extends Foo
+ case object Qux extends Foo
+
+ assertTrue("{\"Qux\":{}}".fromJson[Foo] == Right(Foo.Qux)) &&
+ assertTrue("{\"Barrr\":{}}".fromJson[Foo] == Right(Foo.Bar))
+ },
+ test("Derives for a sum sealed trait Enumeration type with discriminator") {
+ @jsonDiscriminator("$type")
+ sealed trait Foo derives JsonDecoder
+ object Foo:
+ @jsonHint("Barrr")
+ case object Bar extends Foo
+ case object Baz extends Foo
+ case object Qux extends Foo
+
+ assertTrue("""{"$type":"Qux"}""".fromJson[Foo] == Right(Foo.Qux)) &&
+ assertTrue("""{"$type":"Barrr"}""".fromJson[Foo] == Right(Foo.Bar))
+ },
+ test("skip JSON encoded in a string value") {
+ @jsonDiscriminator("type")
+ sealed trait Example derives JsonDecoder {
+ type Content
+ def content: Content
+ }
+ object Example {
+ @jsonHint("JSON")
+ final case class JsonInput(content: String) extends Example {
+ override type Content = String
+ }
+ }
+
+ val json =
+ """
+ |{
+ | "content": "\"{\\n \\\"name\\\": \\\"John\\\",\\\"location\\\":\\\"Sydney\\\",\\n \\\"email\\\": \\\"jdoe@test.com\\\"\\n}\"",
+ | "type": "JSON"
+ |}
+ |""".stripMargin.trim
+ assertTrue(json.fromJson[Example].isRight)
+ },
+ test("Derives for a recursive sum ADT type") {
+ enum Foo derives JsonDecoder:
+ case Bar
+ case Baz(baz: String)
+ case Qux(foo: Foo)
+
+ assertTrue("{\"Qux\":{\"foo\":{\"Bar\":{}}}}".fromJson[Foo] == Right(Foo.Qux(Foo.Bar)))
+ },
+ test("Derives and decodes for a union of string-based literals") {
+ case class Foo(aOrB: "A" | "B", optA: Option["A"]) derives JsonDecoder
+
+ assertTrue("""{"aOrB": "A", "optA": "A"}""".fromJson[Foo] == Right(Foo("A", Some("A")))) &&
+ assertTrue("""{"aOrB": "C"}""".fromJson[Foo] == Left(".aOrB(expected one of: A, B)"))
+ },
+ test("Derives and decodes for a custom map key string-based union type") {
+ case class Foo(aOrB: Map["A" | "B", Int]) derives JsonDecoder
+
+ assertTrue("""{"aOrB": {"A": 1, "B": 2}}""".fromJson[Foo] == Right(Foo(Map("A" -> 1, "B" -> 2)))) &&
+ assertTrue("""{"aOrB": {"C": 1}}""".fromJson[Foo] == Left(".aOrB.C(expected one of: A, B)"))
+ }
+ ),
+ suite("fromJsonAST")(
+ test("ArraySeq") {
+ val json = Json.Arr(Json.Str("5XL"), Json.Str("2XL"), Json.Str("XL"))
+ val expected = immutable.ArraySeq("5XL", "2XL", "XL")
+ assert(json.as[Seq[String]])(isRight(equalTo(expected)))
+ }
+ )
+ )
+}
diff --git a/zio-json/shared/src/test/scala-3/zio/json/DerivedCodecSpec.scala b/zio-json/shared/src/test/scala-3/zio/json/DerivedCodecSpec.scala
deleted file mode 100644
index 7a15eb5ff..000000000
--- a/zio-json/shared/src/test/scala-3/zio/json/DerivedCodecSpec.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package testzio.json
-
-import zio._
-import zio.json._
-import zio.test.Assertion._
-import zio.test._
-
-object DerivedCodecSpec extends ZIOSpecDefault {
- val spec = suite("DerivedCodecSpec")(
- test("Derives for a product type") {
- assertZIO(typeCheck {
- """
- case class Foo(bar: String) derives JsonCodec
-
- Foo("bar").toJson.fromJson[Foo]
- """
- })(isRight(anything))
- },
- test("Derives for a sum type") {
- assertZIO(typeCheck {
- """
- enum Foo derives JsonCodec:
- case Bar
- case Baz(baz: String)
- case Qux(foo: Foo)
-
- (Foo.Qux(Foo.Bar): Foo).toJson.fromJson[Foo]
- """
- })(isRight(anything))
- }
- )
-}
diff --git a/zio-json/shared/src/test/scala-3/zio/json/DerivedDecoderSpec.scala b/zio-json/shared/src/test/scala-3/zio/json/DerivedDecoderSpec.scala
deleted file mode 100644
index a2d702e34..000000000
--- a/zio-json/shared/src/test/scala-3/zio/json/DerivedDecoderSpec.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package testzio.json
-
-import zio._
-import zio.json._
-import zio.test.Assertion._
-import zio.test._
-
-object DerivedDecoderSpec extends ZIOSpecDefault {
-
- val spec = suite("DerivedDecoderSpec")(
- test("Derives for a product type") {
- assertZIO(typeCheck {
- """
- case class Foo(bar: String) derives JsonDecoder
-
- "{\"bar\": \"hello\"}".fromJson[Foo]
- """
- })(isRight(anything))
- },
- test("Derives for a sum type") {
- assertZIO(typeCheck {
- """
- enum Foo derives JsonDecoder:
- case Bar
- case Baz(baz: String)
- case Qux(foo: Foo)
-
- "{\"Qux\":{\"foo\":{\"Bar\":{}}}}".fromJson[Foo]
- """
- })(isRight(anything))
- }
- )
-}
diff --git a/zio-json/shared/src/test/scala-3/zio/json/DerivedEncoderSpec.scala b/zio-json/shared/src/test/scala-3/zio/json/DerivedEncoderSpec.scala
deleted file mode 100644
index 20fd42888..000000000
--- a/zio-json/shared/src/test/scala-3/zio/json/DerivedEncoderSpec.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package testzio.json
-
-import zio._
-import zio.json._
-import zio.test.Assertion._
-import zio.test._
-
-object DerivedEncoderSpec extends ZIOSpecDefault {
- val spec = suite("DerivedEncoderSpec")(
- test("Derives for a product type") {
- assertZIO(typeCheck {
- """
- case class Foo(bar: String) derives JsonEncoder
-
- Foo("bar").toJson
- """
- })(isRight(anything))
- },
- test("Derives for a sum type") {
- assertZIO(typeCheck {
- """
- enum Foo derives JsonEncoder:
- case Bar
- case Baz(baz: String)
- case Qux(foo: Foo)
-
- (Foo.Qux(Foo.Bar): Foo).toJson
- """
- })(isRight(anything))
- }
- )
-}
diff --git a/zio-json/shared/src/test/scala-3/zio/json/EncoderVesionSpecificSpec.scala b/zio-json/shared/src/test/scala-3/zio/json/EncoderVesionSpecificSpec.scala
new file mode 100644
index 000000000..ab59ea3e0
--- /dev/null
+++ b/zio-json/shared/src/test/scala-3/zio/json/EncoderVesionSpecificSpec.scala
@@ -0,0 +1,103 @@
+package zio.json
+
+import zio.json.ast.Json
+import zio.test.Assertion._
+import zio.test._
+
+import scala.collection.immutable
+
+object EncoderVesionSpecificSpec extends ZIOSpecDefault {
+
+ val spec: Spec[Environment, Any] =
+ suite("EncoderVesionSpecific")(
+ suite("toJson")(
+ test("collections") {
+ assert(immutable.ArraySeq[Int]().toJson)(equalTo("[]")) &&
+ assert(immutable.ArraySeq(1, 2, 3).toJson)(equalTo("[1,2,3]")) &&
+ assert(immutable.ArraySeq[String]().toJsonPretty)(equalTo("[]")) &&
+ assert(immutable.ArraySeq("foo", "bar").toJsonPretty)(equalTo("[\n \"foo\",\n \"bar\"\n]"))
+ },
+ test("IArray") {
+ assert(IArray.empty[Int].toJson)(equalTo("[]")) &&
+ assert(IArray(1, 2, 3).toJson)(equalTo("[1,2,3]")) &&
+ assert(IArray.empty[String].toJsonPretty)(equalTo("[]")) &&
+ assert(IArray("foo", "bar").toJsonPretty)(equalTo("[\n \"foo\",\n \"bar\"\n]"))
+ },
+ test("Derives for a product type") {
+ case class Foo(bar: String) derives JsonEncoder
+
+ val json = Foo("bar").toJson
+ assertTrue(json == """{"bar":"bar"}""")
+ },
+ test("Derives for a sum enum Enumeration type") {
+ enum Foo derives JsonEncoder:
+ case Bar
+ case Baz
+ case Qux
+
+ val json = (Foo.Qux: Foo).toJson
+ assertTrue(json == """"Qux"""")
+ },
+ test("Derives for a sum enum Enumeration type with enumValuesAsStrings = false") {
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = false)
+
+ enum Foo derives JsonEncoder:
+ case Bar
+ case Baz
+ case Qux
+
+ val json = (Foo.Qux: Foo).toJson
+ assertTrue(json == """{"Qux":{}}""")
+ },
+ test("Derives for a sum enum Enumeration type with discriminator") {
+ @jsonDiscriminator("$type")
+ enum Foo derives JsonEncoder:
+ case Bar
+ case Baz
+ case Qux
+
+ val json = (Foo.Qux: Foo).toJson
+ assertTrue(json == """{"$type":"Qux"}""")
+ },
+ test("Derives for a sum sealed trait Enumeration type") {
+ sealed trait Foo derives JsonEncoder
+ object Foo:
+ case object Bar extends Foo
+ case object Baz extends Foo
+ case object Qux extends Foo
+
+ val json = (Foo.Qux: Foo).toJson
+ assertTrue(json == """"Qux"""")
+ },
+ test("Derives for a sum ADT type") {
+ enum Foo derives JsonEncoder:
+ case Bar
+ case Baz(baz: String)
+ case Qux(foo: Foo)
+
+ val json = (Foo.Qux(Foo.Bar): Foo).toJson
+ assertTrue(json == """{"Qux":{"foo":{"Bar":{}}}}""")
+ },
+ test("Derives and encodes for a union of string-based literals") {
+ case class Foo(aOrB: "A" | "B", optA: Option["A"]) derives JsonEncoder
+
+ assertTrue(Foo("A", Some("A")).toJson == """{"aOrB":"A","optA":"A"}""")
+ },
+ test("Derives and encodes for a custom map key string-based union type") {
+ case class Foo(aOrB: Map["A" | "B", Int]) derives JsonEncoder
+
+ assertTrue(Foo(Map("A" -> 1, "B" -> 2)).toJson == """{"aOrB":{"A":1,"B":2}}""")
+ }
+ ),
+ suite("toJsonAST")(
+ test("collections") {
+ val arrEmpty = Json.Arr()
+ val arr123 = Json.Arr(Json.Num(1), Json.Num(2), Json.Num(3))
+
+ assert(immutable.ArraySeq[Int]().toJsonAST)(isRight(equalTo(arrEmpty))) &&
+ assert(immutable.ArraySeq(1, 2, 3).toJsonAST)(isRight(equalTo(arr123)))
+ }
+ )
+ )
+}
diff --git a/zio-json/shared/src/test/scala/zio/json/AnnotationsCodecSpec.scala b/zio-json/shared/src/test/scala/zio/json/AnnotationsCodecSpec.scala
new file mode 100644
index 000000000..373ecd739
--- /dev/null
+++ b/zio-json/shared/src/test/scala/zio/json/AnnotationsCodecSpec.scala
@@ -0,0 +1,561 @@
+package zio.json
+
+import zio.json.ast.Json
+import zio.test._
+import zio.Chunk
+
+import scala.collection.immutable
+import scala.collection.mutable
+
+object AnnotationsCodecSpec extends ZIOSpecDefault {
+
+ def spec = suite("AnnotationsCodecSpec")(
+ suite("annotations overrides")(
+ test("should override field name mapping") {
+ @jsonMemberNames(SnakeCase)
+ case class ClassWithFields(someField: Int, someOtherField: String)
+
+ val expectedStr = """{"some_field":1,"some_other_field":"a"}"""
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[ClassWithFields].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should specify discriminator") {
+ @jsonDiscriminator("$type")
+ sealed trait ST
+
+ object ST {
+ case object CaseObj extends ST
+ case class CaseClass(i: Int) extends ST
+
+ implicit lazy val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+ }
+
+ val expectedStr = """{"$type":"CaseClass","i":1}"""
+ val expectedObj: ST = ST.CaseClass(i = 1)
+
+ assertTrue(
+ expectedStr.fromJson[ST].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should override sum type mapping") {
+ @jsonHintNames(SnakeCase)
+ @jsonDiscriminator("$type")
+ sealed trait ST
+
+ object ST {
+ case object CaseObj extends ST
+ case class CaseClass(i: Int) extends ST
+
+ implicit lazy val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+ }
+
+ val expectedStr = """{"$type":"case_class","i":1}"""
+ val expectedObj: ST = ST.CaseClass(i = 1)
+
+ assertTrue(
+ expectedStr.fromJson[ST].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should prevent extra fields") {
+ @jsonNoExtraFields
+ case class ClassWithFields(someField: Int, someOtherField: String)
+
+ val jsonStr = """{"someField":1,"someOtherField":"a","extra":123}"""
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ jsonStr.fromJson[ClassWithFields].isLeft
+ )
+ },
+ test("use explicit null values") {
+ @jsonExplicitNull
+ case class OptionalField(a: Option[Int])
+
+ val expectedStr = """{"a":null}"""
+ val expectedObj = OptionalField(None)
+
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[OptionalField].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("do not write empty collections") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptySeq(a: Seq[Int])
+
+ val expectedStr = """{}"""
+
+ implicit val codec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue(EmptySeq(Seq.empty).toJson == expectedStr)
+ }
+ ),
+ suite("annotations overrides AST")(
+ test("should override field name mapping") {
+ @jsonMemberNames(SnakeCase)
+ case class ClassWithFields(someField: Int, someOtherField: String)
+
+ val expectedAST = Json.Obj("some_field" -> Json.Num(1), "some_other_field" -> Json.Str("a"))
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedAST.as[ClassWithFields].toOption.get == expectedObj,
+ expectedObj.toJsonAST.toOption.get == expectedAST
+ )
+ },
+ test("should specify discriminator") {
+ @jsonDiscriminator("$type")
+ sealed trait ST
+
+ object ST {
+ case object CaseObj extends ST
+ case class CaseClass(i: Int) extends ST
+
+ implicit lazy val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+ }
+
+ val expectedAST = Json.Obj("$type" -> Json.Str("CaseClass"), "i" -> Json.Num(1))
+ val expectedObj: ST = ST.CaseClass(i = 1)
+
+ assertTrue(
+ expectedAST.as[ST].toOption.get == expectedObj,
+ expectedObj.toJsonAST.toOption.get == expectedAST
+ )
+ },
+ test("should prevent extra fields") {
+ @jsonNoExtraFields
+ case class ClassWithFields(someField: Int, someOtherField: String)
+
+ val jsonAST = Json.Obj("someField" -> Json.Num(1), "someOtherField" -> Json.Str("a"), "extra" -> Json.Num(1))
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ jsonAST.as[ClassWithFields].isLeft
+ )
+ },
+ test("use explicit null values") {
+ @jsonExplicitNull
+ case class OptionalField(a: Option[Int])
+
+ val jsonAST = Json.Obj("a" -> Json.Null)
+ val expectedObj = OptionalField(None)
+
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(jsonAST.as[OptionalField].toOption.get == expectedObj, expectedObj.toJsonAST == Right(jsonAST))
+ },
+ test("do not write empty collections") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptySeq(a: Seq[Int])
+
+ val jsonAST = Json.Obj()
+
+ implicit val codec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue(EmptySeq(Seq.empty).toJsonAST == Right(jsonAST))
+ }
+ ),
+ suite("explicit empty collections")(
+ suite("should fill in missing empty collections and write empty collections")(
+ test("for an array") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyArray(a: Array[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyArray(Array.empty)
+
+ implicit val codec: JsonCodec[EmptyArray] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyArray].toOption.exists(_.a.isEmpty), expectedObj.toJson == expectedStr)
+ },
+ test("for a seq") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptySeq(a: Seq[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptySeq(Seq.empty)
+
+ implicit val codec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptySeq].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a chunk") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyChunk(a: Chunk[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyChunk(Chunk.empty)
+
+ implicit val codec: JsonCodec[EmptyChunk] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyChunk].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for an indexed seq") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyIndexedSeq(a: IndexedSeq[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyIndexedSeq(IndexedSeq.empty)
+
+ implicit val codec: JsonCodec[EmptyIndexedSeq] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyIndexedSeq].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a linear seq") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyLinearSeq(a: immutable.LinearSeq[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyLinearSeq(immutable.LinearSeq.empty)
+
+ implicit val codec: JsonCodec[EmptyLinearSeq] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyLinearSeq].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a list set") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyListSet(a: immutable.ListSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyListSet(immutable.ListSet.empty)
+
+ implicit val codec: JsonCodec[EmptyListSet] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyListSet].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a tree set") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyTreeSet(a: immutable.TreeSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyTreeSet(immutable.TreeSet.empty)
+
+ implicit val codec: JsonCodec[EmptyTreeSet] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyTreeSet].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a list") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyList(a: List[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyList(List.empty)
+
+ implicit val codec: JsonCodec[EmptyList] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyList].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a vector") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyVector(a: Vector[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyVector(Vector.empty)
+
+ implicit val codec: JsonCodec[EmptyVector] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyVector].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a set") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptySet(a: Set[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptySet(Set.empty)
+
+ implicit val codec: JsonCodec[EmptySet] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptySet].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a hash set") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyHashSet(a: immutable.HashSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyHashSet(immutable.HashSet.empty)
+
+ implicit val codec: JsonCodec[EmptyHashSet] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyHashSet].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a sorted set") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptySortedSet(a: immutable.SortedSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptySortedSet(immutable.SortedSet.empty)
+
+ implicit val codec: JsonCodec[EmptySortedSet] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptySortedSet].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a map") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyMap(a: Map[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyMap(Map.empty)
+
+ implicit val codec: JsonCodec[EmptyMap] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyMap].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a hash map") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyHashMap(a: immutable.HashMap[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyHashMap(immutable.HashMap.empty)
+
+ implicit val codec: JsonCodec[EmptyHashMap] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyHashMap].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a mutable map") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyMutableMap(a: mutable.Map[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyMutableMap(mutable.Map.empty)
+
+ implicit val codec: JsonCodec[EmptyMutableMap] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyMutableMap].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a sorted map") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptySortedMap(a: collection.SortedMap[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptySortedMap(collection.SortedMap.empty)
+
+ implicit val codec: JsonCodec[EmptySortedMap] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptySortedMap].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a list map") {
+ @jsonExplicitEmptyCollections(true, decoding = false)
+ case class EmptyListMap(a: immutable.ListMap[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyListMap(immutable.ListMap.empty)
+
+ implicit val codec: JsonCodec[EmptyListMap] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyListMap].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ }
+ ),
+ suite("should not write empty collections and fail missing empty collections")(
+ test("for an array") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyArray(a: Array[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyArray(Array.empty)
+
+ implicit val codec: JsonCodec[EmptyArray] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyArray].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a seq") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptySeq(a: Seq[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySeq(Seq.empty)
+
+ implicit val codec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySeq].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a chunk") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyChunk(a: Chunk[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyChunk(Chunk.empty)
+
+ implicit val codec: JsonCodec[EmptyChunk] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyChunk].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for an indexed seq") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyIndexedSeq(a: IndexedSeq[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyIndexedSeq(IndexedSeq.empty)
+
+ implicit val codec: JsonCodec[EmptyIndexedSeq] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyIndexedSeq].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a linear seq") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyLinearSeq(a: immutable.LinearSeq[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyLinearSeq(immutable.LinearSeq.empty)
+
+ implicit val codec: JsonCodec[EmptyLinearSeq] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyLinearSeq].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a list set") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyListSet(a: immutable.ListSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyListSet(immutable.ListSet.empty)
+
+ implicit val codec: JsonCodec[EmptyListSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyListSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a treeSet") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyTreeSet(a: immutable.TreeSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyTreeSet(immutable.TreeSet.empty)
+
+ implicit val codec: JsonCodec[EmptyTreeSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyTreeSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a list") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyList(a: List[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyList(List.empty)
+
+ implicit val codec: JsonCodec[EmptyList] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyList].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a vector") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyVector(a: Vector[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyVector(Vector.empty)
+
+ implicit val codec: JsonCodec[EmptyVector] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyVector].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a set") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptySet(a: Set[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySet(Set.empty)
+
+ implicit val codec: JsonCodec[EmptySet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a hash set") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyHashSet(a: immutable.HashSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyHashSet(immutable.HashSet.empty)
+
+ implicit val codec: JsonCodec[EmptyHashSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyHashSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a sorted set") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptySortedSet(a: immutable.SortedSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySortedSet(immutable.SortedSet.empty)
+
+ implicit val codec: JsonCodec[EmptySortedSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySortedSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a map") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyMap(a: Map[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyMap(Map.empty)
+
+ implicit val codec: JsonCodec[EmptyMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a hashMap") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyHashMap(a: immutable.HashMap[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyHashMap(immutable.HashMap.empty)
+
+ implicit val codec: JsonCodec[EmptyHashMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyHashMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a mutable map") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyMutableMap(a: mutable.Map[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyMutableMap(mutable.Map.empty)
+
+ implicit val codec: JsonCodec[EmptyMutableMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyMutableMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a sorted map") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptySortedMap(a: collection.SortedMap[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySortedMap(collection.SortedMap.empty)
+
+ implicit val codec: JsonCodec[EmptySortedMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySortedMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a list map") {
+ @jsonExplicitEmptyCollections(false)
+ case class EmptyListMap(a: immutable.ListMap[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyListMap(immutable.ListMap.empty)
+
+ implicit val codec: JsonCodec[EmptyListMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyListMap].isLeft, expectedObj.toJson == expectedStr)
+ }
+ )
+ )
+ )
+}
diff --git a/zio-json/jvm/src/test/scala/zio/json/CarterSpec.scala b/zio-json/shared/src/test/scala/zio/json/CarterSpec.scala
similarity index 98%
rename from zio-json/jvm/src/test/scala/zio/json/CarterSpec.scala
rename to zio-json/shared/src/test/scala/zio/json/CarterSpec.scala
index d19c899e1..99283326b 100644
--- a/zio-json/jvm/src/test/scala/zio/json/CarterSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/CarterSpec.scala
@@ -1,6 +1,5 @@
-package testzio.json
+package zio.json
-import zio.json._
import zio.test.Assertion._
import zio.test._
diff --git a/zio-json/shared/src/test/scala/zio/json/CodecSpec.scala b/zio-json/shared/src/test/scala/zio/json/CodecSpec.scala
index c05d08986..a785de23e 100644
--- a/zio-json/shared/src/test/scala/zio/json/CodecSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/CodecSpec.scala
@@ -1,7 +1,6 @@
-package testzio.json
+package zio.json
import zio._
-import zio.json._
import zio.json.ast.Json
import zio.test.Assertion._
import zio.test.TestAspect.jvmOnly
@@ -36,11 +35,13 @@ object CodecSpec extends ZIOSpecDefault {
)
},
test("primitives") {
- val exampleBDString = "234234.234"
- // this big integer consumes more than 128 bits
- assert("170141183460469231731687303715884105728".fromJson[java.math.BigInteger])(
- isLeft(equalTo("(expected a 128 bit BigInteger)"))
- ) && assert(exampleBDString.fromJson[BigDecimal])(isRight(equalTo(BigDecimal(exampleBDString))))
+ // this big integer consumes more than 256 bits
+ assert(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851"
+ .fromJson[java.math.BigInteger]
+ )(
+ isLeft(equalTo("(expected a 256-bit BigInteger)"))
+ )
},
test("java.util.Currency") {
val exampleValue = "\"USD\""
@@ -78,6 +79,13 @@ object CodecSpec extends ZIOSpecDefault {
assert("""{"Child2":{}}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
assert("""{"type":"Child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)")))
},
+ test("sum encoding with hint names") {
+ import examplesumhintnames._
+
+ assert("""{"child1":{}}""".fromJson[Parent])(isRight(equalTo(Child1()))) &&
+ assert("""{"child2":{}}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
+ assert("""{"type":"child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)")))
+ },
test("sum alternative encoding") {
import examplealtsum._
@@ -86,16 +94,23 @@ object CodecSpec extends ZIOSpecDefault {
assert("""{"hint":"Samson"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
assert("""{"Cain":{}}""".fromJson[Parent])(isLeft(equalTo("(missing hint 'hint')")))
},
+ test("sum alternative encoding with hint names") {
+ import examplealtsumhintnames._
+
+ assert("""{"hint":"child1"}""".fromJson[Parent])(isRight(equalTo(Child1()))) &&
+ assert("""{"hint":"Abel"}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
+ assert("""{"hint":"Child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
+ assert("""{"child1":{}}""".fromJson[Parent])(isLeft(equalTo("(missing hint 'hint')")))
+ },
test("key transformation") {
import exampletransformkeys._
- val kebabed = """{"shish123-kebab":""}"""
- val snaked = """{"indiana123_jones":""}"""
+ val kebabed = """{"shi-sh123-kebab":""}"""
+ val snaked = """{"indi_ana123_jones":""}"""
val pascaled = """{"Anders123Hejlsberg":""}"""
val cameled = """{"small123Talk":""}"""
- val indianaJones = """{"wHATcASEiStHIS":""}"""
val overrides = """{"not_modified":"","but-this-should-be":0}"""
- val kebabedLegacy = """{"shish-123-kebab":""}"""
- val snakedLegacy = """{"indiana_123_jones":""}"""
+ val kebabedLegacy = """{"shi-sh-123-kebab":""}"""
+ val snakedLegacy = """{"indi_ana_123_jones":""}"""
assert(kebabed.fromJson[Kebabed])(isRight(equalTo(Kebabed("")))) &&
assert(kebabedLegacy.fromJson[legacy.Kebabed])(isRight(equalTo(legacy.Kebabed("")))) &&
@@ -168,6 +183,12 @@ object CodecSpec extends ZIOSpecDefault {
assert(jsonStr.fromJson[Map[String, Int]])(isRight(equalTo(expected)))
},
+ test("ListMap") {
+ val jsonStr = """{"5XL":3,"2XL":14,"XL":159}"""
+ val expected = collection.immutable.ListMap("5XL" -> 3, "2XL" -> 14, "XL" -> 159)
+
+ assert(jsonStr.fromJson[collection.immutable.ListMap[String, Int]])(isRight(equalTo(expected)))
+ },
test("zio.Chunk") {
val jsonStr = """["5XL","2XL","XL"]"""
val expected = Chunk("5XL", "2XL", "XL")
@@ -225,6 +246,18 @@ object CodecSpec extends ZIOSpecDefault {
object examplesum {
sealed abstract class Parent
+ object Parent {
+ implicit val codec: JsonCodec[Parent] = DeriveJsonCodec.gen[Parent]
+ }
+ @jsonNoExtraFields
+ case class Child1() extends Parent
+ case class Child2() extends Parent
+ }
+
+ object examplesumhintnames {
+ @jsonHintNames(SnakeCase)
+ sealed abstract class Parent
+
object Parent {
implicit val codec: JsonCodec[Parent] = DeriveJsonCodec.gen[Parent]
}
@@ -243,6 +276,7 @@ object CodecSpec extends ZIOSpecDefault {
object examplealtsum {
@jsonDiscriminator("hint")
+ @jsonHintNames(SnakeCase)
sealed abstract class Parent
object Parent {
@@ -256,15 +290,31 @@ object CodecSpec extends ZIOSpecDefault {
case class Child2() extends Parent
}
+ object examplealtsumhintnames {
+
+ @jsonDiscriminator("hint")
+ @jsonHintNames(SnakeCase)
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val codec: JsonCodec[Parent] = DeriveJsonCodec.gen[Parent]
+ }
+
+ case class Child1() extends Parent
+
+ @jsonHint("Abel")
+ case class Child2() extends Parent
+ }
+
object exampletransformkeys {
@jsonMemberNames(KebabCase)
- case class Kebabed(shish123Kebab: String)
+ case class Kebabed(`shi_sh123Kebab`: String)
object Kebabed {
implicit val codec: JsonCodec[Kebabed] = DeriveJsonCodec.gen[Kebabed]
}
@jsonMemberNames(SnakeCase)
- case class Snaked(indiana123Jones: String)
+ case class Snaked(`indi-ana123Jones`: String)
object Snaked {
implicit val codec: JsonCodec[Snaked] = DeriveJsonCodec.gen[Snaked]
}
@@ -301,14 +351,14 @@ object CodecSpec extends ZIOSpecDefault {
object legacy {
@jsonMemberNames(ziojson_03.KebabCase)
- case class Kebabed(shish123Kebab: String)
+ case class Kebabed(shi_sh123Kebab: String)
object Kebabed {
implicit val codec: JsonCodec[Kebabed] = DeriveJsonCodec.gen[Kebabed]
}
@jsonMemberNames(ziojson_03.SnakeCase)
- case class Snaked(indiana123Jones: String)
+ case class Snaked(`indi-ana123Jones`: String)
object Snaked {
implicit val codec: JsonCodec[Snaked] = DeriveJsonCodec.gen[Snaked]
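
For reference, a sketch (with hypothetical Animal/Dog/Cat names) of the sum-type encoding that examplealtsumhintnames above exercises: @jsonDiscriminator("hint") selects the discriminator field, @jsonHintNames(SnakeCase) rewrites the subtype names, and an explicit @jsonHint overrides the mapping for a single case.

import zio.json._

@jsonDiscriminator("hint")
@jsonHintNames(SnakeCase)
sealed trait Animal

object Animal {
  implicit val codec: JsonCodec[Animal] = DeriveJsonCodec.gen[Animal]
}

case class Dog() extends Animal

@jsonHint("kitty")
case class Cat() extends Animal

// """{"hint":"dog"}""".fromJson[Animal]   // expected Right(Dog())  -- SnakeCase lower-cases the class name
// """{"hint":"kitty"}""".fromJson[Animal] // expected Right(Cat())  -- the explicit hint wins, as with "Abel" above
// """{"hint":"Dog"}""".fromJson[Animal]   // expected Left("(invalid disambiguator)"), as in the spec
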
diff --git a/zio-json/shared/src/test/scala/zio/json/ConfigurableDeriveCodecSpec.scala b/zio-json/shared/src/test/scala/zio/json/ConfigurableDeriveCodecSpec.scala
new file mode 100644
index 000000000..1ed90a175
--- /dev/null
+++ b/zio-json/shared/src/test/scala/zio/json/ConfigurableDeriveCodecSpec.scala
@@ -0,0 +1,853 @@
+package zio.json
+
+import zio.json.JsonCodecConfiguration.SumTypeHandling.DiscriminatorField
+import zio.json.ast.Json
+import zio.test._
+import zio.Chunk
+
+import scala.collection.immutable
+import scala.collection.mutable
+
+object ConfigurableDeriveCodecSpec extends ZIOSpecDefault {
+ case class ClassWithFields(someField: Int, someOtherField: String)
+
+ sealed trait ST
+
+ object ST {
+ case object CaseObj extends ST
+ case class CaseClass(i: Int) extends ST
+ }
+
+ case class OptionalField(a: Option[Int])
+
+ def spec = suite("ConfigurableDeriveCodecSpec")(
+ suite("defaults")(
+ test("should not map field names by default") {
+ val expectedStr = """{"someField":1,"someOtherField":"a"}"""
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[ClassWithFields].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should not use discriminator by default") {
+ val expectedStr = """{"CaseObj":{}}"""
+ val expectedObj: ST = ST.CaseObj
+
+ implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[ST].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should allow extra fields by default") {
+ val jsonStr = """{"someField":1,"someOtherField":"a","extra":123}"""
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ jsonStr.fromJson[ClassWithFields].toOption.get == expectedObj
+ )
+ },
+ test("do not write nulls by default") {
+ val expectedStr = """{}"""
+ val expectedObj = OptionalField(None)
+
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[OptionalField].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("do not fail on missing null values") {
+ val expectedStr = """{}"""
+ val expectedObj = OptionalField(None)
+
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[OptionalField].toOption.get == expectedObj, expectedObj.toJson == expectedStr)
+ },
+ test("write empty collections by default") {
+ case class Empty(z: Option[Int])
+ case class EmptyObj(a: Empty)
+ case class EmptySeq(a: Seq[Int])
+
+ val expectedObjStr = """{"a":{}}"""
+ val expectedSeqStr = """{"a":[]}"""
+ val expectedObj = EmptyObj(Empty(None))
+ val expectedSeq = EmptySeq(Seq.empty)
+
+ implicit val emptyCodec: JsonCodec[Empty] = DeriveJsonCodec.gen
+ implicit val emptyObjCodec: JsonCodec[EmptyObj] = DeriveJsonCodec.gen
+ implicit val emptySeqCodec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedObjStr.fromJson[EmptyObj].toOption.get == expectedObj,
+ expectedObj.toJson == expectedObjStr,
+ expectedSeqStr.fromJson[EmptySeq].toOption.get == expectedSeq,
+ expectedSeq.toJson == expectedSeqStr
+ )
+ },
+ test("fail on decoding missing empty collections by default") {
+ case class Empty(z: Option[Int])
+ case class EmptyObj(a: Empty)
+ case class EmptySeq(b: Seq[Int])
+
+ implicit val codecEmpty: JsonCodec[Empty] = DeriveJsonCodec.gen[Empty]
+ implicit val codecEmptyObj: JsonCodec[EmptyObj] = DeriveJsonCodec.gen[EmptyObj]
+ implicit val codecEmptySeq: JsonCodec[EmptySeq] = DeriveJsonCodec.gen[EmptySeq]
+
+ assertTrue(
+ """{}""".fromJson[EmptyObj] == Left(".a(missing)"),
+ """{}""".fromJson[EmptySeq] == Left(".b(missing)")
+ )
+ }
+ ),
+ suite("AST defaults")(
+ test("should not map field names by default") {
+ val expectedAST = Json.Obj("someField" -> Json.Num(1), "someOtherField" -> Json.Str("a"))
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedAST.as[ClassWithFields].toOption.get == expectedObj,
+ expectedObj.toJsonAST.toOption.get == expectedAST
+ )
+ },
+ test("should not use discriminator by default") {
+ val expectedAST = Json.Obj("CaseObj" -> Json.Obj())
+ val expectedObj: ST = ST.CaseObj
+
+ implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedAST.as[ST].toOption.get == expectedObj,
+ expectedObj.toJsonAST.toOption.get == expectedAST
+ )
+ },
+ test("should allow extra fields by default") {
+ val jsonAST = Json.Obj("someField" -> Json.Num(1), "someOtherField" -> Json.Str("a"), "extra" -> Json.Num(1))
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ jsonAST.as[ClassWithFields].toOption.get == expectedObj
+ )
+ },
+ test("do not write nulls by default") {
+ val jsonAST = Json.Obj()
+ val expectedObj = OptionalField(None)
+
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(
+ jsonAST.as[OptionalField].toOption.get == expectedObj,
+ expectedObj.toJsonAST == Right(jsonAST)
+ )
+ },
+ test("write empty collections by default") {
+ case class Empty(z: Option[Int])
+ case class EmptyObj(a: Empty)
+ case class EmptySeq(a: Seq[Int])
+
+ val expectedSeqJson = Json.Obj("a" -> Json.Arr())
+ val expectedObjJson = Json.Obj("a" -> Json.Obj())
+ val expectedObj = EmptyObj(Empty(None))
+ val expectedSeq = EmptySeq(Seq.empty)
+
+ implicit val emptyCodec: JsonCodec[Empty] = DeriveJsonCodec.gen
+ implicit val emptyObjCodec: JsonCodec[EmptyObj] = DeriveJsonCodec.gen
+ implicit val emptySeqCodec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedObj.toJsonAST == Right(expectedObjJson),
+ expectedSeq.toJsonAST == Right(expectedSeqJson),
+ expectedObjJson.as[EmptyObj] == Right(expectedObj),
+ expectedSeqJson.as[EmptySeq] == Right(expectedSeq)
+ )
+ },
+ test("fail on decoding missing empty collections by default") {
+ case class Empty(z: Option[Int])
+ case class EmptyObj(a: Empty)
+ case class EmptySeq(b: Seq[Int])
+
+ implicit val codecEmpty: JsonDecoder[Empty] = DeriveJsonDecoder.gen[Empty]
+ implicit val codecEmptyObj: JsonDecoder[EmptyObj] = DeriveJsonDecoder.gen[EmptyObj]
+ implicit val codecEmptySeq: JsonDecoder[EmptySeq] = DeriveJsonDecoder.gen[EmptySeq]
+
+ assertTrue(
+ Json.Obj().as[EmptyObj] == Left(".a(missing)"),
+ Json.Obj().as[EmptySeq] == Left(".b(missing)")
+ )
+ }
+ ),
+ suite("override defaults")(
+ test("should override field name mapping") {
+ val expectedStr = """{"some_field":1,"some_other_field":"a"}"""
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(fieldNameMapping = SnakeCase)
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[ClassWithFields].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should specify discriminator") {
+ val expectedStr = """{"$type":"CaseClass","i":1}"""
+ val expectedObj: ST = ST.CaseClass(i = 1)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(sumTypeHandling = DiscriminatorField("$type"))
+ implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[ST].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should override sum type mapping") {
+ val expectedStr = """{"$type":"case_class","i":1}"""
+ val expectedObj: ST = ST.CaseClass(i = 1)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(sumTypeHandling = DiscriminatorField("$type"), sumTypeMapping = SnakeCase)
+ implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[ST].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("should prevent extra fields") {
+ val jsonStr = """{"someField":1,"someOtherField":"a","extra":123}"""
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(allowExtraFields = false)
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ jsonStr.fromJson[ClassWithFields].isLeft
+ )
+ },
+ test("use explicit null values") {
+ val expectedStr = """{"a":null}"""
+ val expectedObj = OptionalField(None)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitNulls = true)
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[OptionalField].toOption.get == expectedObj,
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("do not write empty collections") {
+ case class Empty(z: Option[Int])
+ case class EmptyObj(a: Empty)
+ case class EmptySeq(b: Seq[Int])
+
+ val expectedStr = """{"a":{}}"""
+ val expectedEmptyObj = EmptyObj(Empty(None))
+ val expectedEmptySeq = EmptySeq(Seq.empty)
+
+ implicit val config: JsonCodecConfiguration = JsonCodecConfiguration(explicitEmptyCollections =
+ ExplicitEmptyCollections(decoding = false, encoding = false)
+ )
+ implicit val codecEmpty: JsonCodec[Empty] = DeriveJsonCodec.gen[Empty]
+ implicit val codecEmptyObj: JsonCodec[EmptyObj] = DeriveJsonCodec.gen[EmptyObj]
+ implicit val codecEmptySeq: JsonCodec[EmptySeq] = DeriveJsonCodec.gen[EmptySeq]
+
+ assertTrue(
+ expectedEmptyObj.toJson == expectedStr,
+ expectedEmptySeq.toJson == "{}",
+ expectedStr.fromJson[EmptyObj] == Right(expectedEmptyObj),
+ expectedStr.fromJson[EmptySeq] == Right(expectedEmptySeq)
+ )
+ },
+ test("decode missing empty collections with defaults") {
+ case class EmptySeq(b: Seq[Int] = Seq(1))
+ case class EmptyObj(a: EmptySeq)
+
+ val expectedStr = """{}"""
+ val expectedSeq = EmptySeq(Seq(1))
+ val expectedObj = EmptyObj(expectedSeq)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+ implicit val codecObj: JsonCodec[EmptyObj] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedStr.fromJson[EmptySeq].toOption.get == expectedSeq,
+ expectedStr.fromJson[EmptyObj].toOption.get == expectedObj
+ )
+ }
+ ),
+ suite("override AST defaults")(
+ test("should override field name mapping") {
+ val expectedAST = Json.Obj("some_field" -> Json.Num(1), "some_other_field" -> Json.Str("a"))
+ val expectedObj = ClassWithFields(1, "a")
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(fieldNameMapping = SnakeCase)
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedAST.as[ClassWithFields].toOption.get == expectedObj,
+ expectedObj.toJsonAST.toOption.get == expectedAST
+ )
+ },
+ test("should specify discriminator") {
+ val expectedAST = Json.Obj("$type" -> Json.Str("CaseClass"), "i" -> Json.Num(1))
+ val expectedObj: ST = ST.CaseClass(i = 1)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(sumTypeHandling = DiscriminatorField("$type"))
+ implicit val codec: JsonCodec[ST] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedAST.as[ST].toOption.get == expectedObj,
+ expectedObj.toJsonAST.toOption.get == expectedAST
+ )
+ },
+ test("should prevent extra fields") {
+ val jsonAST = Json.Obj("someField" -> Json.Num(1), "someOtherField" -> Json.Str("a"), "extra" -> Json.Num(1))
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(allowExtraFields = false)
+ implicit val codec: JsonCodec[ClassWithFields] = DeriveJsonCodec.gen
+
+ assertTrue(
+ jsonAST.as[ClassWithFields].isLeft
+ )
+ },
+ test("use explicit null values") {
+ val jsonAST = Json.Obj("a" -> Json.Null)
+ val expectedObj = OptionalField(None)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitNulls = true)
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(jsonAST.as[OptionalField].toOption.get == expectedObj, expectedObj.toJsonAST == Right(jsonAST))
+ },
+ test("fail on decoding missing explicit nulls") {
+ val jsonStr = """{}"""
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitNulls = true)
+ implicit val codec: JsonCodec[OptionalField] = DeriveJsonCodec.gen
+
+ assertTrue(jsonStr.fromJson[OptionalField].isLeft)
+ } @@ TestAspect.ignore,
+ test("do not write empty collections") {
+ case class Empty(z: Option[Int])
+ case class EmptyObj(a: Empty)
+ case class EmptySeq(b: Seq[Int])
+
+ val expectedJson = Json.Obj(Chunk("a" -> Json.Obj.empty))
+ val expectedEmptyObj = EmptyObj(Empty(None))
+ val expectedEmptySeq = EmptySeq(Seq.empty)
+
+ implicit val config: JsonCodecConfiguration = JsonCodecConfiguration(explicitEmptyCollections =
+ ExplicitEmptyCollections(decoding = false, encoding = false)
+ )
+ implicit val emptyCodec: JsonCodec[Empty] = DeriveJsonCodec.gen
+ implicit val emptyObjCodec: JsonCodec[EmptyObj] = DeriveJsonCodec.gen
+ implicit val emptySeqCodec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue(
+ expectedEmptyObj.toJsonAST == Right(expectedJson),
+ expectedEmptySeq.toJsonAST == Right(Json.Obj()),
+ expectedJson.as[EmptyObj] == Right(expectedEmptyObj),
+ expectedJson.as[EmptySeq] == Right(expectedEmptySeq)
+ )
+ }
+ ),
+ suite("explicit empty collections")(
+ suite("should fill in missing empty collections and write empty collections")(
+ test("for an array") {
+ case class EmptyArray(a: Array[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyArray(Array.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyArray] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyArray].toOption.exists(_.a.isEmpty), expectedObj.toJson == expectedStr)
+ },
+ test("for a seq") {
+ case class EmptySeq(a: Seq[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptySeq(Seq.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptySeq].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a chunk") {
+ case class EmptyChunk(a: Chunk[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyChunk(Chunk.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyChunk] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyChunk].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for an indexed seq") {
+ case class EmptyIndexedSeq(a: IndexedSeq[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyIndexedSeq(IndexedSeq.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyIndexedSeq] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyIndexedSeq].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a linear seq") {
+ case class EmptyLinearSeq(a: immutable.LinearSeq[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyLinearSeq(immutable.LinearSeq.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyLinearSeq] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyLinearSeq].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a list set") {
+ case class EmptyListSet(a: immutable.ListSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyListSet(immutable.ListSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyListSet] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyListSet].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a tree set") {
+ case class EmptyTreeSet(a: immutable.TreeSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyTreeSet(immutable.TreeSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyTreeSet] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyTreeSet].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a list") {
+ case class EmptyList(a: List[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyList(List.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyList] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyList].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a vector") {
+ case class EmptyVector(a: Vector[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyVector(Vector.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyVector] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyVector].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a set") {
+ case class EmptySet(a: Set[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptySet(Set.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptySet] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptySet].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a hash set") {
+ case class EmptyHashSet(a: immutable.HashSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyHashSet(immutable.HashSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyHashSet] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyHashSet].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a sorted set") {
+ case class EmptySortedSet(a: immutable.SortedSet[Int])
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptySortedSet(immutable.SortedSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptySortedSet] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptySortedSet].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a map") {
+ case class EmptyMap(a: Map[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyMap(Map.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyMap] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyMap].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a hash map") {
+ case class EmptyHashMap(a: immutable.HashMap[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyHashMap(immutable.HashMap.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyHashMap] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyHashMap].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a mutable map") {
+ case class EmptyMutableMap(a: mutable.Map[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyMutableMap(mutable.Map.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyMutableMap] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyMutableMap].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a sorted map") {
+ case class EmptySortedMap(a: collection.SortedMap[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptySortedMap(collection.SortedMap.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptySortedMap] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptySortedMap].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a list map") {
+ case class EmptyListMap(a: immutable.ListMap[String, String])
+ val expectedStr = """{"a":{}}"""
+ val expectedObj = EmptyListMap(immutable.ListMap.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[EmptyListMap] = DeriveJsonCodec.gen
+
+ assertTrue("""{}""".fromJson[EmptyListMap].toOption.contains(expectedObj), expectedObj.toJson == expectedStr)
+ },
+ test("for a transform collection") {
+ case class MappedCollection(a: List[Int])
+ case class EmptyMappedCollection(a: MappedCollection)
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyMappedCollection(MappedCollection(List.empty))
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[MappedCollection] = JsonCodec
+ .list[Int]
+ .transform(
+ v => MappedCollection(v),
+ _.a
+ )
+ implicit val emptyMappedCollectionCodec: JsonCodec[EmptyMappedCollection] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyMappedCollection].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ },
+ test("for a transformOrFail collection") {
+ case class MappedCollection(a: List[Int])
+ case class EmptyMappedCollection(a: MappedCollection)
+ val expectedStr = """{"a":[]}"""
+ val expectedObj = EmptyMappedCollection(MappedCollection(List.empty))
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
+ implicit val codec: JsonCodec[MappedCollection] = JsonCodec
+ .list[Int]
+ .transformOrFail(
+ v => Right(MappedCollection(v)),
+ _.a
+ )
+ implicit val emptyMappedCollectionCodec: JsonCodec[EmptyMappedCollection] = DeriveJsonCodec.gen
+
+ assertTrue(
+ """{}""".fromJson[EmptyMappedCollection].toOption.contains(expectedObj),
+ expectedObj.toJson == expectedStr
+ )
+ }
+ ),
+ suite("should not write empty collections and fail missing empty collections")(
+ test("for an array") {
+ case class EmptyArray(a: Array[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyArray(Array.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyArray] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyArray].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a seq") {
+ case class EmptySeq(a: Seq[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySeq(Seq.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptySeq] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySeq].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a chunk") {
+ case class EmptyChunk(a: Chunk[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyChunk(Chunk.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyChunk] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyChunk].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for an indexed seq") {
+ case class EmptyIndexedSeq(a: IndexedSeq[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyIndexedSeq(IndexedSeq.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyIndexedSeq] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyIndexedSeq].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a linear seq") {
+ case class EmptyLinearSeq(a: immutable.LinearSeq[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyLinearSeq(immutable.LinearSeq.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyLinearSeq] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyLinearSeq].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a list set") {
+ case class EmptyListSet(a: immutable.ListSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyListSet(immutable.ListSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyListSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyListSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a treeSet") {
+ case class EmptyTreeSet(a: immutable.TreeSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyTreeSet(immutable.TreeSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyTreeSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyTreeSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a list") {
+ case class EmptyList(a: List[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyList(List.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyList] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyList].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a vector") {
+ case class EmptyVector(a: Vector[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyVector(Vector.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyVector] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyVector].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a set") {
+ case class EmptySet(a: Set[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySet(Set.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptySet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a hash set") {
+ case class EmptyHashSet(a: immutable.HashSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyHashSet(immutable.HashSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyHashSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyHashSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a sorted set") {
+ case class EmptySortedSet(a: immutable.SortedSet[Int])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySortedSet(immutable.SortedSet.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptySortedSet] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySortedSet].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a map") {
+ case class EmptyMap(a: Map[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyMap(Map.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a hashMap") {
+ case class EmptyHashMap(a: immutable.HashMap[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyHashMap(immutable.HashMap.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyHashMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyHashMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a mutable map") {
+ case class EmptyMutableMap(a: mutable.Map[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyMutableMap(mutable.Map.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyMutableMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyMutableMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a sorted map") {
+ case class EmptySortedMap(a: collection.SortedMap[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptySortedMap(collection.SortedMap.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptySortedMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptySortedMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a list map") {
+ case class EmptyListMap(a: immutable.ListMap[String, String])
+ val expectedStr = """{}"""
+ val expectedObj = EmptyListMap(immutable.ListMap.empty)
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[EmptyListMap] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyListMap].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a transform collection") {
+ case class MappedCollection(a: List[Int])
+ case class EmptyMappedCollection(a: MappedCollection)
+ val expectedStr = """{}"""
+ val expectedObj = EmptyMappedCollection(MappedCollection(List.empty))
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[MappedCollection] = JsonCodec
+ .list[Int]
+ .transform(
+ v => MappedCollection(v),
+ _.a
+ )
+ implicit val emptyMappedCollectionCodec: JsonCodec[EmptyMappedCollection] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyMappedCollection].isLeft, expectedObj.toJson == expectedStr)
+ },
+ test("for a transformOrFail collection") {
+ case class MappedCollection(a: List[Int])
+ case class EmptyMappedCollection(a: MappedCollection)
+ val expectedStr = """{}"""
+ val expectedObj = EmptyMappedCollection(MappedCollection(List.empty))
+
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(false))
+ implicit val codec: JsonCodec[MappedCollection] = JsonCodec
+ .list[Int]
+ .transformOrFail(
+ v => Right(MappedCollection(v)),
+ _.a
+ )
+ implicit val emptyMappedCollectionCodec: JsonCodec[EmptyMappedCollection] = DeriveJsonCodec.gen
+
+ assertTrue(expectedStr.fromJson[EmptyMappedCollection].isLeft, expectedObj.toJson == expectedStr)
+ }
+ )
+ )
+ )
+}
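
The configuration-driven variant tested in ConfigurableDeriveCodecSpec can be summarised with a short sketch (the Tags/values names are hypothetical; the JsonCodecConfiguration and ExplicitEmptyCollections calls are the ones used in the spec): with decoding = false a missing collection field decodes to empty, while encoding still writes the empty collection.

import zio.json._

case class Tags(values: List[String])

object Tags {
  implicit val config: JsonCodecConfiguration =
    JsonCodecConfiguration(explicitEmptyCollections = ExplicitEmptyCollections(decoding = false))
  implicit val codec: JsonCodec[Tags] = DeriveJsonCodec.gen
}

// """{}""".fromJson[Tags] // expected Right(Tags(Nil)), per "for a list" above
// Tags(Nil).toJson        // expected """{"values":[]}""", since encoding stays explicit
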
diff --git a/zio-json/shared/src/test/scala/zio/json/DecoderSpec.scala b/zio-json/shared/src/test/scala/zio/json/DecoderSpec.scala
index 358352882..550d286f6 100644
--- a/zio-json/shared/src/test/scala/zio/json/DecoderSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/DecoderSpec.scala
@@ -1,30 +1,218 @@
-package testzio.json
+package zio.json
import zio._
-import zio.json._
import zio.json.ast.Json
import zio.test.Assertion._
import zio.test.TestAspect.jvmOnly
-import zio.test.{ TestAspect, _ }
+import zio.test._
+import java.math.BigInteger
import java.time.{ Duration, OffsetDateTime, ZonedDateTime }
import java.util.UUID
import scala.collection.{ SortedMap, immutable, mutable }
object DecoderSpec extends ZIOSpecDefault {
-
val spec: Spec[Environment, Any] =
suite("Decoder")(
suite("fromJson")(
+ test("string") {
+ assert(""""abc"""".fromJson[String])(isRight(equalTo("abc"))) &&
+ assert(""""abc\n"""".fromJson[String])(isRight(equalTo("abc\n"))) &&
+ assert("\"abc\\u0182\"".fromJson[String])(isRight(equalTo("abcƂ"))) &&
+ assert("\"abc\\u1Ee1\"".fromJson[String])(isRight(equalTo("abcỡ"))) &&
+ assert(""""abc\x"""".fromJson[String])(isLeft(equalTo("""(invalid '\x' in string)"""))) &&
+ assert("\"\u0000\"".fromJson[String])(isLeft(equalTo("""(invalid control in string)"""))) &&
+ assert("\"\\u0000\"".replace('0', 'g').fromJson[String])(isLeft(equalTo("""(invalid charcode in string)""")))
+ },
+ test("char") {
+ assert(""""a"""".fromJson[Char])(isRight(equalTo('a'))) &&
+ assert(""""\n"""".fromJson[Char])(isRight(equalTo('\n'))) &&
+ assert("\"\\u0182\"".fromJson[Char])(isRight(equalTo('Ƃ'))) &&
+ assert("\"\\u1Ee1\"".fromJson[Char])(isRight(equalTo('ỡ'))) &&
+ assert(""""aa"""".fromJson[Char])(isLeft(equalTo("""(expected single character string)"""))) &&
+ assert(""""\x"""".fromJson[Char])(isLeft(equalTo("""(invalid '\x' in string)"""))) &&
+ assert("\"\u0000\"".fromJson[Char])(isLeft(equalTo("""(invalid control in string)"""))) &&
+ assert("\"\\u0000\"".replace('0', 'g').fromJson[Char])(isLeft(equalTo("""(invalid charcode in string)""")))
+ },
+ test("boolean") {
+ assert("true".fromJson[Boolean])(isRight(equalTo(true))) &&
+ assert("false".fromJson[Boolean])(isRight(equalTo(false))) &&
+ assert("x".fromJson[Boolean])(isLeft(equalTo("(expected a Boolean)")))
+ },
+ test("byte") {
+ assert("-128".fromJson[Byte])(isRight(equalTo(Byte.MinValue))) &&
+ assert("127".fromJson[Byte])(isRight(equalTo(Byte.MaxValue))) &&
+ assert("\"-123\"".fromJson[Byte])(isRight(equalTo(-123: Byte))) &&
+ assert("\"123\"".fromJson[Byte])(isRight(equalTo(123: Byte))) &&
+ assertTrue("+123".fromJson[Byte].isLeft) &&
+ assertTrue("\"Infinity\"".fromJson[Byte].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[Byte].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[Byte].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[Byte].isLeft)
+ },
+ test("short") {
+ assert("-32768".fromJson[Short])(isRight(equalTo(Short.MinValue))) &&
+ assert("32767".fromJson[Short])(isRight(equalTo(Short.MaxValue))) &&
+ assert("\"-12345\"".fromJson[Short])(isRight(equalTo(-12345: Short))) &&
+ assert("\"12345\"".fromJson[Short])(isRight(equalTo(12345: Short))) &&
+ assertTrue("+12345".fromJson[Short].isLeft) &&
+ assertTrue("\"Infinity\"".fromJson[Short].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[Short].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[Short].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[Short].isLeft)
+ },
+ test("int") {
+ assert("-2147483648".fromJson[Int])(isRight(equalTo(Int.MinValue))) &&
+ assert("2147483647".fromJson[Int])(isRight(equalTo(Int.MaxValue))) &&
+ assert("\"-1234567890\"".fromJson[Int])(isRight(equalTo(-1234567890))) &&
+ assert("\"1234567890\"".fromJson[Int])(isRight(equalTo(1234567890))) &&
+ assertTrue("+1234567890".fromJson[Int].isLeft) &&
+ assertTrue("\"Infinity\"".fromJson[Int].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[Int].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[Int].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[Int].isLeft)
+ },
+ test("long") {
+ assert("-9223372036854775808".fromJson[Long])(isRight(equalTo(Long.MinValue))) &&
+ assert("9223372036854775807".fromJson[Long])(isRight(equalTo(Long.MaxValue))) &&
+ assert("\"-123456789012345678\"".fromJson[Long])(isRight(equalTo(-123456789012345678L))) &&
+ assert("\"123456789012345678\"".fromJson[Long])(isRight(equalTo(123456789012345678L))) &&
+ assertTrue("+123456789012345678".fromJson[Long].isLeft) &&
+ assertTrue("\"Infinity\"".fromJson[Long].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[Long].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[Long].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[Long].isLeft)
+ },
+ test("float") {
+ assert("1.234567e9".fromJson[Float])(isRight(equalTo(1.234567e9f))) &&
+ assert("-1.234567e9".fromJson[Float])(isRight(equalTo(-1.234567e9f))) &&
+ assert("\"-1.234567e9\"".fromJson[Float])(isRight(equalTo(-1.234567e9f))) &&
+ assert("1.4e-45".fromJson[Float])(isRight(equalTo(1.4e-45f))) &&
+ assert("8.3e38".fromJson[Float])(isRight(equalTo(Float.PositiveInfinity))) &&
+ assert("-8.3e38".fromJson[Float])(isRight(equalTo(Float.NegativeInfinity))) &&
+ assert("1.23456789012345678901e-2147483648".fromJson[Float])(isLeft(equalTo("(expected a Float)"))) &&
+ assert("123456789012345678901e+2147483647".fromJson[Float])(isLeft(equalTo("(expected a Float)"))) &&
+ assert("12345678901234567890.1e+2147483647".fromJson[Float])(isLeft(equalTo("(expected a Float)"))) &&
+ assert("1.0e-2147483647".fromJson[Float])(isRight(equalTo(0.0f))) &&
+ assert("-1.0e-2147483647".fromJson[Float])(isRight(equalTo(-0.0f))) &&
+ assert("1234567890123456789.01e+2147483647".fromJson[Float])(isRight(equalTo(Float.PositiveInfinity))) &&
+ assert("-1234567890123456789.01e+2147483647".fromJson[Float])(isRight(equalTo(Float.NegativeInfinity))) &&
+ assert("\"Infinity\"".fromJson[Float])(isRight(equalTo(Float.PositiveInfinity))) &&
+ assert("\"+Infinity\"".fromJson[Float])(isRight(equalTo(Float.PositiveInfinity))) &&
+ assert("\"-Infinity\"".fromJson[Float])(isRight(equalTo(Float.NegativeInfinity))) &&
+ assertTrue("\"NaN\"".fromJson[Float].isRight) &&
+ assertTrue("Infinity".fromJson[Float].isLeft) &&
+ assertTrue("+Infinity".fromJson[Float].isLeft) &&
+ assertTrue("-Infinity".fromJson[Float].isLeft) &&
+ assertTrue("NaN".fromJson[Float].isLeft) &&
+ assertTrue("+1.234567e9".fromJson[Float].isLeft)
+ },
+ test("double") {
+ assert("1.23456789012345e9".fromJson[Double])(isRight(equalTo(1.23456789012345e9))) &&
+ assert("-1.23456789012345e9".fromJson[Double])(isRight(equalTo(-1.23456789012345e9))) &&
+ assert("\"-1.23456789012345e9\"".fromJson[Double])(isRight(equalTo(-1.23456789012345e9))) &&
+ assert("4.9e-324".fromJson[Double])(isRight(equalTo(4.9e-324))) &&
+ assert("1.8e308".fromJson[Double])(isRight(equalTo(Double.PositiveInfinity))) &&
+ assert("-1.8e308".fromJson[Double])(isRight(equalTo(Double.NegativeInfinity))) &&
+ assert("1.23456789012345678901e-2147483648".fromJson[Double])(isLeft(equalTo("(expected a Double)"))) &&
+ assert("12345678901234567890.1e+2147483647".fromJson[Double])(isLeft(equalTo("(expected a Double)"))) &&
+ assert("123456789012345678901e+2147483647".fromJson[Double])(isLeft(equalTo("(expected a Double)"))) &&
+ assert("1.0e-2147483647".fromJson[Double])(isRight(equalTo(0.0))) &&
+ assert("-1.0e-2147483647".fromJson[Double])(isRight(equalTo(-0.0))) &&
+ assert("1234567890123456789.01e+2147483647".fromJson[Double])(isRight(equalTo(Double.PositiveInfinity))) &&
+ assert("-1234567890123456789.01e+2147483647".fromJson[Double])(isRight(equalTo(Double.NegativeInfinity))) &&
+ assert("\"Infinity\"".fromJson[Double])(isRight(equalTo(Double.PositiveInfinity))) &&
+ assert("\"+Infinity\"".fromJson[Double])(isRight(equalTo(Double.PositiveInfinity))) &&
+ assert("\"-Infinity\"".fromJson[Double])(isRight(equalTo(Double.NegativeInfinity))) &&
+ assertTrue("\"NaN\"".fromJson[Double].isRight) &&
+ assertTrue("Infinity".fromJson[Double].isLeft) &&
+ assertTrue("+Infinity".fromJson[Double].isLeft) &&
+ assertTrue("-Infinity".fromJson[Double].isLeft) &&
+ assertTrue("NaN".fromJson[Double].isLeft) &&
+ assertTrue("+1.23456789012345e9".fromJson[Double].isLeft)
+ },
test("BigDecimal") {
- assert("123".fromJson[BigDecimal])(isRight(equalTo(BigDecimal(123))))
+ assert("-123.0e123".fromJson[BigDecimal])(isRight(equalTo(BigDecimal("-123.0e123")))) &&
+ assert("123.0e123".fromJson[BigDecimal])(isRight(equalTo(BigDecimal("123.0e123")))) &&
+ assertTrue("\"Infinity\"".fromJson[BigDecimal].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[BigDecimal].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[BigDecimal].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[BigDecimal].isLeft) &&
+ assert(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851"
+ .fromJson[BigDecimal]
+ )(isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))) &&
+ assert("1.23456789012345678901e-2147483648".fromJson[BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert("12345678901234567890.1e+2147483647".fromJson[BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert("123456789012345678901e+2147483647".fromJson[BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ )
},
- test("BigInteger too large") {
- // this big integer consumes more than 128 bits
- assert("170141183460469231731687303715884105728".fromJson[java.math.BigInteger])(
- isLeft(equalTo("(expected a 128 bit BigInteger)"))
+ test("java.math.BigDecimal") {
+ assert("-123.0e123".fromJson[java.math.BigDecimal])(
+ isRight(equalTo(new java.math.BigDecimal("-123.0e123")))
+ ) &&
+ assert("123.0e123".fromJson[java.math.BigDecimal])(isRight(equalTo(new java.math.BigDecimal("123.0e123")))) &&
+ assertTrue("\"Infinity\"".fromJson[java.math.BigDecimal].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[java.math.BigDecimal].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[java.math.BigDecimal].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[java.math.BigDecimal].isLeft) &&
+ assert(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851"
+ .fromJson[java.math.BigDecimal]
+ )(isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))) &&
+ assert("1.23456789012345678901e-2147483648".fromJson[java.math.BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert("12345678901234567890.1e+2147483647".fromJson[java.math.BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert("123456789012345678901e+2147483647".fromJson[java.math.BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
)
},
+ test("BigInteger") {
+ assert("170141183460469231731687303715884105728".fromJson[BigInteger])(
+ isRight(equalTo(new BigInteger("170141183460469231731687303715884105728")))
+ ) &&
+ assert("-170141183460469231731687303715884105728".fromJson[BigInteger])(
+ isRight(equalTo(new BigInteger("-170141183460469231731687303715884105728")))
+ ) &&
+ assertTrue("\"Infinity\"".fromJson[BigInteger].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[BigInteger].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[BigInteger].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[BigInteger].isLeft) &&
+ assert(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851316546851"
+ .fromJson[BigInteger]
+ )(isLeft(equalTo("(expected a 256-bit BigInteger)"))) &&
+ assert(
+ "17014118346046923173168730371588410572848946516548466848651357486465481896465316846".fromJson[BigInteger]
+ )(isLeft(equalTo("(expected a 256-bit BigInteger)")))
+ },
+ test("BigInt") {
+ assert("170141183460469231731687303715884105728".fromJson[BigInt])(
+ isRight(equalTo(BigInt("170141183460469231731687303715884105728")))
+ ) &&
+ assert("-170141183460469231731687303715884105728".fromJson[BigInt])(
+ isRight(equalTo(BigInt("-170141183460469231731687303715884105728")))
+ ) &&
+ assertTrue("\"Infinity\"".fromJson[BigInt].isLeft) &&
+ assertTrue("\"+Infinity\"".fromJson[BigInt].isLeft) &&
+ assertTrue("\"-Infinity\"".fromJson[BigInt].isLeft) &&
+ assertTrue("\"NaN\"".fromJson[BigInt].isLeft) &&
+ assert(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851316546851"
+ .fromJson[BigInt]
+ )(isLeft(equalTo("(expected a 256-bit BigInt)"))) &&
+ assert(
+ "17014118346046923173168730371588410572848946516548466848651357486465481896465316846".fromJson[BigInt]
+ )(isLeft(equalTo("(expected a 256-bit BigInt)")))
+ },
test("collections") {
val arr = """[1, 2, 3]"""
val obj = """{ "a": 1 }"""
@@ -46,6 +234,30 @@ object DecoderSpec extends ZIOSpecDefault {
forall(isRight(isRight(equalTo(2))))
)
},
+ test("tuples") {
+ assert("""["a",3]""".fromJson[(String, Int)])(isRight(equalTo(("a", 3)))) &&
+ assert("""["a","b"]""".fromJson[(String, Int)])(isLeft(equalTo("[1](expected an Int)"))) &&
+ assert("""[[0.1,0.2],[0.3,0.4],[-0.3,-]]""".fromJson[Seq[(Double, Double)]])(
+ isLeft(equalTo("[2][1](expected a Double)"))
+ )
+ },
+ test("tuples - ast") {
+ val a = Json.Arr(Json.Str("a"), Json.Num(3))
+ val b = Json.Arr(Json.Str("a"), Json.Str("b"))
+ val c = Json.Arr(
+ Json.Arr(Json.Num(0.1), Json.Num(0.2)),
+ Json.Arr(Json.Num(0.3), Json.Num(0.4)),
+ Json.Arr(Json.Num(-0.3), Json.Null)
+ )
+ val d = Json.Arr(Json.Num(0.1))
+
+ assertTrue(
+ a.as[(String, Int)].is(_.right) == ("a" -> 3),
+ b.as[(String, String)].is(_.right) == ("a" -> "b"),
+ c.as[List[(Double, Double)]].is(_.left) == """[2][1](expected a Double)""",
+ d.as[(Double, Double)].is(_.left) == "(Expected array of size 2)"
+ )
+ },
test("parameterless products") {
import exampleproducts._
@@ -89,10 +301,10 @@ object DecoderSpec extends ZIOSpecDefault {
DeriveJsonDecoder.gen[Mango]
}.flip
} yield assertTrue(
- // Class name in Scala 2: testzio.json.DecoderSpec.spec.Mango
- // Class name in Scala 3: testzio.json.DecoderSpec.spec.$anonfun.Mango
+ // Class name in Scala 2: zio.json.DecoderSpec.spec.Mango
+ // Class name in Scala 3: zio.json.DecoderSpec.spec.$anonfun.Mango
error.getMessage.matches(
- "Field names and aliases in case class testzio.json.DecoderSpec.spec(.\\$anonfun)?.Mango must be distinct, alias\\(es\\) r collide with a field or another alias"
+ "Field names and aliases in case class zio.json.DecoderSpec.spec(.\\$anonfun)?.Mango must be distinct, alias\\(es\\) r collide with a field or another alias"
)
)
},
@@ -104,7 +316,7 @@ object DecoderSpec extends ZIOSpecDefault {
}.flip
} yield assertTrue(
error.getMessage.matches(
- "Field names and aliases in case class testzio.json.DecoderSpec.spec(.\\$anonfun)?.Mango must be distinct, alias\\(es\\) r collide with a field or another alias"
+ "Field names and aliases in case class zio.json.DecoderSpec.spec(.\\$anonfun)?.Mango must be distinct, alias\\(es\\) r collide with a field or another alias"
)
)
},
@@ -116,7 +328,7 @@ object DecoderSpec extends ZIOSpecDefault {
}.flip
} yield assertTrue(
error.getMessage.matches(
- "Field names and aliases in case class testzio.json.DecoderSpec.spec(.\\$anonfun)?.Mango must be distinct, alias\\(es\\) r collide with a field or another alias"
+ "Field names and aliases in case class zio.json.DecoderSpec.spec(.\\$anonfun)?.Mango must be distinct, alias\\(es\\) r collide with a field or another alias"
)
)
},
@@ -142,6 +354,25 @@ object DecoderSpec extends ZIOSpecDefault {
assert("""{}""".fromJson[DefaultString])(isRight(equalTo(DefaultString("")))) &&
assert("""{"s": null}""".fromJson[DefaultString])(isRight(equalTo(DefaultString(""))))
},
+ test("dynamic default value") {
+ case class DefaultDynamic(
+ randomNumber: Double = scala.math.random(),
+ instant: java.time.Instant = java.time.Instant.now()
+ )
+
+ object DefaultDynamic {
+ implicit lazy val decoder: JsonDecoder[DefaultDynamic] = DeriveJsonDecoder.gen[DefaultDynamic]
+ }
+
+ def res = """{}""".stripMargin.fromJson[DefaultDynamic]
+
+ for {
+ dynamics1 <- ZIO.fromEither(res)
+ _ <- ZIO.sleep(2.millis)
+ dynamics2 <- ZIO.fromEither(res)
+ } yield assertTrue(dynamics1.randomNumber != dynamics2.randomNumber) &&
+ assertTrue(dynamics1.instant != dynamics2.instant)
+ } @@ TestAspect.withLiveClock,
test("sum encoding") {
import examplesum._
@@ -149,6 +380,26 @@ object DecoderSpec extends ZIOSpecDefault {
assert("""{"Child2":{}}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
assert("""{"type":"Child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)")))
},
+ test("sum encoding with enumValuesAsStrings = true") {
+ import examplesumobjects1._
+
+ assert(""""Child1"""".fromJson[Parent])(isRight(equalTo(Child1))) &&
+ assert(""""Child2"""".fromJson[Parent])(isRight(equalTo(Child2)))
+ },
+ test("sum encoding with enumValuesAsStrings = false") {
+ import examplesumobjects2._
+
+ assert("""{"Child1":{}}""".fromJson[Parent])(isRight(equalTo(Child1))) &&
+ assert("""{"Child2":{}}""".fromJson[Parent])(isRight(equalTo(Child2)))
+ },
+ test("sum encoding with hint names") {
+ import examplesumhintnames._
+
+ assert("""{"child1":{}}""".fromJson[Parent])(isRight(equalTo(Child1()))) &&
+ assert("""{"child2":{}}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
+ assert("""{"Child1":{}}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
+ assert("""{"type":"child1"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)")))
+ },
test("sum alternative encoding") {
import examplealtsum._
@@ -157,6 +408,33 @@ object DecoderSpec extends ZIOSpecDefault {
assert("""{"hint":"Samson"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
assert("""{"Cain":{}}""".fromJson[Parent])(isLeft(equalTo("(missing hint 'hint')")))
},
+ test("sum alternative encoding with hint names") {
+ import examplealtsumhintnames._
+
+ assert("""{"hint":"child1"}""".fromJson[Parent])(isRight(equalTo(Child1()))) &&
+ assert("""{"hint":"Abel"}""".fromJson[Parent])(isRight(equalTo(Child2()))) &&
+ assert("""{"hint":"Child2"}""".fromJson[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
+ assert("""{"child1":{}}""".fromJson[Parent])(isLeft(equalTo("(missing hint 'hint')")))
+ },
+ test("sum with more than 64 cases") {
+ import example100cases._
+
+ assert(""""B100"""".fromJson[A])(isRight(equalTo(A.B100)))
+ },
+ test("sum with duplicated case names") {
+ for {
+ error <- ZIO.attempt {
+ sealed trait Fruit
+ case class Banana(curvature: Double) extends Fruit
+ @jsonHint("Banana") case class Apple(color: String) extends Fruit
+ DeriveJsonDecoder.gen[Fruit]
+ }.flip
+ } yield assertTrue(
+ error.getMessage.matches(
+ """Case names in ADT zio.json.DecoderSpec.spec(.\$anonfun)?.Fruit must be distinct, name\(s\) Banana are duplicated"""
+ )
+ )
+ },
test("unicode") {
assert(""""€🐵🥰"""".fromJson[String])(isRight(equalTo("€🐵🥰")))
},
@@ -225,6 +503,53 @@ object DecoderSpec extends ZIOSpecDefault {
val jsonStr = JsonEncoder[Map[String, String]].encodeJson(expected, None)
assert(jsonStr.fromJson[Map[String, String]])(isRight(equalTo(expected)))
},
+ test("Map with Int keys") {
+ assert("""{"1234567890": "value"}""".fromJson[Map[Int, String]])(
+ isRight(equalTo(Map(1234567890 -> "value")))
+ ) &&
+ assert("""{"xxx": "value"}""".fromJson[Map[Int, String]])(isLeft(containsString("Invalid Int: xxx")))
+ },
+ test("Map with Long keys") {
+ assert("""{"1234567890123456789": "value"}""".fromJson[Map[Long, String]])(
+ isRight(equalTo(Map(1234567890123456789L -> "value")))
+ ) &&
+ assert("""{"xxx": "value"}""".fromJson[Map[Long, String]])(isLeft(containsString("Invalid Long: xxx")))
+ },
+ test("Map with UUID keys") {
+ def expectedMap(str: String): Map[UUID, String] = Map(UUID.fromString(str) -> "value")
+
+ val ok1 = """{"64d7c38d-2afd-4514-9832-4e70afe4b0f8": "value"}"""
+ val ok2 = """{"0000000064D7C38D-FD-14-32-70AFE4B0f8": "value"}"""
+ val ok3 = """{"0-0-0-0-0": "value"}"""
+ val bad1 = """{"": "value"}"""
+ val bad2 = """{"64d7c38d-2afd-4514-9832-4e70afe4b0f80": "value"}"""
+ val bad3 = """{"64d7c38d-2afd-4514-983-4e70afe4b0f80": "value"}"""
+ val bad4 = """{"64d7c38d-2afd--9832-4e70afe4b0f8": "value"}"""
+ val bad5 = """{"64d7c38d-2afd-XXXX-9832-4e70afe4b0f8": "value"}"""
+ val bad6 = """{"64d7c38d-2afd-X-9832-4e70afe4b0f8": "value"}"""
+ val bad7 = """{"0-0-0-0-00000000000000000": "value"}"""
+ val bad8 = """{"64d7c38d-2аfd-4514-9832-4e70afe4b0f8": "value"}"""
+ val bad9 = """{"0000000064D7C38D-FD-14-32-70АFE4B0f8": "value"}"""
+
+ assert(ok1.fromJson[Map[UUID, String]])(
+ isRight(equalTo(expectedMap("64d7c38d-2afd-4514-9832-4e70afe4b0f8")))
+ ) &&
+ assert(ok2.fromJson[Map[UUID, String]])(
+ isRight(equalTo(expectedMap("64D7C38D-00FD-0014-0032-0070AfE4B0f8")))
+ ) &&
+ assert(ok3.fromJson[Map[UUID, String]])(
+ isRight(equalTo(expectedMap("00000000-0000-0000-0000-000000000000")))
+ ) &&
+ assert(bad1.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad2.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad3.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad4.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad5.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad6.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad7.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad8.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad9.fromJson[Map[UUID, String]])(isLeft(containsString("(expected a UUID)")))
+ },
test("zio.Chunk") {
val jsonStr = """["5XL","2XL","XL"]"""
val expected = Chunk("5XL", "2XL", "XL")
@@ -253,17 +578,21 @@ object DecoderSpec extends ZIOSpecDefault {
val bad5 = """"64d7c38d-2afd-XXXX-9832-4e70afe4b0f8""""
val bad6 = """"64d7c38d-2afd-X-9832-4e70afe4b0f8""""
val bad7 = """"0-0-0-0-00000000000000000""""
+ val bad8 = """"64d7c38d-2аfd-4514-9832-4e70afe4b0f8""""
+ val bad9 = """"0000000064D7C38D-FD-14-32-70АFE4B0f8""""
assert(ok1.fromJson[UUID])(isRight(equalTo(UUID.fromString("64d7c38d-2afd-4514-9832-4e70afe4b0f8")))) &&
assert(ok2.fromJson[UUID])(isRight(equalTo(UUID.fromString("64D7C38D-00FD-0014-0032-0070AfE4B0f8")))) &&
assert(ok3.fromJson[UUID])(isRight(equalTo(UUID.fromString("00000000-0000-0000-0000-000000000000")))) &&
- assert(bad1.fromJson[UUID])(isLeft(containsString("Invalid UUID: "))) &&
- assert(bad2.fromJson[UUID])(isLeft(containsString("Invalid UUID: UUID string too large"))) &&
- assert(bad3.fromJson[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd-4514-983-4e70afe4b0f80"))) &&
- assert(bad4.fromJson[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd--9832-4e70afe4b0f8"))) &&
- assert(bad5.fromJson[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd-XXXX-9832-4e70afe4b0f8"))) &&
- assert(bad6.fromJson[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd-X-9832-4e70afe4b0f8"))) &&
- assert(bad7.fromJson[UUID])(isLeft(containsString("Invalid UUID: 0-0-0-0-00000000000000000")))
+ assert(bad1.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad2.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad3.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad4.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad5.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad6.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad7.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad8.fromJson[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad9.fromJson[UUID])(isLeft(containsString("(expected a UUID)")))
},
test("java.util.Currency") {
assert(""""USD"""".fromJson[java.util.Currency])(isRight(equalTo(java.util.Currency.getInstance("USD")))) &&
@@ -277,7 +606,7 @@ object DecoderSpec extends ZIOSpecDefault {
assert(ok1.fromJson[Duration])(isRight(equalTo(Duration.parse("PT1H2M3S")))) &&
assert(ok2.fromJson[Duration])(isRight(equalTo(Duration.ofNanos(-500000000)))) &&
assert(bad1.fromJson[Duration])(
- isLeft(containsString("PT-H is not a valid ISO-8601 format, expected digit at index 3"))
+ isLeft(containsString("expected a Duration"))
)
},
test("java.time.ZonedDateTime") {
@@ -292,33 +621,297 @@ object DecoderSpec extends ZIOSpecDefault {
assert(ok2.fromJson[ZonedDateTime].map(_.toOffsetDateTime))(
isRight(equalTo(OffsetDateTime.parse("2018-10-28T03:30+01:00")))
) &&
- assert(bad1.fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2018-10-28T02:30 is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 16)"
- )
- )
+ assert(bad1.fromJson[ZonedDateTime])(isLeft(equalTo("(expected a ZonedDateTime)")))
+ },
+ test("bothWith") {
+ final case class Foo(a: Int)
+ final case class Bar(b: String)
+
+ val fooDecoder: JsonDecoder[Foo] = DeriveJsonDecoder.gen
+ val barDecoder: JsonDecoder[Bar] = DeriveJsonDecoder.gen
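+ // both applies each decoder to the same JSON value and pairs the results in a tuple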
+ implicit val fooAndBarDecoder: JsonDecoder[(Foo, Bar)] = fooDecoder.both(barDecoder)
+
+ val json = """{"a": 1, "b": "foo"}"""
+ assertTrue(
+ json.fromJson[(Foo, Bar)] == Right((Foo(1), Bar("foo")))
+ )
+ },
+ test("bothWith - ast") {
+ final case class Foo(a: Int)
+ final case class Bar(b: String)
+
+ val fooDecoder: JsonDecoder[Foo] = DeriveJsonDecoder.gen
+ val barDecoder: JsonDecoder[Bar] = DeriveJsonDecoder.gen
+ implicit val fooAndBarDecoder: JsonDecoder[(Foo, Bar)] = fooDecoder.both(barDecoder)
+
+ val json = Json.Obj("a" -> Json.Num(1), "b" -> Json.Str("foo"))
+ assertTrue(
+ json.as[(Foo, Bar)] == Right((Foo(1), Bar("foo")))
)
+ },
+ test("option custom codec") {
+ val json = """{"keyStatus": "certified"}"""
+ final case class Foo(v: String)
+ final case class RudderSettings(keyStatus: String, policyMode: Option[Foo])
+ implicit val encoderOptionPolicyMode: JsonEncoder[Option[Foo]] = JsonEncoder.string.contramap {
+ case None => "default"
+ case Some(f) => f.v
+ }
+ implicit val decoderOptionPolicyMode: JsonDecoder[Option[Foo]] = JsonDecoder[Option[String]].mapOrFail {
+ case None | Some("default") => Right(None)
+ case Some(s) => Right(Some(Foo(s)))
+ }
+ implicit lazy val codecRudderSettings: JsonCodec[RudderSettings] = DeriveJsonCodec.gen
+ assertTrue(json.fromJson[RudderSettings] == Right(RudderSettings("certified", None)))
}
),
suite("fromJsonAST")(
+ test("boolean") {
+ assert(Json.Bool(true).as[Boolean])(isRight(equalTo(true))) &&
+ assert(Json.Str("true").as[Boolean])(isLeft(equalTo("(expected boolean)")))
+ },
+ test("string") {
+ assert(Json.Str("xxx").as[String])(isRight(equalTo("xxx"))) &&
+ assert(Json.Bool(true).as[String])(isLeft(equalTo("(expected string)")))
+ },
+ test("char") {
+ assert(Json.Str("x").as[Char])(isRight(equalTo('x'))) &&
+ assert(Json.Str("xxx").as[Char])(isLeft(equalTo("(expected single character string)"))) &&
+ assert(Json.Bool(true).as[Char])(isLeft(equalTo("(expected single character string)")))
+ },
+ test("byte") {
+ assert(Json.Num(Byte.MinValue).as[Byte])(isRight(equalTo(Byte.MinValue))) &&
+ assert(Json.Num(Byte.MaxValue).as[Byte])(isRight(equalTo(Byte.MaxValue))) &&
+ assert(Json.Str(Byte.MinValue.toString).as[Byte])(isRight(equalTo(Byte.MinValue))) &&
+ assert(Json.Str(Byte.MaxValue.toString).as[Byte])(isRight(equalTo(Byte.MaxValue))) &&
+ assertTrue(Json.Num(Byte.MinValue.toInt - 1).as[Byte].isLeft) &&
+ assertTrue(Json.Num(Byte.MaxValue.toInt + 1).as[Byte].isLeft) &&
+ assertTrue(Json.Str((Byte.MinValue.toInt - 1).toString).as[Byte].isLeft) &&
+ assertTrue(Json.Str((Byte.MaxValue.toInt + 1).toString).as[Byte].isLeft) &&
+ assertTrue(Json.Str("\"-123\"").as[Byte].isLeft) &&
+ assertTrue(Json.Str("\"123\"").as[Byte].isLeft) &&
+ assertTrue(Json.Str("123abc").as[Byte].isLeft) &&
+ assertTrue(Json.Str("+123").as[Byte].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[Byte].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[Byte].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[Byte].isLeft) &&
+ assertTrue(Json.Str("NaN").as[Byte].isLeft)
+ },
+ test("short") {
+ assert(Json.Num(Short.MinValue).as[Short])(isRight(equalTo(Short.MinValue))) &&
+ assert(Json.Num(Short.MaxValue).as[Short])(isRight(equalTo(Short.MaxValue))) &&
+ assert(Json.Str(Short.MinValue.toString).as[Short])(isRight(equalTo(Short.MinValue))) &&
+ assert(Json.Str(Short.MaxValue.toString).as[Short])(isRight(equalTo(Short.MaxValue))) &&
+ assertTrue(Json.Num(Short.MinValue.toInt - 1).as[Short].isLeft) &&
+ assertTrue(Json.Num(Short.MaxValue.toInt + 1).as[Short].isLeft) &&
+ assertTrue(Json.Str((Short.MinValue.toInt - 1).toString).as[Short].isLeft) &&
+ assertTrue(Json.Str((Short.MaxValue.toInt + 1).toString).as[Short].isLeft) &&
+ assertTrue(Json.Str("\"-12345\"").as[Short].isLeft) &&
+ assertTrue(Json.Str("\"12345\"").as[Short].isLeft) &&
+ assertTrue(Json.Str("12345abc").as[Short].isLeft) &&
+ assertTrue(Json.Str("+12345").as[Short].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[Short].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[Short].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[Short].isLeft) &&
+ assertTrue(Json.Str("NaN").as[Short].isLeft)
+ },
+ test("int") {
+ assert(Json.Num(Int.MinValue).as[Int])(isRight(equalTo(Int.MinValue))) &&
+ assert(Json.Num(Int.MaxValue).as[Int])(isRight(equalTo(Int.MaxValue))) &&
+ assert(Json.Str(Int.MinValue.toString).as[Int])(isRight(equalTo(Int.MinValue))) &&
+ assert(Json.Str(Int.MaxValue.toString).as[Int])(isRight(equalTo(Int.MaxValue))) &&
+ assertTrue(Json.Num(Int.MinValue.toLong - 1).as[Int].isLeft) &&
+ assertTrue(Json.Num(Int.MaxValue.toLong + 1).as[Int].isLeft) &&
+ assertTrue(Json.Str((Int.MinValue.toLong - 1).toString).as[Int].isLeft) &&
+ assertTrue(Json.Str((Int.MaxValue.toLong + 1).toString).as[Int].isLeft) &&
+ assertTrue(Json.Str("\"-1234567890\"").as[Int].isLeft) &&
+ assertTrue(Json.Str("\"1234567890\"").as[Int].isLeft) &&
+ assertTrue(Json.Str("1234567890abc").as[Int].isLeft) &&
+ assertTrue(Json.Str("+1234567890").as[Int].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[Int].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[Int].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[Int].isLeft) &&
+ assertTrue(Json.Str("NaN").as[Int].isLeft)
+ },
+ test("long") {
+ assert(Json.Num(Long.MinValue).as[Long])(isRight(equalTo(Long.MinValue))) &&
+ assert(Json.Num(Long.MaxValue).as[Long])(isRight(equalTo(Long.MaxValue))) &&
+ assert(Json.Str(Long.MinValue.toString).as[Long])(isRight(equalTo(Long.MinValue))) &&
+ assert(Json.Str(Long.MaxValue.toString).as[Long])(isRight(equalTo(Long.MaxValue))) &&
+ assertTrue(Json.Num(BigDecimal(Long.MinValue) - 1).as[Long].isLeft) &&
+ assertTrue(Json.Num(BigDecimal(Long.MaxValue) + 1).as[Long].isLeft) &&
+ assertTrue(Json.Str((BigDecimal(Long.MinValue) - 1).toString).as[Long].isLeft) &&
+ assertTrue(Json.Str((BigDecimal(Long.MaxValue) + 1).toString).as[Long].isLeft) &&
+ assertTrue(Json.Str("\"-123456789012345678\"").as[Long].isLeft) &&
+ assertTrue(Json.Str("\"123456789012345678\"").as[Long].isLeft) &&
+ assertTrue(Json.Str("123456789012345678abc").as[Long].isLeft) &&
+ assertTrue(Json.Str("+123456789012345678").as[Long].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[Long].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[Long].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[Long].isLeft) &&
+ assertTrue(Json.Str("NaN").as[Long].isLeft)
+ },
+ test("float") {
+ assert(Json.Num(Float.MinValue).as[Float])(isRight(equalTo(Float.MinValue))) &&
+ assert(Json.Num(Float.MaxValue).as[Float])(isRight(equalTo(Float.MaxValue))) &&
+ assert(Json.Str(Float.MinValue.toString).as[Float])(isRight(equalTo(Float.MinValue))) &&
+ assert(Json.Str(Float.MaxValue.toString).as[Float])(isRight(equalTo(Float.MaxValue))) &&
+ assert(Json.Str("Infinity").as[Float])(isRight(equalTo(Float.PositiveInfinity))) &&
+ assert(Json.Str("+Infinity").as[Float])(isRight(equalTo(Float.PositiveInfinity))) &&
+ assert(Json.Str("-Infinity").as[Float])(isRight(equalTo(Float.NegativeInfinity))) &&
+ assertTrue(Json.Str("NaN").as[Float].isRight) &&
+ assertTrue(Json.Str("\"-1.234567e9\"").as[Float].isLeft) &&
+ assertTrue(Json.Str("\"1.234567e9\"").as[Float].isLeft) &&
+ assertTrue(Json.Str("1.234567e9abc").as[Float].isLeft) &&
+ assertTrue(Json.Str("+1.234567e9").as[Float].isLeft)
+ },
+ test("double") {
+ assert(Json.Num(Double.MinValue).as[Double])(isRight(equalTo(Double.MinValue))) &&
+ assert(Json.Num(Double.MaxValue).as[Double])(isRight(equalTo(Double.MaxValue))) &&
+ assert(Json.Str(Double.MinValue.toString).as[Double])(isRight(equalTo(Double.MinValue))) &&
+ assert(Json.Str(Double.MaxValue.toString).as[Double])(isRight(equalTo(Double.MaxValue))) &&
+ assert(Json.Str("Infinity").as[Double])(isRight(equalTo(Double.PositiveInfinity))) &&
+ assert(Json.Str("+Infinity").as[Double])(isRight(equalTo(Double.PositiveInfinity))) &&
+ assert(Json.Str("-Infinity").as[Double])(isRight(equalTo(Double.NegativeInfinity))) &&
+ assertTrue(Json.Str("NaN").as[Double].isRight) &&
+ assertTrue(Json.Str("\"-1.23456789012345e9\"").as[Double].isLeft) &&
+ assertTrue(Json.Str("\"1.23456789012345e9\"").as[Double].isLeft) &&
+ assertTrue(Json.Str("1.23456789012345e9abc").as[Double].isLeft) &&
+ assertTrue(Json.Str("+1.23456789012345e9").as[Double].isLeft)
+ },
test("BigDecimal") {
- assert(Json.Num(123).as[BigDecimal])(isRight(equalTo(BigDecimal(123))))
+ assert(Json.Num(BigDecimal("-123.0e123")).as[BigDecimal])(isRight(equalTo(BigDecimal("-123.0e123")))) &&
+ assert(Json.Num(BigDecimal("123.0e123")).as[BigDecimal])(isRight(equalTo(BigDecimal("123.0e123")))) &&
+ assert(Json.Str("-123.0e123").as[BigDecimal])(isRight(equalTo(BigDecimal("-123.0e123")))) &&
+ assert(Json.Str("123.0e123").as[BigDecimal])(isRight(equalTo(BigDecimal("123.0e123")))) &&
+ assertTrue(Json.Str("123.0abc").as[BigDecimal].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[BigDecimal].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[BigDecimal].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[BigDecimal].isLeft) &&
+ assertTrue(Json.Str("NaN").as[BigDecimal].isLeft) &&
+ assert(
+ Json
+ .Str(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851"
+ )
+ .as[BigDecimal]
+ )(isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))) &&
+ assert(Json.Str("1.23456789012345678901e-2147483648").as[BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert(Json.Str("12345678901234567890.1e+2147483647").as[BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert(Json.Str("123456789012345678901e+2147483647").as[BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ )
+ },
+ test("java.math.BigDecimal") {
+ assert(Json.Num(BigDecimal("-123.0e123")).as[java.math.BigDecimal])(
+ isRight(equalTo(new java.math.BigDecimal("-123.0e123")))
+ ) &&
+ assert(Json.Num(BigDecimal("123.0e123")).as[java.math.BigDecimal])(
+ isRight(equalTo(new java.math.BigDecimal("123.0e123")))
+ ) &&
+ assert(Json.Str("-123.0e123").as[java.math.BigDecimal])(
+ isRight(equalTo(new java.math.BigDecimal("-123.0e123")))
+ ) &&
+ assert(Json.Str("123.0e123").as[java.math.BigDecimal])(
+ isRight(equalTo(new java.math.BigDecimal("123.0e123")))
+ ) &&
+ assertTrue(Json.Str("123.0abc").as[java.math.BigDecimal].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[java.math.BigDecimal].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[java.math.BigDecimal].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[java.math.BigDecimal].isLeft) &&
+ assertTrue(Json.Str("NaN").as[java.math.BigDecimal].isLeft) &&
+ assert(
+ Json
+ .Str(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851"
+ )
+ .as[java.math.BigDecimal]
+ )(isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))) &&
+ assert(Json.Str("1.23456789012345678901e-2147483648").as[java.math.BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert(Json.Str("12345678901234567890.1e+2147483647").as[java.math.BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert(Json.Str("123456789012345678901e+2147483647").as[java.math.BigDecimal])(
+ isLeft(equalTo("(expected a BigDecimal with 256-bit mantissa)"))
+ )
+ },
+ test("BigInteger") {
+ assert(Json.Num(BigInt("170141183460469231731687303715884105728")).as[BigInteger])(
+ isRight(equalTo(new BigInteger("170141183460469231731687303715884105728")))
+ ) &&
+ assert(Json.Num(BigInt("-170141183460469231731687303715884105728")).as[BigInteger])(
+ isRight(equalTo(new BigInteger("-170141183460469231731687303715884105728")))
+ ) &&
+ assert(Json.Str("170141183460469231731687303715884105728").as[BigInteger])(
+ isRight(equalTo(new BigInteger("170141183460469231731687303715884105728")))
+ ) &&
+ assert(Json.Str("-170141183460469231731687303715884105728").as[BigInteger])(
+ isRight(equalTo(new BigInteger("-170141183460469231731687303715884105728")))
+ ) &&
+ assertTrue(Json.Str("123abc").as[BigInteger].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[BigInteger].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[BigInteger].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[BigInteger].isLeft) &&
+ assertTrue(Json.Str("NaN").as[BigInteger].isLeft) &&
+ assert(
+ Json
+ .Str(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851316546851"
+ )
+ .as[BigInteger]
+ )(isLeft(equalTo("(expected a 256-bit BigInteger)"))) &&
+ assert(
+ Json
+ .Str("17014118346046923173168730371588410572848946516548466848651357486465481896465316846")
+ .as[BigInteger]
+ )(isLeft(equalTo("(expected a 256-bit BigInteger)")))
+ },
+ test("BigInt") {
+ assert(Json.Num(BigInt("170141183460469231731687303715884105728")).as[BigInt])(
+ isRight(equalTo(BigInt("170141183460469231731687303715884105728")))
+ ) &&
+ assert(Json.Num(BigInt("-170141183460469231731687303715884105728")).as[BigInt])(
+ isRight(equalTo(BigInt("-170141183460469231731687303715884105728")))
+ ) &&
+ assert(Json.Str("170141183460469231731687303715884105728").as[BigInt])(
+ isRight(equalTo(BigInt("170141183460469231731687303715884105728")))
+ ) &&
+ assert(Json.Str("-170141183460469231731687303715884105728").as[BigInt])(
+ isRight(equalTo(BigInt("-170141183460469231731687303715884105728")))
+ ) &&
+ assertTrue(Json.Str("123abc").as[BigInt].isLeft) &&
+ assertTrue(Json.Str("Infinity").as[BigInt].isLeft) &&
+ assertTrue(Json.Str("+Infinity").as[BigInt].isLeft) &&
+ assertTrue(Json.Str("-Infinity").as[BigInt].isLeft) &&
+ assertTrue(Json.Str("NaN").as[BigInt].isLeft) &&
+ assert(
+ Json
+ .Str(
+ "170141183460469231731687303715884105728489465165484668486513574864654818964653168465316546851316546851"
+ )
+ .as[BigInt]
+ )(isLeft(equalTo("(expected a 256-bit BigInt)"))) &&
+ assert(
+ Json.Str("17014118346046923173168730371588410572848946516548466848651357486465481896465316846").as[BigInt]
+ )(isLeft(equalTo("(expected a 256-bit BigInt)")))
},
test("eithers") {
val bernies =
List(Json.Obj("a" -> Json.Num(1)), Json.Obj("left" -> Json.Num(1)), Json.Obj("Left" -> Json.Num(1)))
val trumps =
List(Json.Obj("b" -> Json.Num(2)), Json.Obj("right" -> Json.Num(2)), Json.Obj("Right" -> Json.Num(2)))
-
- assert(bernies.map(_.as[Either[Int, Int]]))(
- forall(isRight(isLeft(equalTo(1))))
- ) && assert(trumps.map(_.as[Either[Int, Int]]))(
- forall(isRight(isRight(equalTo(2))))
- )
+ assert(bernies.map(_.as[Either[Int, Int]]))(forall(isRight(isLeft(equalTo(1))))) &&
+ assert(trumps.map(_.as[Either[Int, Int]]))(forall(isRight(isRight(equalTo(2)))))
},
test("parameterless products") {
import exampleproducts._
+
assert(Json.Obj().as[Parameterless])(isRight(equalTo(Parameterless()))) &&
assert(Json.Null.as[Parameterless])(isRight(equalTo(Parameterless()))) &&
assert(Json.Obj("field" -> Json.Str("value")).as[Parameterless])(isRight(equalTo(Parameterless())))
@@ -335,7 +928,23 @@ object DecoderSpec extends ZIOSpecDefault {
import exampleproducts._
assert(Json.Obj("is" -> Json.Arr(Json.Obj("str" -> Json.Num(1)))).as[Outer])(
- isLeft(equalTo(".is[0].str(Not a string value)"))
+ isLeft(equalTo(".is[0].str(expected string)"))
+ )
+ },
+ test("errors are consistent with direct decoding") {
+ assert("""{}""".fromJson[Message])(isLeft(equalTo(".v1(missing)"))) &&
+ assert("""{}""".fromJson[Json].flatMap(_.as[Message]))(isLeft(equalTo(".v1(missing)"))) &&
+ assert("""{"v1":"","v2":""}""".fromJson[Message])(
+ isLeft(equalTo(".v1(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert("""{"v1":"","v2":""}""".fromJson[Json].flatMap(_.as[Message]))(
+ isLeft(equalTo(".v1(expected a BigDecimal with 256-bit mantissa)"))
+ ) &&
+ assert("""{"v1":1,"v2":1}""".fromJson[Message])(
+ isLeft(equalTo(".v2(expected string)"))
+ ) &&
+ assert("""{"v1":1,"v2":1}""".fromJson[Json].flatMap(_.as[Message]))(
+ isLeft(equalTo(".v2(expected string)"))
)
},
test("default field value") {
@@ -344,6 +953,32 @@ object DecoderSpec extends ZIOSpecDefault {
assert(Json.Obj().as[DefaultString])(isRight(equalTo(DefaultString("")))) &&
assert(Json.Obj("s" -> Json.Null).as[DefaultString])(isRight(equalTo(DefaultString(""))))
},
+ test("product with more than 64 fields") {
+ import example100fields._
+
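+ // "F60" and "f109" resolve via @jsonAliases to f60 and f100; "f129" matches no field and is skipped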
+ assert("""{"f01":1,"F60":60,"f70":70,"f109":100,"f129":129}""".fromJson[A])(
+ isRight(equalTo(A(f01 = Some(1), f60 = Some(60), f70 = Some(70), f100 = Some(100))))
+ )
+ },
+ test("dynamic default value") {
+ case class DefaultDynamic(
+ randomNumber: Double = scala.math.random(),
+ instant: java.time.Instant = java.time.Instant.now()
+ )
+
+ object DefaultDynamic {
+ implicit lazy val decoder: JsonDecoder[DefaultDynamic] = DeriveJsonDecoder.gen[DefaultDynamic]
+ }
+
+ for {
+ dynamics1 <- ZIO.fromEither(Json.Obj().as[DefaultDynamic])
+ _ <- ZIO.sleep(2.millis) // ensure java.time.Instant is different
+ dynamics2 <- ZIO.fromEither(Json.Obj().as[DefaultDynamic])
+ } yield assertTrue(
+ dynamics1.randomNumber != dynamics2.randomNumber,
+ dynamics1.instant != dynamics2.instant
+ )
+ } @@ TestAspect.withLiveClock,
test("aliases") {
import exampleproducts._
@@ -363,15 +998,15 @@ object DecoderSpec extends ZIOSpecDefault {
assert(Json.Obj("Child1" -> Json.Obj()).as[Parent])(isRight(equalTo(Child1()))) &&
assert(Json.Obj("Child2" -> Json.Obj()).as[Parent])(isRight(equalTo(Child2()))) &&
- assert(Json.Obj("type" -> Json.Str("Child1")).as[Parent])(isLeft(equalTo("(Invalid disambiguator)")))
+ assert(Json.Obj("type" -> Json.Str("Child1")).as[Parent])(isLeft(equalTo("(invalid disambiguator)")))
},
test("sum alternative encoding") {
import examplealtsum._
assert(Json.Obj("hint" -> Json.Str("Cain")).as[Parent])(isRight(equalTo(Child1()))) &&
assert(Json.Obj("hint" -> Json.Str("Abel")).as[Parent])(isRight(equalTo(Child2()))) &&
- assert(Json.Obj("hint" -> Json.Str("Samson")).as[Parent])(isLeft(equalTo("(Invalid disambiguator)"))) &&
- assert(Json.Obj("Cain" -> Json.Obj()).as[Parent])(isLeft(equalTo("(Missing hint 'hint')")))
+ assert(Json.Obj("hint" -> Json.Str("Samson")).as[Parent])(isLeft(equalTo("(invalid disambiguator)"))) &&
+ assert(Json.Obj("Cain" -> Json.Obj()).as[Parent])(isLeft(equalTo("(missing hint 'hint')")))
},
test("Seq") {
val json = Json.Arr(Json.Str("5XL"), Json.Str("2XL"), Json.Str("XL"))
@@ -439,6 +1074,12 @@ object DecoderSpec extends ZIOSpecDefault {
assert(json.as[SortedMap[String, Int]])(isRight(equalTo(expected)))
},
+ test("ListMap") {
+ val json = Json.Obj("5XL" -> Json.Num(3), "2XL" -> Json.Num(14), "XL" -> Json.Num(159))
+ val expected = immutable.ListMap("5XL" -> 3, "2XL" -> 14, "XL" -> 159)
+
+ assert(json.as[immutable.ListMap[String, Int]])(isRight(equalTo(expected)))
+ },
test("Map, custom keys") {
val json = Json.Obj("1" -> Json.Str("a"), "2" -> Json.Str("b"))
val expected = Map(1 -> "a", 2 -> "b")
@@ -468,17 +1109,21 @@ object DecoderSpec extends ZIOSpecDefault {
val bad5 = Json.Str("64d7c38d-2afd-XXXX-9832-4e70afe4b0f8")
val bad6 = Json.Str("64d7c38d-2afd-X-9832-4e70afe4b0f8")
val bad7 = Json.Str("0-0-0-0-00000000000000000")
+ val bad8 = Json.Str("64d7c38d-2аfd-4514-9832-4e70afe4b0f8")
+ val bad9 = Json.Str("0000000064D7C38D-FD-14-32-70АFE4B0f8")
assert(ok1.as[UUID])(isRight(equalTo(UUID.fromString("64d7c38d-2afd-4514-9832-4e70afe4b0f8")))) &&
assert(ok2.as[UUID])(isRight(equalTo(UUID.fromString("64D7C38D-00FD-0014-0032-0070AFE4B0f8")))) &&
assert(ok3.as[UUID])(isRight(equalTo(UUID.fromString("00000000-0000-0000-0000-000000000000")))) &&
- assert(bad1.as[UUID])(isLeft(containsString("Invalid UUID: "))) &&
- assert(bad2.as[UUID])(isLeft(containsString("Invalid UUID: UUID string too large"))) &&
- assert(bad3.as[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd-4514-983-4e70afe4b0f80"))) &&
- assert(bad4.as[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd--9832-4e70afe4b0f8"))) &&
- assert(bad5.as[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd-XXXX-9832-4e70afe4b0f8"))) &&
- assert(bad6.as[UUID])(isLeft(containsString("Invalid UUID: 64d7c38d-2afd-X-9832-4e70afe4b0f8"))) &&
- assert(bad7.as[UUID])(isLeft(containsString("Invalid UUID: 0-0-0-0-00000000000000000")))
+ assert(bad1.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad2.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad3.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad4.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad5.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad6.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad7.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad8.as[UUID])(isLeft(containsString("(expected a UUID)"))) &&
+ assert(bad9.as[UUID])(isLeft(containsString("(expected a UUID)")))
}
)
)
@@ -544,6 +1189,53 @@ object DecoderSpec extends ZIOSpecDefault {
}
+ object examplesumobjects1 {
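+ // implicit configuration picked up by DeriveJsonDecoder.gen below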
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = true)
+
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val decoder: JsonDecoder[Parent] = DeriveJsonDecoder.gen[Parent]
+ }
+
+ case object Child1 extends Parent
+
+ case object Child2 extends Parent
+
+ }
+
+ object examplesumobjects2 {
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = false)
+
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val decoder: JsonDecoder[Parent] = DeriveJsonDecoder.gen[Parent]
+ }
+
+ case object Child1 extends Parent
+
+ case object Child2 extends Parent
+
+ }
+
+ object examplesumhintnames {
+
+ @jsonHintNames(CamelCase)
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val decoder: JsonDecoder[Parent] = DeriveJsonDecoder.gen[Parent]
+ }
+
+ case class Child1() extends Parent
+
+ case class Child2() extends Parent
+
+ }
+
object examplealtsum {
@jsonDiscriminator("hint")
@@ -561,12 +1253,265 @@ object DecoderSpec extends ZIOSpecDefault {
}
+ object examplealtsumhintnames {
+
+ @jsonDiscriminator("hint")
+ @jsonHintNames(CamelCase)
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val decoder: JsonDecoder[Parent] = DeriveJsonDecoder.gen[Parent]
+ }
+
+ case class Child1() extends Parent
+
+ @jsonHint("Abel")
+ case class Child2() extends Parent
+
+ }
+
object logEvent {
- case class Event(at: Long, message: String)
+ case class Event(at: Long, message: String, fatal: Boolean = false, priority: Double = 0.0)
implicit val eventDecoder: JsonDecoder[Event] = DeriveJsonDecoder.gen[Event]
implicit val eventEncoder: JsonEncoder[Event] = DeriveJsonEncoder.gen[Event]
}
+ object fieldDecoder {
+ case class PersonId(value: String)
+
+ object PersonId {
+ implicit val jsonFieldEncoder: JsonFieldEncoder[PersonId] = JsonFieldEncoder.string.contramap(_.value)
+ implicit val jsonFieldDecoder: JsonFieldDecoder[PersonId] = JsonFieldDecoder.string.map(PersonId.apply)
+ }
+
+ implicitly[JsonFieldEncoder[PersonId]]
+ implicitly[JsonFieldDecoder[PersonId]]
+ }
+
+ case class Message(v1: math.BigDecimal, v2: String)
+
+ object Message {
+ implicit val decoder: JsonDecoder[Message] = DeriveJsonDecoder.gen[Message]
+ }
+
+ object example100cases {
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = true)
+
+ sealed trait A extends Product with Serializable
+
+ object A {
+ case object B1 extends A
+ case object B2 extends A
+ case object B3 extends A
+ case object B4 extends A
+ case object B5 extends A
+ case object B6 extends A
+ case object B7 extends A
+ case object B8 extends A
+ case object B9 extends A
+ case object B10 extends A
+ case object B11 extends A
+ case object B12 extends A
+ case object B13 extends A
+ case object B14 extends A
+ case object B15 extends A
+ case object B16 extends A
+ case object B17 extends A
+ case object B18 extends A
+ case object B19 extends A
+ case object B20 extends A
+ case object B21 extends A
+ case object B22 extends A
+ case object B23 extends A
+ case object B24 extends A
+ case object B25 extends A
+ case object B26 extends A
+ case object B27 extends A
+ case object B28 extends A
+ case object B29 extends A
+ case object B30 extends A
+ case object B31 extends A
+ case object B32 extends A
+ case object B33 extends A
+ case object B34 extends A
+ case object B35 extends A
+ case object B36 extends A
+ case object B37 extends A
+ case object B38 extends A
+ case object B39 extends A
+ case object B40 extends A
+ case object B41 extends A
+ case object B42 extends A
+ case object B43 extends A
+ case object B44 extends A
+ case object B45 extends A
+ case object B46 extends A
+ case object B47 extends A
+ case object B48 extends A
+ case object B49 extends A
+ case object B50 extends A
+ case object B51 extends A
+ case object B52 extends A
+ case object B53 extends A
+ case object B54 extends A
+ case object B55 extends A
+ case object B56 extends A
+ case object B57 extends A
+ case object B58 extends A
+ case object B59 extends A
+ case object B60 extends A
+ case object B61 extends A
+ case object B62 extends A
+ case object B63 extends A
+ case object B64 extends A
+ case object B65 extends A
+ case object B66 extends A
+ case object B67 extends A
+ case object B68 extends A
+ case object B69 extends A
+ case object B70 extends A
+ case object B71 extends A
+ case object B72 extends A
+ case object B73 extends A
+ case object B74 extends A
+ case object B75 extends A
+ case object B76 extends A
+ case object B77 extends A
+ case object B78 extends A
+ case object B79 extends A
+ case object B80 extends A
+ case object B81 extends A
+ case object B82 extends A
+ case object B83 extends A
+ case object B84 extends A
+ case object B85 extends A
+ case object B86 extends A
+ case object B87 extends A
+ case object B88 extends A
+ case object B89 extends A
+ case object B90 extends A
+ case object B91 extends A
+ case object B92 extends A
+ case object B93 extends A
+ case object B94 extends A
+ case object B95 extends A
+ case object B96 extends A
+ case object B97 extends A
+ case object B98 extends A
+ case object B99 extends A
+ case object B100 extends A
+
+ implicit val codec: JsonCodec[A] = DeriveJsonCodec.gen[A]
+ }
+ }
+
+ object example100fields {
+ case class A(
+ f01: Option[Int] = None,
+ f02: Option[Int] = None,
+ f03: Option[Int] = None,
+ f04: Option[Int] = None,
+ f05: Option[Int] = None,
+ f06: Option[Int] = None,
+ f07: Option[Int] = None,
+ f08: Option[Int] = None,
+ f09: Option[Int] = None,
+ f10: Option[Int] = None,
+ f11: Option[Int] = None,
+ f12: Option[Int] = None,
+ f13: Option[Int] = None,
+ f14: Option[Int] = None,
+ f15: Option[Int] = None,
+ f16: Option[Int] = None,
+ f17: Option[Int] = None,
+ f18: Option[Int] = None,
+ f19: Option[Int] = None,
+ f20: Option[Int] = None,
+ f21: Option[Int] = None,
+ f22: Option[Int] = None,
+ f23: Option[Int] = None,
+ f24: Option[Int] = None,
+ f25: Option[Int] = None,
+ f26: Option[Int] = None,
+ f27: Option[Int] = None,
+ f28: Option[Int] = None,
+ f29: Option[Int] = None,
+ f30: Option[Int] = None,
+ f31: Option[Int] = None,
+ f32: Option[Int] = None,
+ f33: Option[Int] = None,
+ f34: Option[Int] = None,
+ f35: Option[Int] = None,
+ f36: Option[Int] = None,
+ f37: Option[Int] = None,
+ f38: Option[Int] = None,
+ f39: Option[Int] = None,
+ f40: Option[Int] = None,
+ f41: Option[Int] = None,
+ f42: Option[Int] = None,
+ f43: Option[Int] = None,
+ f44: Option[Int] = None,
+ f45: Option[Int] = None,
+ f46: Option[Int] = None,
+ f47: Option[Int] = None,
+ f48: Option[Int] = None,
+ f49: Option[Int] = None,
+ f50: Option[Int] = None,
+ f51: Option[Int] = None,
+ f52: Option[Int] = None,
+ f53: Option[Int] = None,
+ f54: Option[Int] = None,
+ f55: Option[Int] = None,
+ f56: Option[Int] = None,
+ f57: Option[Int] = None,
+ f58: Option[Int] = None,
+ f59: Option[Int] = None,
+ @jsonAliases("f_60", "f-60", "F60", "_f60") f60: Option[Int] = None,
+ f61: Option[Int] = None,
+ f62: Option[Int] = None,
+ f63: Option[Int] = None,
+ f64: Option[Int] = None,
+ f65: Option[Int] = None,
+ f66: Option[Int] = None,
+ f67: Option[Int] = None,
+ f68: Option[Int] = None,
+ f69: Option[Int] = None,
+ f70: Option[Int] = None,
+ f71: Option[Int] = None,
+ f72: Option[Int] = None,
+ f73: Option[Int] = None,
+ f74: Option[Int] = None,
+ f75: Option[Int] = None,
+ f76: Option[Int] = None,
+ f77: Option[Int] = None,
+ f78: Option[Int] = None,
+ f79: Option[Int] = None,
+ f80: Option[Int] = None,
+ f81: Option[Int] = None,
+ f82: Option[Int] = None,
+ f83: Option[Int] = None,
+ f84: Option[Int] = None,
+ f85: Option[Int] = None,
+ f86: Option[Int] = None,
+ f87: Option[Int] = None,
+ f88: Option[Int] = None,
+ f89: Option[Int] = None,
+ f90: Option[Int] = None,
+ f91: Option[Int] = None,
+ f92: Option[Int] = None,
+ f93: Option[Int] = None,
+ f94: Option[Int] = None,
+ f95: Option[Int] = None,
+ f96: Option[Int] = None,
+ f97: Option[Int] = None,
+ f98: Option[Int] = None,
+ f99: Option[Int] = None,
+ @jsonAliases("f101", "f102", "f103", "f104", "f105", "f106", "f107", "f108", "f109") f100: Option[Int] = None
+ )
+
+ implicit val codec: JsonCodec[A] = DeriveJsonCodec.gen[A]
+ }
}
diff --git a/zio-json/shared/src/test/scala/zio/json/EncoderSpec.scala b/zio-json/shared/src/test/scala/zio/json/EncoderSpec.scala
index 92c8fab59..39985aa7a 100644
--- a/zio-json/shared/src/test/scala/zio/json/EncoderSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/EncoderSpec.scala
@@ -1,6 +1,5 @@
-package testzio.json
+package zio.json
-import zio.json._
import zio.json.ast.Json
import zio.test.Assertion._
import zio.test.TestAspect.jvmOnly
@@ -10,19 +9,19 @@ import zio.{ Chunk, NonEmptyChunk }
import java.util.UUID
import scala.collection.{ immutable, mutable }
-// zioJsonJVM/testOnly testzio.json.EncoderSpec
+// zioJsonJVM/testOnly zio.json.EncoderSpec
object EncoderSpec extends ZIOSpecDefault {
val spec: Spec[Environment, Any] =
suite("Encoder")(
suite("toJson")(
+ test("strings") {
+ assert("hello world".toJson)(equalTo("\"hello world\"")) &&
+ assert("hello\nworld".toJson)(equalTo("\"hello\\nworld\"")) &&
+ assert("hello\rworld".toJson)(equalTo("\"hello\\rworld\"")) &&
+ assert("hello\u0000world".toJson)(equalTo("\"hello\\u0000world\""))
+ },
suite("primitives")(
- test("strings") {
- assert("hello world".toJson)(equalTo("\"hello world\"")) &&
- assert("hello\nworld".toJson)(equalTo("\"hello\\nworld\"")) &&
- assert("hello\rworld".toJson)(equalTo("\"hello\\rworld\"")) &&
- assert("hello\u0000world".toJson)(equalTo("\"hello\\u0000world\""))
- },
test("boolean") {
assert(true.toJson)(equalTo("true")) &&
assert(false.toJson)(equalTo("false"))
@@ -31,6 +30,63 @@ object EncoderSpec extends ZIOSpecDefault {
assert('c'.toJson)(equalTo("\"c\"")) &&
assert(Symbol("c").toJson)(equalTo("\"c\""))
},
+ test("byte") {
+ assert((0: Byte).toJson)(equalTo("0")) &&
+ assert((1: Byte).toJson)(equalTo("1")) &&
+ assert((12: Byte).toJson)(equalTo("12")) &&
+ assert((123: Byte).toJson)(equalTo("123")) &&
+ assert((127: Byte).toJson)(equalTo("127")) &&
+ assert((-128: Byte).toJson)(equalTo("-128"))
+ },
+ test("short") {
+ assert((0: Short).toJson)(equalTo("0")) &&
+ assert((1: Short).toJson)(equalTo("1")) &&
+ assert((12: Short).toJson)(equalTo("12")) &&
+ assert((123: Short).toJson)(equalTo("123")) &&
+ assert((1234: Short).toJson)(equalTo("1234")) &&
+ assert((12345: Short).toJson)(equalTo("12345")) &&
+ assert((32767: Short).toJson)(equalTo("32767")) &&
+ assert((-32768: Short).toJson)(equalTo("-32768"))
+ },
+ test("int") {
+ assert(0.toJson)(equalTo("0")) &&
+ assert(1.toJson)(equalTo("1")) &&
+ assert(12.toJson)(equalTo("12")) &&
+ assert(123.toJson)(equalTo("123")) &&
+ assert(1234.toJson)(equalTo("1234")) &&
+ assert(12345.toJson)(equalTo("12345")) &&
+ assert(123456.toJson)(equalTo("123456")) &&
+ assert(1234567.toJson)(equalTo("1234567")) &&
+ assert(12345678.toJson)(equalTo("12345678")) &&
+ assert(123456789.toJson)(equalTo("123456789")) &&
+ assert(1234567890.toJson)(equalTo("1234567890")) &&
+ assert(2147483647.toJson)(equalTo("2147483647")) &&
+ assert(-2147483648.toJson)(equalTo("-2147483648"))
+ },
+ test("long") {
+ assert(0L.toJson)(equalTo("0")) &&
+ assert(1L.toJson)(equalTo("1")) &&
+ assert(12L.toJson)(equalTo("12")) &&
+ assert(123L.toJson)(equalTo("123")) &&
+ assert(1234L.toJson)(equalTo("1234")) &&
+ assert(12345L.toJson)(equalTo("12345")) &&
+ assert(123456L.toJson)(equalTo("123456")) &&
+ assert(1234567L.toJson)(equalTo("1234567")) &&
+ assert(12345678L.toJson)(equalTo("12345678")) &&
+ assert(123456789L.toJson)(equalTo("123456789")) &&
+ assert(1234567890L.toJson)(equalTo("1234567890")) &&
+ assert(12345678901L.toJson)(equalTo("12345678901")) &&
+ assert(123456789012L.toJson)(equalTo("123456789012")) &&
+ assert(1234567890123L.toJson)(equalTo("1234567890123")) &&
+ assert(12345678901234L.toJson)(equalTo("12345678901234")) &&
+ assert(123456789012345L.toJson)(equalTo("123456789012345")) &&
+ assert(1234567890123456L.toJson)(equalTo("1234567890123456")) &&
+ assert(12345678901234567L.toJson)(equalTo("12345678901234567")) &&
+ assert(123456789012345678L.toJson)(equalTo("123456789012345678")) &&
+ assert(1234567890123456789L.toJson)(equalTo("1234567890123456789")) &&
+ assert(9223372036854775807L.toJson)(equalTo("9223372036854775807")) &&
+ assert(-9223372036854775808L.toJson)(equalTo("-9223372036854775808"))
+ },
test("float") {
assert(Float.NaN.toJson)(equalTo("\"NaN\"")) &&
assert(Float.PositiveInfinity.toJson)(equalTo("\"Infinity\"")) &&
@@ -45,9 +101,9 @@ object EncoderSpec extends ZIOSpecDefault {
assert(9999999.0f.toJson)(equalTo("9999999.0")) &&
assert(0.001f.toJson)(equalTo("0.001")) &&
assert(9.999999e-4f.toJson)(equalTo("9.999999E-4")) &&
- // FIXME: sbt fmt cannot parse: assert((-3.4028235E38f).toJson)(equalTo("-3.4028235E38")) && // Float.MinValue
- assert(1.4e-45f.toJson)(equalTo("1.4E-45")) && // Float.MinPositiveValue
- // FIXME: sbt fmt cannot parse: assert(3.4028235E38f.toJson)(equalTo("3.4028235E38")) && // Float.MaxValue
+ assert(Float.MinValue.toJson)(equalTo("-3.4028235E38")) &&
+ assert(Float.MinPositiveValue.toJson)(equalTo("1.4E-45")) &&
+ assert(Float.MaxValue.toJson)(equalTo("3.4028235E38")) &&
assert(3.3554448e7f.toJson)(equalTo("3.355445E7")) &&
assert(8.999999e9f.toJson)(equalTo("9.0E9")) &&
assert(3.4366718e10f.toJson)(equalTo("3.436672E10")) &&
@@ -138,6 +194,7 @@ object EncoderSpec extends ZIOSpecDefault {
assert(7.1202363472230444e-307d.toJson)(equalTo("7.120236347223045E-307")) &&
assert(3.67301024534615e16d.toJson)(equalTo("3.67301024534615E16")) &&
assert(5.9604644775390625e-8d.toJson)(equalTo("5.960464477539063E-8")) &&
+ assert(5.829003601188985e15d.toJson)(equalTo("5.829003601188985E15")) &&
assert(1.0e-322d.toJson)(equalTo("9.9E-323")) && // 20 * 2 ^ -1074 == 9.88... * 10 ^ -323
assert(5.0e-324d.toJson)(equalTo("4.9E-324")) && // 1 * 2 ^ -1074 == 4.94... * 10 ^ -324
assert(1.0e23d.toJson)(
@@ -188,20 +245,30 @@ object EncoderSpec extends ZIOSpecDefault {
assert((-6939.0464d).toJson)(
equalTo("-6939.0464")
) // See the issue: https://github.com/zio/zio-json/pull/375
- },
- test("other numerics") {
- val exampleBigIntStr = "170141183460469231731687303715884105728"
- val exampleBigDecimalStr = "170141183460469231731687303715884105728.4433"
- assert((1: Byte).toJson)(equalTo("1")) &&
- assert((1: Short).toJson)(equalTo("1")) &&
- assert((1: Int).toJson)(equalTo("1")) &&
- assert(1L.toJson)(equalTo("1")) &&
- assert(new java.math.BigInteger("1").toJson)(equalTo("1")) &&
- assert(new java.math.BigInteger(exampleBigIntStr).toJson)(equalTo(exampleBigIntStr)) &&
- assert(BigInt(exampleBigIntStr).toJson)(equalTo(exampleBigIntStr)) &&
- assert(BigDecimal(exampleBigDecimalStr).toJson)(equalTo(exampleBigDecimalStr))
}
),
+ test("BigInt") {
+ assert(BigInt("-1").toJson)(equalTo("-1")) &&
+ assert(BigInt("-316873037158841").toJson)(equalTo("-316873037158841")) &&
+ assert(BigInt("1701411834604692317316873037158841").toJson)(equalTo("1701411834604692317316873037158841"))
+ },
+ test("BigDecimal") {
+ assert(BigDecimal("-1.0").toJson)(equalTo("-1.0")) &&
+ assert(BigDecimal("1.0E+5").toJson)(equalTo("1.0E+5")) &&
+ assert(BigDecimal("0.000100").toJson)(equalTo("0.000100")) &&
+ assert(BigDecimal("-0.000001").toJson)(equalTo("-0.000001")) &&
+ assert(BigDecimal("100000.00").toJson)(equalTo("100000.00")) &&
+ assert(BigDecimal("1E-2147483647").toJson)(equalTo("1E-2147483647")) &&
+ assert(BigDecimal("1E+2147483647").toJson)(equalTo("1E+2147483647")) &&
+ assert(BigDecimal("-234316873037.008841").toJson)(equalTo("-234316873037.008841")) &&
+ assert(BigDecimal("2.999999999999999999E-17").toJson)(equalTo("2.999999999999999999E-17")) &&
+ assert(BigDecimal("-7.812738666512280685E-15").toJson)(equalTo("-7.812738666512280685E-15")) &&
+ assert(BigDecimal("141183460469231731687303715.8841").toJson)(equalTo("141183460469231731687303715.8841")) &&
+ assert(BigDecimal("1.7014118346046923173168730E+119").toJson)(equalTo("1.7014118346046923173168730E+119")) &&
+ assert(
+ BigDecimal("-9.999999999999874791608720182523363282786709588281885514820801359042815031E-4571018").toJson
+ )(equalTo("-9.999999999999874791608720182523363282786709588281885514820801359042815031E-4571018"))
+ },
test("options") {
assert((None: Option[Int]).toJson)(equalTo("null")) &&
assert((Some(1): Option[Int]).toJson)(equalTo("1"))
@@ -256,11 +323,19 @@ object EncoderSpec extends ZIOSpecDefault {
assert(Map("hello" -> "world").toJsonPretty)(equalTo("{\n \"hello\" : \"world\"\n}")) &&
assert(Map("hello" -> Some("world"), "goodbye" -> None).toJsonPretty)(
equalTo("{\n \"hello\" : \"world\"\n}")
+ ) &&
+ assert(immutable.ListMap("hello" -> "world", "goodbye" -> "world").toJson)(
+ equalTo("""{"hello":"world","goodbye":"world"}""")
)
},
test("Map, custom keys") {
assert(Map(1 -> "a").toJson)(equalTo("""{"1":"a"}"""))
},
+ test("Map, UUID keys") {
+ assert(Map(UUID.fromString("e142f1aa-6e9e-4352-adfe-7e6eb9814ccd") -> "abcd").toJson)(
+ equalTo("""{"e142f1aa-6e9e-4352-adfe-7e6eb9814ccd":"abcd"}""")
+ )
+ },
test("java.util.UUID") {
assert(UUID.fromString("e142f1aa-6e9e-4352-adfe-7e6eb9814ccd").toJson)(
equalTo(""""e142f1aa-6e9e-4352-adfe-7e6eb9814ccd"""")
@@ -291,6 +366,7 @@ object EncoderSpec extends ZIOSpecDefault {
) &&
assert(CoupleOfThings(0, None, true).toJsonPretty)(equalTo("{\n \"j\" : 0,\n \"b\" : true\n}")) &&
- assert(OptionalAndRequired(None, "foo").toJson)(equalTo("""{"s":"foo"}"""))
+ assert(OptionalAndRequired(None, "foo").toJson)(equalTo("""{"s":"foo"}""")) &&
+ assert(OptionalExplicitNullAndRequired(None, "foo").toJson)(equalTo("""{"i":null,"s":"foo"}"""))
},
test("sum encoding") {
import examplesum._
@@ -300,6 +376,18 @@ object EncoderSpec extends ZIOSpecDefault {
assert((Child1(): Parent).toJsonPretty)(equalTo("{\n \"Child1\" : {}\n}")) &&
assert((Child2(): Parent).toJsonPretty)(equalTo("{\n \"Cain\" : {}\n}"))
},
+ test("sum encoding with enumValuesAsStrings = true") {
+ import examplesumobjects1._
+
+ assert((Child1: Parent).toJson)(equalTo(""""Child1"""")) &&
+ assert((Child2: Parent).toJson)(equalTo(""""Cain""""))
+ },
+ test("sum encoding with enumValuesAsStrings = false") {
+ import examplesumobjects2._
+
+ assert((Child1: Parent).toJson)(equalTo("""{"Child1":{}}""")) &&
+ assert((Child2: Parent).toJson)(equalTo("""{"Cain":{}}"""))
+ },
test("sum alternative encoding") {
import examplealtsum._
@@ -315,7 +403,7 @@ object EncoderSpec extends ZIOSpecDefault {
},
test("exclude fields") {
import exampleexcludefield._
- assert(Person("Peter", 20).toJson)(equalTo("""{"name":"Peter"}"""))
+ assert(Person(7, "Peter", 20).toJson)(equalTo("""{"name":"Peter"}"""))
},
test("aliases") {
import exampleproducts._
@@ -400,6 +488,12 @@ object EncoderSpec extends ZIOSpecDefault {
assert((Child1(): Parent).toJsonAST)(isRight(equalTo(Json.Obj(Chunk("Child1" -> Json.Obj()))))) &&
assert((Child2(): Parent).toJsonAST)(isRight(equalTo(Json.Obj(Chunk("Cain" -> Json.Obj())))))
},
+ test("sum encoding with hint names") {
+ import examplesumhintnames._
+
+ assert((Child1(): Parent).toJsonAST)(isRight(equalTo(Json.Obj(Chunk("child1" -> Json.Obj()))))) &&
+ assert((Child2(): Parent).toJsonAST)(isRight(equalTo(Json.Obj(Chunk("Cain" -> Json.Obj())))))
+ },
test("sum alternative encoding") {
import examplealtsum._
@@ -409,6 +503,15 @@ object EncoderSpec extends ZIOSpecDefault {
(isRight(equalTo(Json.Obj("s" -> Json.Str("hello"), "hint" -> Json.Str("Abel")))))
)
},
+ test("sum alternative encoding with hint names") {
+ import examplealtsumhintnames._
+
+ assert((Child1(): Parent).toJsonAST)(isRight(equalTo(Json.Obj("hint" -> Json.Str("child1"))))) &&
+ assert((Child2(None): Parent).toJsonAST)(isRight(equalTo(Json.Obj("hint" -> Json.Str("Abel"))))) &&
+ assert((Child2(Some("hello")): Parent).toJsonAST)(
+ (isRight(equalTo(Json.Obj("s" -> Json.Str("hello"), "hint" -> Json.Str("Abel")))))
+ )
+ },
test("alias") {
import exampleproducts._
@@ -453,6 +556,15 @@ object EncoderSpec extends ZIOSpecDefault {
DeriveJsonEncoder.gen[OptionalAndRequired]
}
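+ // @jsonExplicitNull encodes None fields as explicit nulls instead of omitting them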
+ @jsonExplicitNull
+ case class OptionalExplicitNullAndRequired(i: Option[Int], s: String)
+
+ object OptionalExplicitNullAndRequired {
+
+ implicit val encoder: JsonEncoder[OptionalExplicitNullAndRequired] =
+ DeriveJsonEncoder.gen[OptionalExplicitNullAndRequired]
+ }
+
case class Aliases(@jsonAliases("j", "k") i: Int, f: String)
object Aliases {
@@ -465,7 +577,7 @@ object EncoderSpec extends ZIOSpecDefault {
object exampleexcludefield {
- case class Person(name: String, @jsonExclude age: Int)
+ case class Person(@jsonExclude id: Long, name: String, @jsonExclude age: Int)
object Person {
implicit val encoder: JsonEncoder[Person] = DeriveJsonEncoder.gen[Person]
@@ -488,6 +600,56 @@ object EncoderSpec extends ZIOSpecDefault {
}
+ object examplesumobjects1 {
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = true)
+
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val encoder: JsonEncoder[Parent] = DeriveJsonEncoder.gen[Parent]
+ }
+
+ case object Child1 extends Parent
+
+ @jsonHint("Cain")
+ case object Child2 extends Parent
+
+ }
+
+ object examplesumobjects2 {
+ implicit val config: JsonCodecConfiguration =
+ JsonCodecConfiguration(enumValuesAsStrings = false)
+
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val encoder: JsonEncoder[Parent] = DeriveJsonEncoder.gen[Parent]
+ }
+
+ case object Child1 extends Parent
+
+ @jsonHint("Cain")
+ case object Child2 extends Parent
+
+ }
+
+ object examplesumhintnames {
+
+ @jsonHintNames(CamelCase)
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val encoder: JsonEncoder[Parent] = DeriveJsonEncoder.gen[Parent]
+ }
+
+ case class Child1() extends Parent
+
+ @jsonHint("Cain")
+ case class Child2() extends Parent
+
+ }
+
object examplealtsum {
@jsonDiscriminator("hint")
@@ -504,4 +666,21 @@ object EncoderSpec extends ZIOSpecDefault {
}
+ object examplealtsumhintnames {
+
+ @jsonDiscriminator("hint")
+ @jsonHintNames(CamelCase)
+ sealed abstract class Parent
+
+ object Parent {
+ implicit val encoder: JsonEncoder[Parent] = DeriveJsonEncoder.gen[Parent]
+ }
+
+ case class Child1() extends Parent
+
+ @jsonHint("Abel")
+ case class Child2(s: Option[String]) extends Parent
+
+ }
+
}
diff --git a/zio-json/shared/src/test/scala/zio/json/Gens.scala b/zio-json/shared/src/test/scala/zio/json/Gens.scala
index 354140c64..1e72df0a3 100644
--- a/zio-json/shared/src/test/scala/zio/json/Gens.scala
+++ b/zio-json/shared/src/test/scala/zio/json/Gens.scala
@@ -1,4 +1,4 @@
-package testzio.json
+package zio.json
import zio.test.Gen
@@ -9,15 +9,18 @@ import scala.util.Try
object Gens {
val genBigInteger =
Gen
- .bigInt((BigInt(2).pow(128) - 1) * -1, BigInt(2).pow(128) - 1)
+ .bigInt((BigInt(2).pow(256) - 1) * -1, BigInt(2).pow(256) - 1)
.map(_.bigInteger)
- .filter(_.bitLength < 128)
+ .filter(_.bitLength < 256)
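+ // pair a <256-bit unscaled value with either a modest or an extreme scale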
val genBigDecimal =
- Gen
- .bigDecimal((BigDecimal(2).pow(128) - 1) * -1, BigDecimal(2).pow(128) - 1)
- .map(_.bigDecimal)
- .filter(_.toBigInteger.bitLength < 128)
+ for {
+ unscaled <- Gen
+ .bigInt((BigInt(2).pow(256) - 1) * -1, BigInt(2).pow(256) - 1)
+ .map(_.bigInteger)
+ .filter(_.bitLength < 256)
+ scale <- Gen.oneOf(Gen.int(-20, 20), Gen.int(-1000000000, 1000000000))
+ } yield new java.math.BigDecimal(unscaled, scale)
val genUsAsciiString =
Gen.string(Gen.oneOf(Gen.char('!', '~')))
diff --git a/zio-json/shared/src/test/scala/zio/json/JavaTimeSpec.scala b/zio-json/shared/src/test/scala/zio/json/JavaTimeSpec.scala
index 7684e8187..27e9553f2 100644
--- a/zio-json/shared/src/test/scala/zio/json/JavaTimeSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/JavaTimeSpec.scala
@@ -1,23 +1,24 @@
-package testzio.json
+package zio.json
-import zio.json._
+import zio.json.ast._
import zio.test.Assertion._
import zio.test._
-
import java.time._
import java.time.format.DateTimeFormatter
-// zioJsonJVM/testOnly testzio.json.JavaTimeSpec
+// zioJsonJVM/testOnly zio.json.JavaTimeSpec
object JavaTimeSpec extends ZIOSpecDefault {
private def stringify(s: Any): String = s""" "${s.toString}" """
private def equalToStringified(expected: String) = equalTo(s""""$expected"""")
+ private def equalToJsonStr(expected: String): Assertion[Either[String, Json]] = isRight(equalTo(Json.Str(expected)))
+
val spec: Spec[Environment, Any] =
suite("java.time")(
suite("Encoder")(
- test("DayOfWeek") {
+ test("DayOfWeek toJson") {
assert(DayOfWeek.MONDAY.toJson)(equalToStringified("MONDAY")) &&
assert(DayOfWeek.TUESDAY.toJson)(equalToStringified("TUESDAY")) &&
assert(DayOfWeek.WEDNESDAY.toJson)(equalToStringified("WEDNESDAY")) &&
@@ -26,41 +27,80 @@ object JavaTimeSpec extends ZIOSpecDefault {
assert(DayOfWeek.SATURDAY.toJson)(equalToStringified("SATURDAY")) &&
assert(DayOfWeek.SUNDAY.toJson)(equalToStringified("SUNDAY"))
},
- test("Duration") {
+ test("DayOfWeek toJsonAST") {
+ assert(DayOfWeek.MONDAY.toJsonAST)(equalToJsonStr("MONDAY")) &&
+ assert(DayOfWeek.TUESDAY.toJsonAST)(equalToJsonStr("TUESDAY")) &&
+ assert(DayOfWeek.WEDNESDAY.toJsonAST)(equalToJsonStr("WEDNESDAY")) &&
+ assert(DayOfWeek.THURSDAY.toJsonAST)(equalToJsonStr("THURSDAY")) &&
+ assert(DayOfWeek.FRIDAY.toJsonAST)(equalToJsonStr("FRIDAY")) &&
+ assert(DayOfWeek.SATURDAY.toJsonAST)(equalToJsonStr("SATURDAY")) &&
+ assert(DayOfWeek.SUNDAY.toJsonAST)(equalToJsonStr("SUNDAY"))
+ },
+ test("Duration toJson") {
assert(Duration.ofDays(0).toJson)(equalToStringified("PT0S")) &&
assert(Duration.ofDays(1).toJson)(equalToStringified("PT24H")) &&
assert(Duration.ofHours(24).toJson)(equalToStringified("PT24H")) &&
assert(Duration.ofMinutes(1440).toJson)(equalToStringified("PT24H")) &&
assert(Duration.ofSeconds(Long.MaxValue, 999999999L).toJson)(
equalToStringified("PT2562047788015215H30M7.999999999S")
- ) &&
- assert(""""PT-0.5S"""".fromJson[Duration].map(_.toString))(isRight(equalTo("PT-0.5S"))) &&
- assert(""""-PT0.5S"""".fromJson[Duration].map(_.toString))(isRight(equalTo("PT-0.5S")))
+ )
},
- test("Instant") {
+ test("Duration toJsonAST") {
+ assert(Duration.ofDays(0).toJsonAST)(equalToJsonStr("PT0S")) &&
+ assert(Duration.ofDays(1).toJsonAST)(equalToJsonStr("PT24H")) &&
+ assert(Duration.ofHours(24).toJsonAST)(equalToJsonStr("PT24H")) &&
+ assert(Duration.ofMinutes(1440).toJsonAST)(equalToJsonStr("PT24H")) &&
+ assert(Duration.ofSeconds(Long.MaxValue, 999999999L).toJsonAST)(
+ equalToJsonStr("PT2562047788015215H30M7.999999999S")
+ )
+ },
+ test("Instant toJson") {
val n = Instant.now()
assert(Instant.EPOCH.toJson)(equalToStringified("1970-01-01T00:00:00Z")) &&
assert(n.toJson)(equalToStringified(n.toString))
},
- test("LocalDate") {
+ test("Instant toJsonAST") {
+ val n = Instant.now()
+ assert(Instant.EPOCH.toJsonAST)(equalToJsonStr("1970-01-01T00:00:00Z")) &&
+ assert(n.toJsonAST)(equalToJsonStr(n.toString))
+ },
+ test("LocalDate toJson") {
val n = LocalDate.now()
val p = LocalDate.of(2020, 1, 1)
assert(n.toJson)(equalToStringified(n.format(DateTimeFormatter.ISO_LOCAL_DATE))) &&
assert(p.toJson)(equalToStringified("2020-01-01"))
},
- test("LocalDateTime") {
+ test("LocalDate toJsonAST") {
+ val n = LocalDate.now()
+ val p = LocalDate.of(2020, 1, 1)
+ assert(n.toJsonAST)(equalToJsonStr(n.format(DateTimeFormatter.ISO_LOCAL_DATE))) &&
+ assert(p.toJsonAST)(equalToJsonStr("2020-01-01"))
+ },
+ test("LocalDateTime toJson") {
val n = LocalDateTime.now()
val p = LocalDateTime.of(2020, 1, 1, 12, 36, 0)
assert(n.toJson)(equalToStringified(n.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME))) &&
assert(p.toJson)(equalToStringified("2020-01-01T12:36:00"))
},
- test("LocalTime") {
+ test("LocalDateTime toJsonAST") {
+ val n = LocalDateTime.now()
+ val p = LocalDateTime.of(2020, 1, 1, 12, 36, 0)
+ assert(n.toJsonAST)(equalToJsonStr(n.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME))) &&
+ assert(p.toJsonAST)(equalToJsonStr("2020-01-01T12:36:00"))
+ },
+ test("LocalTime toJson") {
val n = LocalTime.now()
val p = LocalTime.of(12, 36, 0)
assert(n.toJson)(equalToStringified(n.format(DateTimeFormatter.ISO_LOCAL_TIME))) &&
assert(p.toJson)(equalToStringified("12:36:00"))
},
- test("Month") {
+ test("LocalTime toJsonAST") {
+ val n = LocalTime.now()
+ val p = LocalTime.of(12, 36, 0)
+ assert(n.toJsonAST)(equalToJsonStr(n.format(DateTimeFormatter.ISO_LOCAL_TIME))) &&
+ assert(p.toJsonAST)(equalToJsonStr("12:36:00"))
+ },
+ test("Month toJson") {
assert(Month.JANUARY.toJson)(equalToStringified("JANUARY")) &&
assert(Month.FEBRUARY.toJson)(equalToStringified("FEBRUARY")) &&
assert(Month.MARCH.toJson)(equalToStringified("MARCH")) &&
@@ -74,44 +114,95 @@ object JavaTimeSpec extends ZIOSpecDefault {
assert(Month.NOVEMBER.toJson)(equalToStringified("NOVEMBER")) &&
assert(Month.DECEMBER.toJson)(equalToStringified("DECEMBER"))
},
- test("MonthDay") {
+ test("Month toJsonAST") {
+ assert(Month.JANUARY.toJsonAST)(equalToJsonStr("JANUARY")) &&
+ assert(Month.FEBRUARY.toJsonAST)(equalToJsonStr("FEBRUARY")) &&
+ assert(Month.MARCH.toJsonAST)(equalToJsonStr("MARCH")) &&
+ assert(Month.APRIL.toJsonAST)(equalToJsonStr("APRIL")) &&
+ assert(Month.MAY.toJsonAST)(equalToJsonStr("MAY")) &&
+ assert(Month.JUNE.toJsonAST)(equalToJsonStr("JUNE")) &&
+ assert(Month.JULY.toJsonAST)(equalToJsonStr("JULY")) &&
+ assert(Month.AUGUST.toJsonAST)(equalToJsonStr("AUGUST")) &&
+ assert(Month.SEPTEMBER.toJsonAST)(equalToJsonStr("SEPTEMBER")) &&
+ assert(Month.OCTOBER.toJsonAST)(equalToJsonStr("OCTOBER")) &&
+ assert(Month.NOVEMBER.toJsonAST)(equalToJsonStr("NOVEMBER")) &&
+ assert(Month.DECEMBER.toJsonAST)(equalToJsonStr("DECEMBER"))
+ },
+ test("MonthDay toJson") {
val n = MonthDay.now()
val p = MonthDay.of(1, 1)
assert(n.toJson)(equalToStringified(n.toString)) &&
assert(p.toJson)(equalToStringified("--01-01"))
},
- test("OffsetDateTime") {
+ test("MonthDay toJsonAST") {
+ val n = MonthDay.now()
+ val p = MonthDay.of(1, 1)
+ assert(n.toJsonAST)(equalToJsonStr(n.toString)) &&
+ assert(p.toJsonAST)(equalToJsonStr("--01-01"))
+ },
+ test("OffsetDateTime toJson") {
val n = OffsetDateTime.now()
val p = OffsetDateTime.of(2020, 1, 1, 12, 36, 12, 0, ZoneOffset.UTC)
assert(n.toJson)(equalToStringified(n.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME))) &&
assert(p.toJson)(equalToStringified("2020-01-01T12:36:12Z"))
},
- test("OffsetTime") {
+ test("OffsetDateTime toJsonAST") {
+ val n = OffsetDateTime.now()
+ val p = OffsetDateTime.of(2020, 1, 1, 12, 36, 12, 0, ZoneOffset.UTC)
+ assert(n.toJsonAST)(equalToJsonStr(n.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME))) &&
+ assert(p.toJsonAST)(equalToJsonStr("2020-01-01T12:36:12Z"))
+ },
+ test("OffsetTime toJson") {
val n = OffsetTime.now()
val p = OffsetTime.of(12, 36, 12, 0, ZoneOffset.ofHours(-4))
assert(n.toJson)(equalToStringified(n.format(DateTimeFormatter.ISO_OFFSET_TIME))) &&
assert(p.toJson)(equalToStringified("12:36:12-04:00"))
},
- test("Period") {
+ test("OffsetTime toJsonAST") {
+ val n = OffsetTime.now()
+ val p = OffsetTime.of(12, 36, 12, 0, ZoneOffset.ofHours(-4))
+ assert(n.toJsonAST)(equalToJsonStr(n.format(DateTimeFormatter.ISO_OFFSET_TIME))) &&
+ assert(p.toJsonAST)(equalToJsonStr("12:36:12-04:00"))
+ },
+ test("Period toJson") {
assert(Period.ZERO.toJson)(equalToStringified("P0D")) &&
assert(Period.ofDays(1).toJson)(equalToStringified("P1D")) &&
assert(Period.ofMonths(2).toJson)(equalToStringified("P2M")) &&
assert(Period.ofWeeks(52).toJson)(equalToStringified("P364D")) &&
assert(Period.ofYears(10).toJson)(equalToStringified("P10Y"))
},
- test("Year") {
+ test("Period toJsonAST") {
+ assert(Period.ZERO.toJsonAST)(equalToJsonStr("P0D")) &&
+ assert(Period.ofDays(1).toJsonAST)(equalToJsonStr("P1D")) &&
+ assert(Period.ofMonths(2).toJsonAST)(equalToJsonStr("P2M")) &&
+ assert(Period.ofWeeks(52).toJsonAST)(equalToJsonStr("P364D")) &&
+ assert(Period.ofYears(10).toJsonAST)(equalToJsonStr("P10Y"))
+ },
+ test("Year toJson") {
val n = Year.now()
assert(n.toJson)(equalToStringified(n.toString)) &&
assert(Year.of(1999).toJson)(equalToStringified("1999")) &&
assert(Year.of(10000).toJson)(equalToStringified("+10000"))
},
- test("YearMonth") {
+ test("Year toJsonAST") {
+ val n = Year.now()
+ assert(n.toJsonAST)(equalToJsonStr(n.toString)) &&
+ assert(Year.of(1999).toJsonAST)(equalToJsonStr("1999")) &&
+ assert(Year.of(10000).toJsonAST)(equalToJsonStr("+10000"))
+ },
+ test("YearMonth toJson") {
val n = YearMonth.now()
assert(n.toJson)(equalToStringified(n.toString)) &&
assert(YearMonth.of(1999, 12).toJson)(equalToStringified("1999-12")) &&
assert(YearMonth.of(1999, 1).toJson)(equalToStringified("1999-01"))
},
- test("ZonedDateTime") {
+ test("YearMonth toJsonAST") {
+ val n = YearMonth.now()
+ assert(n.toJsonAST)(equalToJsonStr(n.toString)) &&
+ assert(YearMonth.of(1999, 12).toJsonAST)(equalToJsonStr("1999-12")) &&
+ assert(YearMonth.of(1999, 1).toJsonAST)(equalToJsonStr("1999-01"))
+ },
+ test("ZonedDateTime toJson") {
val n = ZonedDateTime.now()
val ld = LocalDateTime.of(2020, 1, 1, 12, 36, 0)
val est = ZonedDateTime.of(ld, ZoneId.of("America/New_York"))
@@ -120,21 +211,42 @@ object JavaTimeSpec extends ZIOSpecDefault {
assert(est.toJson)(equalToStringified("2020-01-01T12:36:00-05:00[America/New_York]")) &&
assert(utc.toJson)(equalToStringified("2020-01-01T12:36:00Z[Etc/UTC]"))
},
- test("ZoneId") {
+ test("ZonedDateTime toJsonAST") {
+ val n = ZonedDateTime.now()
+ val ld = LocalDateTime.of(2020, 1, 1, 12, 36, 0)
+ val est = ZonedDateTime.of(ld, ZoneId.of("America/New_York"))
+ val utc = ZonedDateTime.of(ld, ZoneId.of("Etc/UTC"))
+ assert(n.toJsonAST)(equalToJsonStr(n.format(DateTimeFormatter.ISO_ZONED_DATE_TIME))) &&
+ assert(est.toJsonAST)(equalToJsonStr("2020-01-01T12:36:00-05:00[America/New_York]")) &&
+ assert(utc.toJsonAST)(equalToJsonStr("2020-01-01T12:36:00Z[Etc/UTC]"))
+ },
+ test("ZoneId toJson") {
assert(ZoneId.of("America/New_York").toJson)(equalToStringified("America/New_York")) &&
assert(ZoneId.of("Etc/UTC").toJson)(equalToStringified("Etc/UTC")) &&
assert(ZoneId.of("Pacific/Auckland").toJson)(equalToStringified("Pacific/Auckland")) &&
assert(ZoneId.of("Asia/Shanghai").toJson)(equalToStringified("Asia/Shanghai")) &&
assert(ZoneId.of("Africa/Cairo").toJson)(equalToStringified("Africa/Cairo"))
},
- test("ZoneOffset") {
+ test("ZoneId toJsonAST") {
+ assert(ZoneId.of("America/New_York").toJsonAST)(equalToJsonStr("America/New_York")) &&
+ assert(ZoneId.of("Etc/UTC").toJsonAST)(equalToJsonStr("Etc/UTC")) &&
+ assert(ZoneId.of("Pacific/Auckland").toJsonAST)(equalToJsonStr("Pacific/Auckland")) &&
+ assert(ZoneId.of("Asia/Shanghai").toJsonAST)(equalToJsonStr("Asia/Shanghai")) &&
+ assert(ZoneId.of("Africa/Cairo").toJsonAST)(equalToJsonStr("Africa/Cairo"))
+ },
+ test("ZoneOffset toJson") {
assert(ZoneOffset.UTC.toJson)(equalToStringified("Z")) &&
assert(ZoneOffset.ofHours(5).toJson)(equalToStringified("+05:00")) &&
assert(ZoneOffset.ofHours(-5).toJson)(equalToStringified("-05:00"))
+ },
+ test("ZoneOffset toJsonAST") {
+ assert(ZoneOffset.UTC.toJsonAST)(equalToJsonStr("Z")) &&
+ assert(ZoneOffset.ofHours(5).toJsonAST)(equalToJsonStr("+05:00")) &&
+ assert(ZoneOffset.ofHours(-5).toJsonAST)(equalToJsonStr("-05:00"))
}
),
suite("Decoder")(
- test("DayOfWeek") {
+ test("DayOfWeek fromJson") {
assert(stringify("MONDAY").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY))) &&
assert(stringify("TUESDAY").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.TUESDAY))) &&
assert(stringify("WEDNESDAY").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.WEDNESDAY))) &&
@@ -143,13 +255,29 @@ object JavaTimeSpec extends ZIOSpecDefault {
assert(stringify("SATURDAY").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.SATURDAY))) &&
assert(stringify("SUNDAY").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.SUNDAY))) &&
assert(stringify("monday").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY))) &&
- assert(stringify("MonDay").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY)))
+ assert(stringify("MonDay").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY))) &&
+ assert(stringify("MonDa\\u0079").fromJson[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY))) &&
+ assert(stringify("Mon").fromJson[DayOfWeek])(isLeft(equalTo("(expected a DayOfWeek)")))
+ },
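+ // Unlike fromJson above, Json.Str values are taken verbatim: a literal backslash-u escape in the string is not decoded, so it must not resolve to an enum constant.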
+ test("DayOfWeek fromJsonAST") {
+ assert(Json.Str("MONDAY").as[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY))) &&
+ assert(Json.Str("TUESDAY").as[DayOfWeek])(isRight(equalTo(DayOfWeek.TUESDAY))) &&
+ assert(Json.Str("WEDNESDAY").as[DayOfWeek])(isRight(equalTo(DayOfWeek.WEDNESDAY))) &&
+ assert(Json.Str("THURSDAY").as[DayOfWeek])(isRight(equalTo(DayOfWeek.THURSDAY))) &&
+ assert(Json.Str("FRIDAY").as[DayOfWeek])(isRight(equalTo(DayOfWeek.FRIDAY))) &&
+ assert(Json.Str("SATURDAY").as[DayOfWeek])(isRight(equalTo(DayOfWeek.SATURDAY))) &&
+ assert(Json.Str("SUNDAY").as[DayOfWeek])(isRight(equalTo(DayOfWeek.SUNDAY))) &&
+ assert(Json.Str("monday").as[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY))) &&
+ assert(Json.Str("MonDay").as[DayOfWeek])(isRight(equalTo(DayOfWeek.MONDAY))) &&
+ assert(Json.Str("MonDa\\u0079").as[DayOfWeek])(isLeft(equalTo("(expected a DayOfWeek)"))) &&
+ assert(Json.Str("Mon").as[DayOfWeek])(isLeft(equalTo("(expected a DayOfWeek)")))
},
test("Duration") {
assert(stringify("PT24H").fromJson[Duration])(isRight(equalTo(Duration.ofHours(24)))) &&
assert(stringify("-PT24H").fromJson[Duration])(isRight(equalTo(Duration.ofHours(-24)))) &&
assert(stringify("P1D").fromJson[Duration])(isRight(equalTo(Duration.ofHours(24)))) &&
assert(stringify("P1DT0H").fromJson[Duration])(isRight(equalTo(Duration.ofHours(24)))) &&
+ assert(stringify("P1DT0\\u0048").fromJson[Duration])(isRight(equalTo(Duration.ofHours(24)))) &&
assert(stringify("PT2562047788015215H30M7.999999999S").fromJson[Duration])(
isRight(equalTo(Duration.ofSeconds(Long.MaxValue, 999999999L)))
)
@@ -159,6 +287,7 @@ object JavaTimeSpec extends ZIOSpecDefault {
val p = n.toInstant
assert(stringify("1970-01-01T00:00:00Z").fromJson[Instant])(isRight(equalTo(Instant.EPOCH))) &&
assert(stringify("1970-01-01T00:00:00.Z").fromJson[Instant])(isRight(equalTo(Instant.EPOCH))) &&
+ assert(stringify("1970-01-01T00:00:00.\\u005a").fromJson[Instant])(isRight(equalTo(Instant.EPOCH))) &&
assert(stringify(p).fromJson[Instant])(isRight(equalTo(p))) &&
assert(stringify(n).fromJson[Instant])(isRight(equalTo(p)))
},
@@ -173,16 +302,20 @@ object JavaTimeSpec extends ZIOSpecDefault {
val p = LocalDateTime.of(2020, 1, 1, 12, 36, 0)
assert(stringify(n).fromJson[LocalDateTime])(isRight(equalTo(n))) &&
assert(stringify("2020-01-01T12:36").fromJson[LocalDateTime])(isRight(equalTo(p))) &&
- assert(stringify("2020-01-01T12:36:00.").fromJson[LocalDateTime])(isRight(equalTo(p)))
+ assert(stringify("2020-01-01T12:36:00").fromJson[LocalDateTime])(isRight(equalTo(p))) &&
+ assert(stringify("2020-01-01T12:36:00.").fromJson[LocalDateTime])(isRight(equalTo(p))) &&
+ assert(stringify("2020-01-01T12:36:00\\u002e").fromJson[LocalDateTime])(isRight(equalTo(p)))
},
test("LocalTime") {
val n = LocalTime.now()
val p = LocalTime.of(12, 36, 0)
assert(stringify(n).fromJson[LocalTime])(isRight(equalTo(n))) &&
assert(stringify("12:36").fromJson[LocalTime])(isRight(equalTo(p))) &&
- assert(stringify("12:36:00.").fromJson[LocalTime])(isRight(equalTo(p)))
+ assert(stringify("12:36:00").fromJson[LocalTime])(isRight(equalTo(p))) &&
+ assert(stringify("12:36:00.").fromJson[LocalTime])(isRight(equalTo(p))) &&
+ assert(stringify("12:36:00\\u002e").fromJson[LocalTime])(isRight(equalTo(p)))
},
- test("Month") {
+ test("Month fromJson") {
assert(stringify("JANUARY").fromJson[Month])(isRight(equalTo(Month.JANUARY))) &&
assert(stringify("FEBRUARY").fromJson[Month])(isRight(equalTo(Month.FEBRUARY))) &&
assert(stringify("MARCH").fromJson[Month])(isRight(equalTo(Month.MARCH))) &&
@@ -196,27 +329,50 @@ object JavaTimeSpec extends ZIOSpecDefault {
assert(stringify("NOVEMBER").fromJson[Month])(isRight(equalTo(Month.NOVEMBER))) &&
assert(stringify("DECEMBER").fromJson[Month])(isRight(equalTo(Month.DECEMBER))) &&
assert(stringify("december").fromJson[Month])(isRight(equalTo(Month.DECEMBER))) &&
- assert(stringify("December").fromJson[Month])(isRight(equalTo(Month.DECEMBER)))
+ assert(stringify("December").fromJson[Month])(isRight(equalTo(Month.DECEMBER))) &&
+ assert(stringify("Decembe\\u0072").fromJson[Month])(isRight(equalTo(Month.DECEMBER))) &&
+ assert(stringify("Dec").fromJson[Month])(isLeft(equalTo("(expected a Month)")))
+ },
+ test("Month fromJsonAST") {
+ assert(Json.Str("JANUARY").as[Month])(isRight(equalTo(Month.JANUARY))) &&
+ assert(Json.Str("FEBRUARY").as[Month])(isRight(equalTo(Month.FEBRUARY))) &&
+ assert(Json.Str("MARCH").as[Month])(isRight(equalTo(Month.MARCH))) &&
+ assert(Json.Str("APRIL").as[Month])(isRight(equalTo(Month.APRIL))) &&
+ assert(Json.Str("MAY").as[Month])(isRight(equalTo(Month.MAY))) &&
+ assert(Json.Str("JUNE").as[Month])(isRight(equalTo(Month.JUNE))) &&
+ assert(Json.Str("JULY").as[Month])(isRight(equalTo(Month.JULY))) &&
+ assert(Json.Str("AUGUST").as[Month])(isRight(equalTo(Month.AUGUST))) &&
+ assert(Json.Str("SEPTEMBER").as[Month])(isRight(equalTo(Month.SEPTEMBER))) &&
+ assert(Json.Str("OCTOBER").as[Month])(isRight(equalTo(Month.OCTOBER))) &&
+ assert(Json.Str("NOVEMBER").as[Month])(isRight(equalTo(Month.NOVEMBER))) &&
+ assert(Json.Str("DECEMBER").as[Month])(isRight(equalTo(Month.DECEMBER))) &&
+ assert(Json.Str("december").as[Month])(isRight(equalTo(Month.DECEMBER))) &&
+ assert(Json.Str("December").as[Month])(isRight(equalTo(Month.DECEMBER))) &&
+ assert(Json.Str("Decembe\\u0072").as[Month])(isLeft(equalTo("(expected a Month)"))) &&
+ assert(Json.Str("Dec").as[Month])(isLeft(equalTo("(expected a Month)")))
},
test("MonthDay") {
val n = MonthDay.now()
val p = MonthDay.of(1, 1)
assert(stringify(n).fromJson[MonthDay])(isRight(equalTo(n))) &&
- assert(stringify("--01-01").fromJson[MonthDay])(isRight(equalTo(p)))
+ assert(stringify("--01-01").fromJson[MonthDay])(isRight(equalTo(p))) &&
+ assert(stringify("\\u002d-01-01").fromJson[MonthDay])(isRight(equalTo(p)))
},
test("OffsetDateTime") {
val n = OffsetDateTime.now()
val p = OffsetDateTime.of(2020, 1, 1, 12, 36, 12, 0, ZoneOffset.UTC)
assert(stringify(n).fromJson[OffsetDateTime])(isRight(equalTo(n))) &&
assert(stringify("2020-01-01T12:36:12Z").fromJson[OffsetDateTime])(isRight(equalTo(p))) &&
- assert(stringify("2020-01-01T12:36:12.Z").fromJson[OffsetDateTime])(isRight(equalTo(p)))
+ assert(stringify("2020-01-01T12:36:12.Z").fromJson[OffsetDateTime])(isRight(equalTo(p))) &&
+ assert(stringify("2020-01-01T12:36:12.\\u005a").fromJson[OffsetDateTime])(isRight(equalTo(p)))
},
test("OffsetTime") {
val n = OffsetTime.now()
val p = OffsetTime.of(12, 36, 12, 0, ZoneOffset.ofHours(-4))
assert(stringify(n).fromJson[OffsetTime])(isRight(equalTo(n))) &&
assert(stringify("12:36:12-04:00").fromJson[OffsetTime])(isRight(equalTo(p))) &&
- assert(stringify("12:36:12.-04:00").fromJson[OffsetTime])(isRight(equalTo(p)))
+ assert(stringify("12:36:12.-04:00").fromJson[OffsetTime])(isRight(equalTo(p))) &&
+ assert(stringify("12:36:12\\u002e-04:00").fromJson[OffsetTime])(isRight(equalTo(p)))
},
test("Period") {
assert(stringify("P0D").fromJson[Period])(isRight(equalTo(Period.ZERO))) &&
@@ -225,7 +381,8 @@ object JavaTimeSpec extends ZIOSpecDefault {
assert(stringify("-P1D").fromJson[Period])(isRight(equalTo(Period.ofDays(-1)))) &&
assert(stringify("P2M").fromJson[Period])(isRight(equalTo(Period.ofMonths(2)))) &&
assert(stringify("P364D").fromJson[Period])(isRight(equalTo(Period.ofWeeks(52)))) &&
- assert(stringify("P10Y").fromJson[Period])(isRight(equalTo(Period.ofYears(10))))
+ assert(stringify("P10Y").fromJson[Period])(isRight(equalTo(Period.ofYears(10)))) &&
+ assert(stringify("P10\\u0059").fromJson[Period])(isRight(equalTo(Period.ofYears(10))))
},
test("Year") {
val n = Year.now()
@@ -237,7 +394,8 @@ object JavaTimeSpec extends ZIOSpecDefault {
val n = YearMonth.now()
assert(stringify(n).fromJson[YearMonth])(isRight(equalTo(n))) &&
assert(stringify("1999-12").fromJson[YearMonth])(isRight(equalTo(YearMonth.of(1999, 12)))) &&
- assert(stringify("1999-01").fromJson[YearMonth])(isRight(equalTo(YearMonth.of(1999, 1))))
+ assert(stringify("1999-01").fromJson[YearMonth])(isRight(equalTo(YearMonth.of(1999, 1)))) &&
+ assert(stringify("1999\\u002d01").fromJson[YearMonth])(isRight(equalTo(YearMonth.of(1999, 1))))
},
test("ZonedDateTime") {
def zdtAssert(actual: String, expected: ZonedDateTime): TestResult =
@@ -251,8 +409,8 @@ object JavaTimeSpec extends ZIOSpecDefault {
zdtAssert(n.toString, n) &&
zdtAssert("2020-01-01T12:36:00-05:00[America/New_York]", est) &&
+ zdtAssert("2020-01-01T12:36:00-05:00[America\\u002fNew_York]", est) &&
zdtAssert("2020-01-01T12:36:00Z[Etc/UTC]", utc) &&
- zdtAssert("2020-01-01T12:36:00.Z[Etc/UTC]", utc) &&
zdtAssert("2020-01-01T12:36:00+00:00[+00:00]", gmt) &&
zdtAssert(
"2018-02-01T00:00Z",
@@ -314,2799 +472,1133 @@ object JavaTimeSpec extends ZIOSpecDefault {
)
},
test("ZoneId") {
- assert(stringify("America/New_York").fromJson[ZoneId])(
- isRight(
- equalTo(
- ZoneId.of("America/New_York")
- )
- )
+ assert(stringify("America/New_York").fromJson[ZoneId])(isRight(equalTo(ZoneId.of("America/New_York")))) &&
+ assert(stringify("America\\u002fNew_York").fromJson[ZoneId])(
+ isRight(equalTo(ZoneId.of("America/New_York")))
) &&
assert(stringify("Etc/UTC").fromJson[ZoneId])(isRight(equalTo(ZoneId.of("Etc/UTC")))) &&
- assert(stringify("Pacific/Auckland").fromJson[ZoneId])(
- isRight(
- equalTo(
- ZoneId.of("Pacific/Auckland")
- )
- )
- ) &&
- assert(stringify("Asia/Shanghai").fromJson[ZoneId])(
- isRight(equalTo(ZoneId.of("Asia/Shanghai")))
- ) &&
+ assert(stringify("Pacific/Auckland").fromJson[ZoneId])(isRight(equalTo(ZoneId.of("Pacific/Auckland")))) &&
+ assert(stringify("Asia/Shanghai").fromJson[ZoneId])(isRight(equalTo(ZoneId.of("Asia/Shanghai")))) &&
assert(stringify("Africa/Cairo").fromJson[ZoneId])(isRight(equalTo(ZoneId.of("Africa/Cairo"))))
},
test("ZoneOffset") {
assert(stringify("Z").fromJson[ZoneOffset])(isRight(equalTo(ZoneOffset.UTC))) &&
+ assert(stringify("\\u005a").fromJson[ZoneOffset])(isRight(equalTo(ZoneOffset.UTC))) &&
assert(stringify("+05:00").fromJson[ZoneOffset])(isRight(equalTo(ZoneOffset.ofHours(5)))) &&
- assert(stringify("-05:00").fromJson[ZoneOffset])(isRight(equalTo(ZoneOffset.ofHours(-5))))
+ assert(stringify("-05:00").fromJson[ZoneOffset])(isRight(equalTo(ZoneOffset.ofHours(-5)))) &&
+ assert(stringify("+05:10:10").fromJson[ZoneOffset])(
+ isRight(equalTo(ZoneOffset.ofHoursMinutesSeconds(5, 10, 10)))
+ )
}
),
suite("Decoder Sad Path")(
- test("DayOfWeek") {
- assert(stringify("foody").fromJson[DayOfWeek])(
- isLeft(
- equalTo("(No enum constant java.time.DayOfWeek.FOODY)") || // JVM
- equalTo("(Unrecognized day of week name: FOODY)") || // Scala.js 2.
- equalTo("(enum case not found: FOODY)") // Scala.js 3.
- )
- )
- },
test("Duration") {
- assert("""""""".fromJson[Duration])(
- isLeft(containsString(" is not a valid ISO-8601 format, illegal duration at index 0"))
- ) &&
- assert(""""X"""".fromJson[Duration])(
- isLeft(containsString("X is not a valid ISO-8601 format, expected 'P' or '-' at index 0"))
- ) &&
- assert(""""P"""".fromJson[Duration])(
- isLeft(containsString("P is not a valid ISO-8601 format, illegal duration at index 1"))
- ) &&
- assert(""""-"""".fromJson[Duration])(
- isLeft(containsString("- is not a valid ISO-8601 format, illegal duration at index 1"))
- ) &&
- assert(""""-X"""".fromJson[Duration])(
- isLeft(containsString("-X is not a valid ISO-8601 format, expected 'P' at index 1"))
- ) &&
- assert(""""PXD"""".fromJson[Duration])(
- isLeft(containsString("PXD is not a valid ISO-8601 format, expected '-' or digit at index 1"))
- ) &&
- assert(""""P-"""".fromJson[Duration])(
- isLeft(containsString("P- is not a valid ISO-8601 format, illegal duration at index 2"))
- ) &&
- assert(""""P-XD"""".fromJson[Duration])(
- isLeft(containsString("P-XD is not a valid ISO-8601 format, expected digit at index 2"))
- ) &&
- assert(""""P1XD"""".fromJson[Duration])(
- isLeft(containsString("P1XD is not a valid ISO-8601 format, expected 'D' or digit at index 2"))
- ) &&
- assert(""""PT"""".fromJson[Duration])(
- isLeft(containsString("PT is not a valid ISO-8601 format, illegal duration at index 2"))
- ) &&
- assert(""""PT0SX"""".fromJson[Duration])(
- isLeft(containsString("PT0SX is not a valid ISO-8601 format, illegal duration at index 4"))
- ) &&
- assert(""""P1DT"""".fromJson[Duration])(
- isLeft(containsString("P1DT is not a valid ISO-8601 format, illegal duration at index 4"))
- ) &&
- assert(""""P106751991167301D"""".fromJson[Duration])(
- isLeft(containsString("P106751991167301D is not a valid ISO-8601 format, illegal duration at index 16"))
- ) &&
- assert(""""P1067519911673000D"""".fromJson[Duration])(
- isLeft(containsString("P1067519911673000D is not a valid ISO-8601 format, illegal duration at index 17"))
- ) &&
- assert(""""P-106751991167301D"""".fromJson[Duration])(
- isLeft(containsString("P-106751991167301D is not a valid ISO-8601 format, illegal duration at index 17"))
- ) &&
- assert(""""P1DX1H"""".fromJson[Duration])(
- isLeft(containsString("P1DX1H is not a valid ISO-8601 format, expected 'T' or '\"' at index 3"))
- ) &&
- assert(""""P1DTXH"""".fromJson[Duration])(
- isLeft(containsString("P1DTXH is not a valid ISO-8601 format, expected '-' or digit at index 4"))
- ) &&
- assert(""""P1DT-XH"""".fromJson[Duration])(
- isLeft(containsString("P1DT-XH is not a valid ISO-8601 format, expected digit at index 5"))
- ) &&
- assert(""""P1DT1XH"""".fromJson[Duration])(
- isLeft(
- containsString(
- "P1DT1XH is not a valid ISO-8601 format, expected 'H' or 'M' or 'S or '.' or digit at index 5"
- )
- )
- ) &&
- assert(""""P1DT1H1XM"""".fromJson[Duration])(
- isLeft(
- containsString("P1DT1H1XM is not a valid ISO-8601 format, expected 'M' or 'S or '.' or digit at index 7")
- )
- ) &&
- assert(""""P0DT2562047788015216H"""".fromJson[Duration])(
- isLeft(containsString("P0DT2562047788015216H is not a valid ISO-8601 format, illegal duration at index 20"))
- ) &&
- assert(""""P0DT-2562047788015216H"""".fromJson[Duration])(
- isLeft(
- containsString("P0DT-2562047788015216H is not a valid ISO-8601 format, illegal duration at index 21")
- )
- ) &&
- assert(""""P0DT153722867280912931M"""".fromJson[Duration])(
- isLeft(
- containsString("P0DT153722867280912931M is not a valid ISO-8601 format, illegal duration at index 22")
- )
- ) &&
- assert(""""P0DT-153722867280912931M"""".fromJson[Duration])(
- isLeft(
- containsString("P0DT-153722867280912931M is not a valid ISO-8601 format, illegal duration at index 23")
- )
- ) &&
- assert(""""P0DT9223372036854775808S"""".fromJson[Duration])(
- isLeft(
- containsString("P0DT9223372036854775808S is not a valid ISO-8601 format, illegal duration at index 23")
- )
- ) &&
- assert(""""P0DT92233720368547758000S"""".fromJson[Duration])(
- isLeft(
- containsString("P0DT92233720368547758000S is not a valid ISO-8601 format, illegal duration at index 23")
- )
- ) &&
- assert(""""P0DT-9223372036854775809S"""".fromJson[Duration])(
- isLeft(
- containsString("P0DT-9223372036854775809S is not a valid ISO-8601 format, illegal duration at index 23")
- )
- ) &&
- assert(""""P1DT1H1MXS"""".fromJson[Duration])(
- isLeft(
- containsString("P1DT1H1MXS is not a valid ISO-8601 format, expected '\"' or '-' or digit at index 8")
- )
- ) &&
- assert(""""P1DT1H1M-XS"""".fromJson[Duration])(
- isLeft(containsString("P1DT1H1M-XS is not a valid ISO-8601 format, expected digit at index 9"))
- ) &&
- assert(""""P1DT1H1M0XS"""".fromJson[Duration])(
- isLeft(containsString("P1DT1H1M0XS is not a valid ISO-8601 format, expected 'S or '.' or digit at index 9"))
- ) &&
- assert(""""P1DT1H1M0.XS"""".fromJson[Duration])(
- isLeft(containsString("P1DT1H1M0.XS is not a valid ISO-8601 format, expected 'S' or digit at index 10"))
- ) &&
- assert(""""P1DT1H1M0.012345678XS"""".fromJson[Duration])(
- isLeft(containsString("P1DT1H1M0.012345678XS is not a valid ISO-8601 format, expected 'S' at index 19"))
- ) &&
- assert(""""P1DT1H1M0.0123456789S"""".fromJson[Duration])(
- isLeft(containsString("P1DT1H1M0.0123456789S is not a valid ISO-8601 format, expected 'S' at index 19"))
- ) &&
- assert(""""P0DT0H0M9223372036854775808S"""".fromJson[Duration])(
- isLeft(
- containsString(
- "P0DT0H0M9223372036854775808S is not a valid ISO-8601 format, illegal duration at index 27"
- )
- )
- ) &&
- assert(""""P0DT0H0M92233720368547758080S"""".fromJson[Duration])(
- isLeft(
- containsString(
- "P0DT0H0M92233720368547758080S is not a valid ISO-8601 format, illegal duration at index 27"
- )
- )
- ) &&
- assert(""""P0DT0H0M-9223372036854775809S"""".fromJson[Duration])(
- isLeft(
- containsString(
- "P0DT0H0M-9223372036854775809S is not a valid ISO-8601 format, illegal duration at index 27"
- )
- )
- ) &&
- assert(""""P106751991167300DT24H"""".fromJson[Duration])(
- isLeft(containsString("P106751991167300DT24H is not a valid ISO-8601 format, illegal duration at index 20"))
- ) &&
- assert(""""P0DT2562047788015215H60M"""".fromJson[Duration])(
- isLeft(
- containsString("P0DT2562047788015215H60M is not a valid ISO-8601 format, illegal duration at index 23")
- )
- ) &&
- assert(""""P0DT0H153722867280912930M60S"""".fromJson[Duration])(
- isLeft(
- containsString(
- "P0DT0H153722867280912930M60S is not a valid ISO-8601 format, illegal duration at index 27"
- )
- )
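+ // Every malformed input (including a very long blank string) should be rejected with the generic "expected a Duration" message; the remaining java.time sad-path tests below follow the same pattern.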
+ assert(stringify(" " * 10000).fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("X").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("-").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("-X").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("PXD").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P-").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P-XD").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1XD").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("PT").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("PT0SX").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P106751991167301D").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1067519911673000D").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P-106751991167301D").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DX1H").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DTXH").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT-XH").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT1XH").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT1H1XM").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P0DT2562047788015216H").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT-2562047788015216H").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT153722867280912931M").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT-153722867280912931M").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT9223372036854775808S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT92233720368547758000S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT-9223372036854775809S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P1DT1H1MXS").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT1H1M-XS").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT1H1M0XS").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT1H1M0.XS").fromJson[Duration])(isLeft(containsString("expected a Duration"))) &&
+ assert(stringify("P1DT1H1M0.012345678XS").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P1DT1H1M0.0123456789S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT0H0M9223372036854775808S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT0H0M92233720368547758080S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT0H0M-9223372036854775809S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P106751991167300DT24H").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT2562047788015215H60M").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
+ ) &&
+ assert(stringify("P0DT0H153722867280912930M60S").fromJson[Duration])(
+ isLeft(containsString("expected a Duration"))
)
},
test("Instant") {
- assert(stringify("").fromJson[Instant])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal instant at index 0)")
- )
- ) &&
- assert(stringify("2020").fromJson[Instant])(
- isLeft(
- equalTo("(2020 is not a valid ISO-8601 format, illegal instant at index 0)")
- )
- ) &&
- assert(stringify("2020-0").fromJson[Instant])(
- isLeft(
- equalTo("(2020-0 is not a valid ISO-8601 format, illegal instant at index 5)")
- )
- ) &&
- assert(stringify("2020-01-0").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-0 is not a valid ISO-8601 format, illegal instant at index 8)")
- )
- ) &&
- assert(stringify("2020-01-01T0").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T0 is not a valid ISO-8601 format, illegal instant at index 11)")
- )
- ) &&
- assert(stringify("2020-01-01T01:0").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:0 is not a valid ISO-8601 format, illegal instant at index 14)")
- )
- ) &&
- assert(stringify("X020-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(X020-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or '+' or digit at index 0)")
- )
- ) &&
- assert(stringify("2X20-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2X20-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("20X0-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(20X0-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("202X-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(202X-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("2020X01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020X01-01T01:01Z is not a valid ISO-8601 format, expected '-' at index 4)")
- )
- ) &&
- assert(stringify("2020-X1-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-X1-01T01:01Z is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("2020-0X-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-0X-01T01:01Z is not a valid ISO-8601 format, expected digit at index 6)")
- )
- ) &&
- assert(stringify("2020-01X01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01X01T01:01Z is not a valid ISO-8601 format, expected '-' at index 7)")
- )
- ) &&
- assert(stringify("2020-01-X1T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-X1T01:01Z is not a valid ISO-8601 format, expected digit at index 8)")
- )
- ) &&
- assert(stringify("2020-01-0XT01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-0XT01:01Z is not a valid ISO-8601 format, expected digit at index 9)")
- )
- ) &&
- assert(stringify("2020-01-01X01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01X01:01Z is not a valid ISO-8601 format, expected 'T' at index 10)")
- )
- ) &&
- assert(stringify("2020-01-01TX1:01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01TX1:01 is not a valid ISO-8601 format, expected digit at index 11)")
- )
- ) &&
- assert(stringify("2020-01-01T0X:01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T0X:01 is not a valid ISO-8601 format, expected digit at index 12)")
- )
- ) &&
- assert(stringify("2020-01-01T24:01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T24:01 is not a valid ISO-8601 format, illegal hour at index 12)")
- )
- ) &&
- assert(stringify("2020-01-01T01X01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01X01 is not a valid ISO-8601 format, expected ':' at index 13)")
- )
- ) &&
- assert(stringify("2020-01-01T01:X1").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:X1 is not a valid ISO-8601 format, expected digit at index 14)")
- )
- ) &&
- assert(stringify("2020-01-01T01:0X").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:0X is not a valid ISO-8601 format, expected digit at index 15)")
- )
- ) &&
- assert(stringify("2020-01-01T01:60").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:60 is not a valid ISO-8601 format, illegal minute at index 15)")
- )
- ) &&
- assert(stringify("2020-01-01T01:01X").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01X is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 16)"
- )
- )
- ) &&
- assert(stringify("2020-01-01T01:01:0").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:0 is not a valid ISO-8601 format, illegal instant at index 17)")
- )
- ) &&
- assert(stringify("2020-01-01T01:01:X1Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:X1Z is not a valid ISO-8601 format, expected digit at index 17)")
- )
- ) &&
- assert(stringify("2020-01-01T01:01:0XZ").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:0XZ is not a valid ISO-8601 format, expected digit at index 18)")
- )
- ) &&
- assert(stringify("2020-01-01T01:01:60Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:60Z is not a valid ISO-8601 format, illegal second at index 18)")
- )
- ) &&
- assert(stringify("2020-01-01T01:01:012").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:012 is not a valid ISO-8601 format, expected '.' or '+' or '-' or 'Z' at index 19)"
- )
- )
- ) &&
- assert(stringify("2020-01-01T01:01:01.X").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01.X is not a valid ISO-8601 format, expected digit or '+' or '-' or 'Z' at index 20)"
- )
- )
- ) &&
+ assert(stringify(" " * 10000).fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-0").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-0").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T0").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:0").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("X020-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2X20-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("20X0-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("202X-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020X01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-X1-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-0X-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01X01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-X1T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-0XT01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01X01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01TX1:01").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T0X:01").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T24:01").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01X01").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:X1").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:0X").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:60").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:01X").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:01:0").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:01:X1Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:01:0XZ").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:01:60Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:01:012").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-01T01:01:01.X").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
assert(stringify("2020-01-01T01:01:01.123456789X").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01.123456789X is not a valid ISO-8601 format, expected '+' or '-' or 'Z' at index 29)"
- )
- )
- ) &&
- assert(stringify("2020-01-01T01:01:01ZX").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01ZX is not a valid ISO-8601 format, illegal instant at index 20)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
+ assert(stringify("2020-01-01T01:01:01ZX").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
assert(stringify("2020-01-01T01:01:01+X1:01:01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+X1:01:01 is not a valid ISO-8601 format, expected digit at index 20)")
- )
- ) &&
- assert(stringify("2020-01-01T01:01:01+0").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+0 is not a valid ISO-8601 format, illegal instant at index 20)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
+ assert(stringify("2020-01-01T01:01:01+0").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
assert(stringify("2020-01-01T01:01:01+0X:01:01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+0X:01:01 is not a valid ISO-8601 format, expected digit at index 21)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+19:01:01").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+19:01:01 is not a valid ISO-8601 format, illegal timezone offset hour at index 21)"
- )
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01X01:01").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01X01:01 is not a valid ISO-8601 format, illegal instant at index 23)"
- )
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:0").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:0 is not a valid ISO-8601 format, illegal instant at index 23)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:X1:01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:X1:01 is not a valid ISO-8601 format, expected digit at index 23)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:0X:01").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:0X:01 is not a valid ISO-8601 format, expected digit at index 24)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:60:01").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:60:01 is not a valid ISO-8601 format, illegal timezone offset minute at index 24)"
- )
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01X01").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01X01 is not a valid ISO-8601 format, illegal instant at index 26)"
- )
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:0").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01:0 is not a valid ISO-8601 format, illegal instant at index 26)"
- )
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:X1").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:01:X1 is not a valid ISO-8601 format, expected digit at index 26)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:0X").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:01:0X is not a valid ISO-8601 format, expected digit at index 27)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:60").fromJson[Instant])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01:60 is not a valid ISO-8601 format, illegal timezone offset second at index 27)"
- )
- )
- ) &&
- assert(stringify("+X0000-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+X0000-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("+1X000-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+1X000-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("+10X00-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+10X00-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("+100X0-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+100X0-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 4)")
- )
- ) &&
- assert(stringify("+1000X-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+1000X-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("+10000X-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+10000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 6)")
- )
- ) &&
- assert(stringify("+100000X-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+100000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 7)")
- )
- ) &&
+ isLeft(containsString("expected an Instant"))
+ ) &&
+ assert(stringify("+X0000-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("+1X000-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("+10X00-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("+100X0-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("+1000X-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("+10000X-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("+100000X-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
assert(stringify("+1000000X-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+1000000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 8)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("+1000000001-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+1000000001-01-01T01:01Z is not a valid ISO-8601 format, illegal year at index 10)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("+3333333333-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(+3333333333-01-01T01:01Z is not a valid ISO-8601 format, illegal year at index 10)")
- )
+ isLeft(containsString("expected an Instant"))
) &&
assert(stringify("-1000000001-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(-1000000001-01-01T01:01Z is not a valid ISO-8601 format, illegal year at index 10)")
- )
- ) &&
- assert(stringify("-0000-01-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(-0000-01-01T01:01Z is not a valid ISO-8601 format, illegal year at index 4)")
- )
- ) &&
- assert(stringify("+10000").fromJson[Instant])(
- isLeft(
- equalTo("(+10000 is not a valid ISO-8601 format, illegal instant at index 6)")
- )
- ) &&
- assert(stringify("2020-00-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-00-01T01:01Z is not a valid ISO-8601 format, illegal month at index 6)")
- )
- ) &&
- assert(stringify("2020-13-01T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-13-01T01:01Z is not a valid ISO-8601 format, illegal month at index 6)")
- )
- ) &&
- assert(stringify("2020-01-00T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-00T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-01-32T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-01-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-02-30T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-02-30T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-03-32T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-03-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-04-31T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-04-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-05-32T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-05-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-06-31T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-06-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-07-32T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-07-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-08-32T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-08-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-09-31T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-09-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-10-32T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-10-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-11-31T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-11-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-12-32T01:01Z").fromJson[Instant])(
- isLeft(
- equalTo("(2020-12-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
- )
+ isLeft(containsString("expected an Instant"))
+ ) &&
+ assert(stringify("-0000-01-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("+10000").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-00-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-13-01T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-00T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-01-32T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-02-30T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-03-32T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-04-31T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-05-32T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-06-31T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-07-32T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-08-32T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-09-31T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-10-32T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-11-31T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant"))) &&
+ assert(stringify("2020-12-32T01:01Z").fromJson[Instant])(isLeft(containsString("expected an Instant")))
},
test("LocalDate") {
- assert(stringify("").fromJson[LocalDate])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal local date at index 0)")
- )
- ) &&
- assert(stringify("2020").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020 is not a valid ISO-8601 format, illegal local date at index 0)")
- )
- ) &&
- assert(stringify("2020-0").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-0 is not a valid ISO-8601 format, illegal local date at index 5)")
- )
- ) &&
- assert(stringify("2020-01-0").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-01-0 is not a valid ISO-8601 format, illegal local date at index 8)")
- )
- ) &&
- assert(stringify("2020-01-012").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-01-012 is not a valid ISO-8601 format, illegal local date at index 10)")
- )
- ) &&
- assert(stringify("X020-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(X020-01-01 is not a valid ISO-8601 format, expected '-' or '+' or digit at index 0)")
- )
- ) &&
- assert(stringify("2X20-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(2X20-01-01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("20X0-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(20X0-01-01 is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("202X-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(202X-01-01 is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("2020X01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020X01-01 is not a valid ISO-8601 format, expected '-' at index 4)")
- )
- ) &&
- assert(stringify("2020-X1-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-X1-01 is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("2020-0X-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-0X-01 is not a valid ISO-8601 format, expected digit at index 6)")
- )
- ) &&
- assert(stringify("2020-01X01").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-01X01 is not a valid ISO-8601 format, expected '-' at index 7)")
- )
- ) &&
- assert(stringify("2020-01-X1").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-01-X1 is not a valid ISO-8601 format, expected digit at index 8)")
- )
- ) &&
- assert(stringify("2020-01-0X").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-01-0X is not a valid ISO-8601 format, expected digit at index 9)")
- )
- ) &&
- assert(stringify("+X0000-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+X0000-01-01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("+1X000-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+1X000-01-01 is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("+10X00-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+10X00-01-01 is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("+100X0-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+100X0-01-01 is not a valid ISO-8601 format, expected digit at index 4)")
- )
- ) &&
- assert(stringify("+1000X-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+1000X-01-01 is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("+10000X-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+10000X-01-01 is not a valid ISO-8601 format, expected '-' or digit at index 6)")
- )
- ) &&
- assert(stringify("+100000X-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+100000X-01-01 is not a valid ISO-8601 format, expected '-' or digit at index 7)")
- )
- ) &&
- assert(stringify("+1000000X-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+1000000X-01-01 is not a valid ISO-8601 format, expected '-' or digit at index 8)")
- )
- ) &&
- assert(stringify("+1000000000-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(+1000000000-01-01 is not a valid ISO-8601 format, expected '-' at index 10)")
- )
- ) &&
- assert(stringify("-1000000000-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(-1000000000-01-01 is not a valid ISO-8601 format, expected '-' at index 10)")
- )
- ) &&
- assert(stringify("-0000-01-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(-0000-01-01 is not a valid ISO-8601 format, illegal year at index 4)")
- )
- ) &&
- assert(stringify("+10000").fromJson[LocalDate])(
- isLeft(
- equalTo("(+10000 is not a valid ISO-8601 format, illegal local date at index 6)")
- )
- ) &&
- assert(stringify("2020-00-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-00-01 is not a valid ISO-8601 format, illegal month at index 6)")
- )
- ) &&
- assert(stringify("2020-13-01").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-13-01 is not a valid ISO-8601 format, illegal month at index 6)")
- )
- ) &&
- assert(stringify("2020-01-00").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-01-00 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-01-32").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-01-32 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-02-30").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-02-30 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-03-32").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-03-32 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-04-31").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-04-31 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-05-32").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-05-32 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-06-31").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-06-31 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-07-32").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-07-32 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-08-32").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-08-32 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-09-31").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-09-31 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-10-32").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-10-32 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-11-31").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-11-31 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- ) &&
- assert(stringify("2020-12-32").fromJson[LocalDate])(
- isLeft(
- equalTo("(2020-12-32 is not a valid ISO-8601 format, illegal day at index 9)")
- )
- )
+ assert(stringify(" " * 10000).fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-0").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-01-0").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-01-012").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("X020-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2X20-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("20X0-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("202X-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020X01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-X1-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-0X-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-01X01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-01-X1").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-01-0X").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+X0000-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+1X000-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+10X00-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+100X0-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+1000X-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+10000X-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+100000X-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+1000000X-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+1000000000-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("-1000000000-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("-0000-01-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("+10000").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-00-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-13-01").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-01-00").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-01-32").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-02-30").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-03-32").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-04-31").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-05-32").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-06-31").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-07-32").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-08-32").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-09-31").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-10-32").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-11-31").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate"))) &&
+ assert(stringify("2020-12-32").fromJson[LocalDate])(isLeft(containsString("expected a LocalDate")))
},
test("LocalDateTime") {
- assert(stringify("").fromJson[LocalDateTime])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal local date time at index 0)")
- )
- ) &&
- assert(stringify("2020").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020 is not a valid ISO-8601 format, illegal local date time at index 0)")
- )
- ) &&
- assert(stringify("2020-0").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-0 is not a valid ISO-8601 format, illegal local date time at index 5)")
- )
- ) &&
- assert(stringify("2020-01-0").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-0 is not a valid ISO-8601 format, illegal local date time at index 8)")
- )
- ) &&
+ assert(stringify(" " * 10000).fromJson[LocalDateTime])(isLeft(containsString("expected a LocalDateTime"))) &&
+ assert(stringify("").fromJson[LocalDateTime])(isLeft(containsString("expected a LocalDateTime"))) &&
+ assert(stringify("2020").fromJson[LocalDateTime])(isLeft(containsString("expected a LocalDateTime"))) &&
+ assert(stringify("2020-0").fromJson[LocalDateTime])(isLeft(containsString("expected a LocalDateTime"))) &&
+ assert(stringify("2020-01-0").fromJson[LocalDateTime])(isLeft(containsString("expected a LocalDateTime"))) &&
assert(stringify("2020-01-01T0").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T0 is not a valid ISO-8601 format, illegal local date time at index 11)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:0").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:0 is not a valid ISO-8601 format, illegal local date time at index 14)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("X020-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(X020-01-01T01:01 is not a valid ISO-8601 format, expected '-' or '+' or digit at index 0)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2X20-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2X20-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("20X0-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(20X0-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 2)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("202X-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(202X-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 3)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020X01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020X01-01T01:01 is not a valid ISO-8601 format, expected '-' at index 4)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-X1-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-X1-01T01:01 is not a valid ISO-8601 format, expected digit at index 5)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-0X-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-0X-01T01:01 is not a valid ISO-8601 format, expected digit at index 6)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01X01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01X01T01:01 is not a valid ISO-8601 format, expected '-' at index 7)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-X1T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-X1T01:01 is not a valid ISO-8601 format, expected digit at index 8)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-0XT01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-0XT01:01 is not a valid ISO-8601 format, expected digit at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01X01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01X01:01 is not a valid ISO-8601 format, expected 'T' at index 10)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01TX1:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01TX1:01 is not a valid ISO-8601 format, expected digit at index 11)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T0X:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T0X:01 is not a valid ISO-8601 format, expected digit at index 12)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T24:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T24:01 is not a valid ISO-8601 format, illegal hour at index 12)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01X01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01X01 is not a valid ISO-8601 format, expected ':' at index 13)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:X1").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:X1 is not a valid ISO-8601 format, expected digit at index 14)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:0X").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:0X is not a valid ISO-8601 format, expected digit at index 15)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:60").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:60 is not a valid ISO-8601 format, illegal minute at index 15)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:01X").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01X is not a valid ISO-8601 format, expected ':' at index 16)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:01:0").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:0 is not a valid ISO-8601 format, illegal local date time at index 17)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:01:X1").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:X1 is not a valid ISO-8601 format, expected digit at index 17)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:01:0X").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:0X is not a valid ISO-8601 format, expected digit at index 18)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:01:60").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:60 is not a valid ISO-8601 format, illegal second at index 18)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:01:012").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:012 is not a valid ISO-8601 format, expected '.' at index 19)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01.X").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01.X is not a valid ISO-8601 format, illegal local date time at index 20)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+X0000-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+X0000-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+1X000-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+1X000-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 2)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+10X00-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+10X00-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 3)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+100X0-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+100X0-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 4)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+1000X-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+1000X-01-01T01:01 is not a valid ISO-8601 format, expected digit at index 5)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+10000X-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+10000X-01-01T01:01 is not a valid ISO-8601 format, expected '-' or digit at index 6)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+100000X-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+100000X-01-01T01:01 is not a valid ISO-8601 format, expected '-' or digit at index 7)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+1000000X-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+1000000X-01-01T01:01 is not a valid ISO-8601 format, expected '-' or digit at index 8)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("+1000000000-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+1000000000-01-01T01:01 is not a valid ISO-8601 format, expected '-' at index 10)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("-1000000000-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(-1000000000-01-01T01:01 is not a valid ISO-8601 format, expected '-' at index 10)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("-0000-01-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(-0000-01-01T01:01 is not a valid ISO-8601 format, illegal year at index 4)")
- )
- ) &&
- assert(stringify("+10000").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(+10000 is not a valid ISO-8601 format, illegal local date time at index 6)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
+ assert(stringify("+10000").fromJson[LocalDateTime])(isLeft(containsString("expected a LocalDateTime"))) &&
assert(stringify("2020-00-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-00-01T01:01 is not a valid ISO-8601 format, illegal month at index 6)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-13-01T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-13-01T01:01 is not a valid ISO-8601 format, illegal month at index 6)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-00T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-00T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-01-32T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-01-32T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-02-30T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-02-30T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-03-32T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-03-32T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-04-31T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-04-31T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-05-32T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-05-32T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-06-31T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-06-31T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-07-32T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-07-32T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-08-32T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-08-32T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-09-31T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-09-31T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-10-32T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-10-32T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-11-31T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-11-31T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
) &&
assert(stringify("2020-12-32T01:01").fromJson[LocalDateTime])(
- isLeft(
- equalTo("(2020-12-32T01:01 is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a LocalDateTime"))
)
},
test("LocalTime") {
- assert(stringify("").fromJson[LocalTime])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal local time at index 0)")
- )
- ) &&
- assert(stringify("0").fromJson[LocalTime])(
- isLeft(
- equalTo("(0 is not a valid ISO-8601 format, illegal local time at index 0)")
- )
- ) &&
- assert(stringify("01:0").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:0 is not a valid ISO-8601 format, illegal local time at index 3)")
- )
- ) &&
- assert(stringify("X1:01").fromJson[LocalTime])(
- isLeft(
- equalTo("(X1:01 is not a valid ISO-8601 format, expected digit at index 0)")
- )
- ) &&
- assert(stringify("0X:01").fromJson[LocalTime])(
- isLeft(
- equalTo("(0X:01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("24:01").fromJson[LocalTime])(
- isLeft(
- equalTo("(24:01 is not a valid ISO-8601 format, illegal hour at index 1)")
- )
- ) &&
- assert(stringify("01X01").fromJson[LocalTime])(
- isLeft(
- equalTo("(01X01 is not a valid ISO-8601 format, expected ':' at index 2)")
- )
- ) &&
- assert(stringify("01:X1").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:X1 is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("01:0X").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:0X is not a valid ISO-8601 format, expected digit at index 4)")
- )
- ) &&
- assert(stringify("01:60").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:60 is not a valid ISO-8601 format, illegal minute at index 4)")
- )
- ) &&
- assert(stringify("01:01X").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:01X is not a valid ISO-8601 format, expected ':' at index 5)")
- )
- ) &&
- assert(stringify("01:01:0").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:01:0 is not a valid ISO-8601 format, illegal local time at index 6)")
- )
- ) &&
- assert(stringify("01:01:X1").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:01:X1 is not a valid ISO-8601 format, expected digit at index 6)")
- )
- ) &&
- assert(stringify("01:01:0X").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:01:0X is not a valid ISO-8601 format, expected digit at index 7)")
- )
- ) &&
- assert(stringify("01:01:60").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:01:60 is not a valid ISO-8601 format, illegal second at index 7)")
- )
- ) &&
- assert(stringify("01:01:012").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:01:012 is not a valid ISO-8601 format, expected '.' at index 8)")
- )
- ) &&
- assert(stringify("01:01:01.X").fromJson[LocalTime])(
- isLeft(
- equalTo("(01:01:01.X is not a valid ISO-8601 format, illegal local time at index 9)")
- )
- )
- },
- test("Month") {
- assert(stringify("FebTober").fromJson[Month])(
- isLeft(
- equalTo("(No enum constant java.time.Month.FEBTOBER)") || // JVM
- equalTo("(Unrecognized month name: FEBTOBER)") || // Scala.js 2.
- equalTo("(enum case not found: FEBTOBER)") // Scala.js 3.
- )
- )
+ assert(stringify(" " * 10000).fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("0").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:0").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("X1:01").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("0X:01").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("24:01").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01X01").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:X1").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:0X").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:60").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:01X").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:01:0").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:01:X1").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:01:0X").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:01:60").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:01:012").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime"))) &&
+ assert(stringify("01:01:01.X").fromJson[LocalTime])(isLeft(containsString("expected a LocalTime")))
},
test("MonthDay") {
- assert(stringify("").fromJson[MonthDay])(
- isLeft(equalTo("( is not a valid ISO-8601 format, illegal month day at index 0)"))
- ) &&
- assert(stringify("X-01-01").fromJson[MonthDay])(
- isLeft(equalTo("(X-01-01 is not a valid ISO-8601 format, expected '-' at index 0)"))
- ) &&
- assert(stringify("-X01-01").fromJson[MonthDay])(
- isLeft(equalTo("(-X01-01 is not a valid ISO-8601 format, expected '-' at index 1)"))
- ) &&
- assert(stringify("--X1-01").fromJson[MonthDay])(
- isLeft(equalTo("(--X1-01 is not a valid ISO-8601 format, expected digit at index 2)"))
- ) &&
- assert(stringify("--0X-01").fromJson[MonthDay])(
- isLeft(equalTo("(--0X-01 is not a valid ISO-8601 format, expected digit at index 3)"))
- ) &&
- assert(stringify("--00-01").fromJson[MonthDay])(
- isLeft(equalTo("(--00-01 is not a valid ISO-8601 format, illegal month at index 3)"))
- ) &&
- assert(stringify("--13-01").fromJson[MonthDay])(
- isLeft(equalTo("(--13-01 is not a valid ISO-8601 format, illegal month at index 3)"))
- ) &&
- assert(stringify("--01X01").fromJson[MonthDay])(
- isLeft(equalTo("(--01X01 is not a valid ISO-8601 format, expected '-' at index 4)"))
- ) &&
- assert(stringify("--01-X1").fromJson[MonthDay])(
- isLeft(equalTo("(--01-X1 is not a valid ISO-8601 format, expected digit at index 5)"))
- ) &&
- assert(stringify("--01-0X").fromJson[MonthDay])(
- isLeft(equalTo("(--01-0X is not a valid ISO-8601 format, expected digit at index 6)"))
- ) &&
- assert(stringify("--01-00").fromJson[MonthDay])(
- isLeft(equalTo("(--01-00 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--01-32").fromJson[MonthDay])(
- isLeft(equalTo("(--01-32 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--02-30").fromJson[MonthDay])(
- isLeft(equalTo("(--02-30 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--03-32").fromJson[MonthDay])(
- isLeft(equalTo("(--03-32 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--04-31").fromJson[MonthDay])(
- isLeft(equalTo("(--04-31 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--05-32").fromJson[MonthDay])(
- isLeft(equalTo("(--05-32 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--06-31").fromJson[MonthDay])(
- isLeft(equalTo("(--06-31 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--07-32").fromJson[MonthDay])(
- isLeft(equalTo("(--07-32 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--08-32").fromJson[MonthDay])(
- isLeft(equalTo("(--08-32 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--09-31").fromJson[MonthDay])(
- isLeft(equalTo("(--09-31 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--10-32").fromJson[MonthDay])(
- isLeft(equalTo("(--10-32 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--11-31").fromJson[MonthDay])(
- isLeft(equalTo("(--11-31 is not a valid ISO-8601 format, illegal day at index 6)"))
- ) &&
- assert(stringify("--12-32").fromJson[MonthDay])(
- isLeft(equalTo("(--12-32 is not a valid ISO-8601 format, illegal day at index 6)"))
- )
+ assert(stringify(" " * 10000).fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("X-01-01").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("-X01-01").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--X1-01").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--0X-01").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--00-01").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--13-01").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--01X01").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--01-X1").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--01-0X").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--01-00").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--01-32").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--02-30").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--03-32").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--04-31").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--05-32").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--06-31").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--07-32").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--08-32").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--09-31").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--10-32").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--11-31").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay"))) &&
+ assert(stringify("--12-32").fromJson[MonthDay])(isLeft(containsString("expected a MonthDay")))
},
test("OffsetDateTime") {
- assert(stringify("").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal offset date time at index 0)")
- )
- ) &&
- assert(stringify("2020").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020 is not a valid ISO-8601 format, illegal offset date time at index 0)")
- )
- ) &&
- assert(stringify("2020-0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-0 is not a valid ISO-8601 format, illegal offset date time at index 5)")
- )
+ assert(stringify(" " * 10000).fromJson[OffsetDateTime])(
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
+ assert(stringify("").fromJson[OffsetDateTime])(isLeft(containsString("expected an OffsetDateTime"))) &&
+ assert(stringify("2020").fromJson[OffsetDateTime])(isLeft(containsString("expected an OffsetDateTime"))) &&
+ assert(stringify("2020-0").fromJson[OffsetDateTime])(isLeft(containsString("expected an OffsetDateTime"))) &&
assert(stringify("2020-01-0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-0 is not a valid ISO-8601 format, illegal offset date time at index 8)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T0 is not a valid ISO-8601 format, illegal offset date time at index 11)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:0 is not a valid ISO-8601 format, illegal offset date time at index 14)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("X020-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(X020-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or '+' or digit at index 0)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2X20-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2X20-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 1)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("20X0-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(20X0-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 2)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("202X-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(202X-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 3)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020X01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020X01-01T01:01Z is not a valid ISO-8601 format, expected '-' at index 4)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-X1-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-X1-01T01:01Z is not a valid ISO-8601 format, expected digit at index 5)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-0X-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-0X-01T01:01Z is not a valid ISO-8601 format, expected digit at index 6)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01X01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01X01T01:01Z is not a valid ISO-8601 format, expected '-' at index 7)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-X1T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-X1T01:01Z is not a valid ISO-8601 format, expected digit at index 8)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-0XT01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-0XT01:01Z is not a valid ISO-8601 format, expected digit at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01X01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01X01:01Z is not a valid ISO-8601 format, expected 'T' at index 10)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01TX1:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01TX1:01 is not a valid ISO-8601 format, expected digit at index 11)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T0X:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T0X:01 is not a valid ISO-8601 format, expected digit at index 12)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T24:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T24:01 is not a valid ISO-8601 format, illegal hour at index 12)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
+ ) &&
+ assert(stringify("2020-01-01T01X").fromJson[OffsetDateTime])(
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01X01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01X01 is not a valid ISO-8601 format, expected ':' at index 13)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:X1").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:X1 is not a valid ISO-8601 format, expected digit at index 14)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:0X").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:0X is not a valid ISO-8601 format, expected digit at index 15)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:60").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:60 is not a valid ISO-8601 format, illegal minute at index 15)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01 is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 16)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01X").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01X is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 16)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:0 is not a valid ISO-8601 format, illegal offset date time at index 17)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:X1Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:X1Z is not a valid ISO-8601 format, expected digit at index 17)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:0XZ").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:0XZ is not a valid ISO-8601 format, expected digit at index 18)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:60Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:60Z is not a valid ISO-8601 format, illegal second at index 18)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01 is not a valid ISO-8601 format, expected '.' or '+' or '-' or 'Z' at index 19)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:012").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:012 is not a valid ISO-8601 format, expected '.' or '+' or '-' or 'Z' at index 19)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01.").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01. is not a valid ISO-8601 format, expected digit or '+' or '-' or 'Z' at index 20)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01.X").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01.X is not a valid ISO-8601 format, expected digit or '+' or '-' or 'Z' at index 20)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01.123456789X").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01.123456789X is not a valid ISO-8601 format, expected '+' or '-' or 'Z' at index 29)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01ZX").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01ZX is not a valid ISO-8601 format, illegal offset date time at index 20)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+X1:01:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+X1:01:01 is not a valid ISO-8601 format, expected digit at index 20)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+0 is not a valid ISO-8601 format, illegal offset date time at index 20)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+0X:01:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+0X:01:01 is not a valid ISO-8601 format, expected digit at index 21)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+19:01:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+19:01:01 is not a valid ISO-8601 format, illegal timezone offset hour at index 21)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01X01:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01X01:01 is not a valid ISO-8601 format, illegal offset date time at index 23)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:0 is not a valid ISO-8601 format, illegal offset date time at index 23)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:X1:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:X1:01 is not a valid ISO-8601 format, expected digit at index 23)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:0X:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:0X:01 is not a valid ISO-8601 format, expected digit at index 24)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:60:01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:60:01 is not a valid ISO-8601 format, illegal timezone offset minute at index 24)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01X01").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01X01 is not a valid ISO-8601 format, illegal offset date time at index 26)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:0").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01:0 is not a valid ISO-8601 format, illegal offset date time at index 26)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:X1").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:01:X1 is not a valid ISO-8601 format, expected digit at index 26)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:0X").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:01:0X is not a valid ISO-8601 format, expected digit at index 27)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:60").fromJson[OffsetDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01:60 is not a valid ISO-8601 format, illegal timezone offset second at index 27)"
- )
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+X0000-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+X0000-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 1)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+1X000-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+1X000-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 2)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+10X00-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+10X00-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 3)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+100X0-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+100X0-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 4)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+1000X-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+1000X-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 5)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+10000X-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+10000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 6)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+100000X-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+100000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 7)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+1000000X-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+1000000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 8)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("+1000000000-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+1000000000-01-01T01:01Z is not a valid ISO-8601 format, expected '-' at index 10)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("-1000000000-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(-1000000000-01-01T01:01Z is not a valid ISO-8601 format, expected '-' at index 10)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("-0000-01-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(-0000-01-01T01:01Z is not a valid ISO-8601 format, illegal year at index 4)")
- )
- ) &&
- assert(stringify("+10000").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(+10000 is not a valid ISO-8601 format, illegal offset date time at index 6)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
+ assert(stringify("+10000").fromJson[OffsetDateTime])(isLeft(containsString("expected an OffsetDateTime"))) &&
assert(stringify("2020-00-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-00-01T01:01Z is not a valid ISO-8601 format, illegal month at index 6)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-13-01T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-13-01T01:01Z is not a valid ISO-8601 format, illegal month at index 6)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-00T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-00T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-01-32T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-01-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-02-30T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-02-30T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-03-32T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-03-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-04-31T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-04-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-05-32T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-05-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-06-31T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-06-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-07-32T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-07-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-08-32T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-08-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-09-31T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-09-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-10-32T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-10-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-11-31T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-11-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
) &&
assert(stringify("2020-12-32T01:01Z").fromJson[OffsetDateTime])(
- isLeft(
- equalTo("(2020-12-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected an OffsetDateTime"))
)
},
test("OffsetTime") {
- assert(stringify("").fromJson[OffsetTime])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal offset time at index 0)")
- )
- ) &&
- assert(stringify("0").fromJson[OffsetTime])(
- isLeft(
- equalTo("(0 is not a valid ISO-8601 format, illegal offset time at index 0)")
- )
- ) &&
- assert(stringify("01:0").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:0 is not a valid ISO-8601 format, illegal offset time at index 3)")
- )
- ) &&
- assert(stringify("X1:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(X1:01 is not a valid ISO-8601 format, expected digit at index 0)")
- )
- ) &&
- assert(stringify("0X:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(0X:01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("24:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(24:01 is not a valid ISO-8601 format, illegal hour at index 1)")
- )
- ) &&
- assert(stringify("01X01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01X01 is not a valid ISO-8601 format, expected ':' at index 2)")
- )
- ) &&
- assert(stringify("01:X1").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:X1 is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("01:0X").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:0X is not a valid ISO-8601 format, expected digit at index 4)")
- )
- ) &&
- assert(stringify("01:60").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:60 is not a valid ISO-8601 format, illegal minute at index 4)")
- )
- ) &&
- assert(stringify("01:01").fromJson[OffsetTime])(
- isLeft(
- equalTo(
- "(01:01 is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 5)"
- )
- )
- ) &&
- assert(stringify("01:01X").fromJson[OffsetTime])(
- isLeft(
- equalTo(
- "(01:01X is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 5)"
- )
- )
- ) &&
- assert(stringify("01:01:0").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:0 is not a valid ISO-8601 format, illegal offset time at index 6)")
- )
- ) &&
- assert(stringify("01:01:X1Z").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:X1Z is not a valid ISO-8601 format, expected digit at index 6)")
- )
- ) &&
- assert(stringify("01:01:0XZ").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:0XZ is not a valid ISO-8601 format, expected digit at index 7)")
- )
- ) &&
- assert(stringify("01:01:60Z").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:60Z is not a valid ISO-8601 format, illegal second at index 7)")
- )
- ) &&
- assert(stringify("01:01:01").fromJson[OffsetTime])(
- isLeft(
- equalTo(
- "(01:01:01 is not a valid ISO-8601 format, expected '.' or '+' or '-' or 'Z' at index 8)"
- )
- )
- ) &&
- assert(stringify("01:01:012").fromJson[OffsetTime])(
- isLeft(
- equalTo(
- "(01:01:012 is not a valid ISO-8601 format, expected '.' or '+' or '-' or 'Z' at index 8)"
- )
- )
- ) &&
- assert(stringify("01:01:01.").fromJson[OffsetTime])(
- isLeft(
- equalTo(
- "(01:01:01. is not a valid ISO-8601 format, expected digit or '+' or '-' or 'Z' at index 9)"
- )
- )
- ) &&
- assert(stringify("01:01:01.X").fromJson[OffsetTime])(
- isLeft(
- equalTo(
- "(01:01:01.X is not a valid ISO-8601 format, expected digit or '+' or '-' or 'Z' at index 9)"
- )
- )
- ) &&
+ assert(stringify(" " * 10000).fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("0").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:0").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("X1:01").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("0X:01").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("24:01").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01X01").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:X1").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:0X").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:60").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01X").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:0").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:X1Z").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:0XZ").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:60Z").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:01").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:012").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:01.").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:01.X").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
assert(stringify("01:01:01.123456789X").fromJson[OffsetTime])(
- isLeft(
- equalTo(
- "(01:01:01.123456789X is not a valid ISO-8601 format, expected '+' or '-' or 'Z' at index 18)"
- )
- )
- ) &&
- assert(stringify("01:01:01ZX").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01ZX is not a valid ISO-8601 format, illegal offset time at index 9)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
+ assert(stringify("01:01:01ZX").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
assert(stringify("01:01:01+X1:01:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+X1:01:01 is not a valid ISO-8601 format, expected digit at index 9)")
- )
- ) &&
- assert(stringify("01:01:01+0").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+0 is not a valid ISO-8601 format, illegal offset time at index 9)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
+ assert(stringify("01:01:01+0").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
assert(stringify("01:01:01+0X:01:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+0X:01:01 is not a valid ISO-8601 format, expected digit at index 10)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
assert(stringify("01:01:01+19:01:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+19:01:01 is not a valid ISO-8601 format, illegal timezone offset hour at index 10)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
assert(stringify("01:01:01+01X01:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01X01:01 is not a valid ISO-8601 format, illegal offset time at index 12)")
- )
- ) &&
- assert(stringify("01:01:01+01:0").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:0 is not a valid ISO-8601 format, illegal offset time at index 12)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
+ assert(stringify("01:01:01+01X").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
+ assert(stringify("01:01:01+01:0").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
assert(stringify("01:01:01+01:X1:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:X1:01 is not a valid ISO-8601 format, expected digit at index 12)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
assert(stringify("01:01:01+01:0X:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:0X:01 is not a valid ISO-8601 format, expected digit at index 13)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
assert(stringify("01:01:01+01:60:01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:60:01 is not a valid ISO-8601 format, illegal timezone offset minute at index 13)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
+ assert(stringify("01:01:01+01:01X").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime"))) &&
assert(stringify("01:01:01+01:01X01").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:01X01 is not a valid ISO-8601 format, illegal offset time at index 15)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
assert(stringify("01:01:01+01:01:0").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:01:0 is not a valid ISO-8601 format, illegal offset time at index 15)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
assert(stringify("01:01:01+01:01:X1").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:01:X1 is not a valid ISO-8601 format, expected digit at index 15)")
- )
+ isLeft(containsString("expected an OffsetTime"))
) &&
assert(stringify("01:01:01+01:01:0X").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:01:0X is not a valid ISO-8601 format, expected digit at index 16)")
- )
- ) &&
- assert(stringify("01:01:01+01:01:60").fromJson[OffsetTime])(
- isLeft(
- equalTo("(01:01:01+01:01:60 is not a valid ISO-8601 format, illegal timezone offset second at index 16)")
- )
- )
+ isLeft(containsString("expected an OffsetTime"))
+ ) &&
+ assert(stringify("01:01:01+01:01:60").fromJson[OffsetTime])(isLeft(containsString("expected an OffsetTime")))
},
test("Period") {
- assert(stringify("").fromJson[Period])(
- isLeft(equalTo("( is not a valid ISO-8601 format, illegal period at index 0)"))
- ) &&
- assert(stringify("X").fromJson[Period])(
- isLeft(equalTo("(X is not a valid ISO-8601 format, expected 'P' or '-' at index 0)"))
- ) &&
- assert(stringify("P").fromJson[Period])(
- isLeft(equalTo("(P is not a valid ISO-8601 format, illegal period at index 1)"))
- ) &&
- assert(stringify("-").fromJson[Period])(
- isLeft(equalTo("(- is not a valid ISO-8601 format, illegal period at index 1)"))
- ) &&
- assert(stringify("PXY").fromJson[Period])(
- isLeft(equalTo("(PXY is not a valid ISO-8601 format, expected '-' or digit at index 1)"))
- ) &&
- assert(stringify("P-").fromJson[Period])(
- isLeft(equalTo("(P- is not a valid ISO-8601 format, illegal period at index 2)"))
- ) &&
- assert(stringify("P-XY").fromJson[Period])(
- isLeft(equalTo("(P-XY is not a valid ISO-8601 format, expected digit at index 2)"))
- ) &&
- assert(stringify("P1XY").fromJson[Period])(
- isLeft(
- equalTo("(P1XY is not a valid ISO-8601 format, expected 'Y' or 'M' or 'W' or 'D' or digit at index 2)")
- )
- ) &&
- assert(stringify("P2147483648Y").fromJson[Period])(
- isLeft(equalTo("(P2147483648Y is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P21474836470Y").fromJson[Period])(
- isLeft(equalTo("(P21474836470Y is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P-2147483649Y").fromJson[Period])(
- isLeft(equalTo("(P-2147483649Y is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P2147483648M").fromJson[Period])(
- isLeft(equalTo("(P2147483648M is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P21474836470M").fromJson[Period])(
- isLeft(equalTo("(P21474836470M is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P-2147483649M").fromJson[Period])(
- isLeft(equalTo("(P-2147483649M is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P2147483648W").fromJson[Period])(
- isLeft(equalTo("(P2147483648W is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P21474836470W").fromJson[Period])(
- isLeft(equalTo("(P21474836470W is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P-2147483649W").fromJson[Period])(
- isLeft(equalTo("(P-2147483649W is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P2147483648D").fromJson[Period])(
- isLeft(equalTo("(P2147483648D is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P21474836470D").fromJson[Period])(
- isLeft(equalTo("(P21474836470D is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P-2147483649D").fromJson[Period])(
- isLeft(equalTo("(P-2147483649D is not a valid ISO-8601 format, illegal period at index 11)"))
- ) &&
- assert(stringify("P1YXM").fromJson[Period])(
- isLeft(equalTo("(P1YXM is not a valid ISO-8601 format, expected '-' or digit at index 3)"))
- ) &&
- assert(stringify("P1Y-XM").fromJson[Period])(
- isLeft(equalTo("(P1Y-XM is not a valid ISO-8601 format, expected digit at index 4)"))
- ) &&
- assert(stringify("P1Y1XM").fromJson[Period])(
- isLeft(equalTo("(P1Y1XM is not a valid ISO-8601 format, expected 'M' or 'W' or 'D' or digit at index 4)"))
- ) &&
- assert(stringify("P1Y2147483648M").fromJson[Period])(
- isLeft(equalTo("(P1Y2147483648M is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y21474836470M").fromJson[Period])(
- isLeft(equalTo("(P1Y21474836470M is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y-2147483649M").fromJson[Period])(
- isLeft(equalTo("(P1Y-2147483649M is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y2147483648W").fromJson[Period])(
- isLeft(equalTo("(P1Y2147483648W is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y21474836470W").fromJson[Period])(
- isLeft(equalTo("(P1Y21474836470W is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y-2147483649W").fromJson[Period])(
- isLeft(equalTo("(P1Y-2147483649W is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y2147483648D").fromJson[Period])(
- isLeft(equalTo("(P1Y2147483648D is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y21474836470D").fromJson[Period])(
- isLeft(equalTo("(P1Y21474836470D is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y-2147483649D").fromJson[Period])(
- isLeft(equalTo("(P1Y-2147483649D is not a valid ISO-8601 format, illegal period at index 13)"))
- ) &&
- assert(stringify("P1Y1MXW").fromJson[Period])(
- isLeft(equalTo("(P1Y1MXW is not a valid ISO-8601 format, expected '\"' or '-' or digit at index 5)"))
- ) &&
- assert(stringify("P1Y1M-XW").fromJson[Period])(
- isLeft(equalTo("(P1Y1M-XW is not a valid ISO-8601 format, expected digit at index 6)"))
- ) &&
- assert(stringify("P1Y1M1XW").fromJson[Period])(
- isLeft(equalTo("(P1Y1M1XW is not a valid ISO-8601 format, expected 'W' or 'D' or digit at index 6)"))
- ) &&
- assert(stringify("P1Y1M306783379W").fromJson[Period])(
- isLeft(equalTo("(P1Y1M306783379W is not a valid ISO-8601 format, illegal period at index 14)"))
- ) &&
- assert(stringify("P1Y1M3067833790W").fromJson[Period])(
- isLeft(equalTo("(P1Y1M3067833790W is not a valid ISO-8601 format, illegal period at index 14)"))
- ) &&
- assert(stringify("P1Y1M-306783379W").fromJson[Period])(
- isLeft(equalTo("(P1Y1M-306783379W is not a valid ISO-8601 format, illegal period at index 15)"))
- ) &&
- assert(stringify("P1Y1M2147483648D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M2147483648D is not a valid ISO-8601 format, illegal period at index 15)"))
- ) &&
- assert(stringify("P1Y1M21474836470D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M21474836470D is not a valid ISO-8601 format, illegal period at index 15)"))
- ) &&
- assert(stringify("P1Y1M-2147483649D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M-2147483649D is not a valid ISO-8601 format, illegal period at index 15)"))
- ) &&
- assert(stringify("P1Y1M1WXD").fromJson[Period])(
- isLeft(equalTo("(P1Y1M1WXD is not a valid ISO-8601 format, expected '\"' or '-' or digit at index 7)"))
- ) &&
- assert(stringify("P1Y1M1W-XD").fromJson[Period])(
- isLeft(equalTo("(P1Y1M1W-XD is not a valid ISO-8601 format, expected digit at index 8)"))
- ) &&
- assert(stringify("P1Y1M1W1XD").fromJson[Period])(
- isLeft(equalTo("(P1Y1M1W1XD is not a valid ISO-8601 format, expected 'D' or digit at index 8)"))
- ) &&
- assert(stringify("P1Y1M306783378W8D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M306783378W8D is not a valid ISO-8601 format, illegal period at index 16)"))
- ) &&
- assert(stringify("P1Y1M-306783378W-8D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M-306783378W-8D is not a valid ISO-8601 format, illegal period at index 18)"))
- ) &&
- assert(stringify("P1Y1M1W2147483647D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M1W2147483647D is not a valid ISO-8601 format, illegal period at index 17)"))
- ) &&
- assert(stringify("P1Y1M-1W-2147483648D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M-1W-2147483648D is not a valid ISO-8601 format, illegal period at index 19)"))
- ) &&
- assert(stringify("P1Y1M0W2147483648D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M0W2147483648D is not a valid ISO-8601 format, illegal period at index 17)"))
- ) &&
- assert(stringify("P1Y1M0W21474836470D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M0W21474836470D is not a valid ISO-8601 format, illegal period at index 17)"))
- ) &&
- assert(stringify("P1Y1M0W-2147483649D").fromJson[Period])(
- isLeft(equalTo("(P1Y1M0W-2147483649D is not a valid ISO-8601 format, illegal period at index 17)"))
- ) &&
- assert(stringify("P1Y1M1W1DX").fromJson[Period])(
- isLeft(equalTo("(P1Y1M1W1DX is not a valid ISO-8601 format, illegal period at index 9)"))
- )
+ assert(stringify(" " * 10000).fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("X").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("-").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("PXY").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P-").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P-XY").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1XY").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P2147483648Y").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P21474836470Y").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P-2147483649Y").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P2147483648M").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P21474836470M").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P-2147483649M").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P2147483648W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P21474836470W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P-2147483649W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P2147483648D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P21474836470D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P-2147483649D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1YXM").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y-XM").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1XM").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y2147483648M").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y21474836470M").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y-2147483649M").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y2147483648W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y21474836470W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y-2147483649W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y2147483648D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y21474836470D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y-2147483649D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1MXW").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M-XW").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M1XW").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M306783379W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M3067833790W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M-306783379W").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M2147483648D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M21474836470D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M-2147483649D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M1WXD").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M1W-XD").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M1W1XD").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M306783378W8D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M-306783378W-8D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M1W2147483647D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M-1W-2147483648D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M0W2147483648D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M0W21474836470D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M0W-2147483649D").fromJson[Period])(isLeft(containsString("expected a Period"))) &&
+ assert(stringify("P1Y1M1W1DX").fromJson[Period])(isLeft(containsString("expected a Period")))
},
test("Year") {
- assert(stringify("").fromJson[Year])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal year at index 0)")
- )
- ) &&
- assert(stringify("2").fromJson[Year])(
- isLeft(
- equalTo("(2 is not a valid ISO-8601 format, illegal year at index 0)")
- )
- ) &&
- assert(stringify("22").fromJson[Year])(
- isLeft(
- equalTo("(22 is not a valid ISO-8601 format, illegal year at index 0)")
- )
- ) &&
- assert(stringify("222").fromJson[Year])(
- isLeft(
- equalTo("(222 is not a valid ISO-8601 format, illegal year at index 0)")
- )
- ) &&
- assert(stringify("X020").fromJson[Year])(
- isLeft(
- equalTo("(X020 is not a valid ISO-8601 format, expected '-' or '+' or digit at index 0)")
- )
- ) &&
- assert(stringify("2X20").fromJson[Year])(
- isLeft(
- equalTo("(2X20 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("20X0").fromJson[Year])(
- isLeft(
- equalTo("(20X0 is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("202X").fromJson[Year])(
- isLeft(
- equalTo("(202X is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("+X0000").fromJson[Year])(
- isLeft(
- equalTo("(+X0000 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("+1X000").fromJson[Year])(
- isLeft(
- equalTo("(+1X000 is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("+10X00").fromJson[Year])(
- isLeft(
- equalTo("(+10X00 is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("+100X0").fromJson[Year])(
- isLeft(
- equalTo("(+100X0 is not a valid ISO-8601 format, expected digit at index 4)")
- )
- ) &&
- assert(stringify("+1000X").fromJson[Year])(
- isLeft(
- equalTo("(+1000X is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("+10000X").fromJson[Year])(
- isLeft(
- equalTo("(+10000X is not a valid ISO-8601 format, expected digit at index 6)")
- )
- ) &&
- assert(stringify("+100000X").fromJson[Year])(
- isLeft(
- equalTo("(+100000X is not a valid ISO-8601 format, expected digit at index 7)")
- )
- ) &&
- assert(stringify("+1000000X").fromJson[Year])(
- isLeft(
- equalTo("(+1000000X is not a valid ISO-8601 format, expected digit at index 8)")
- )
- ) &&
- assert(stringify("+1000000000").fromJson[Year])(
- isLeft(
- equalTo("(+1000000000 is not a valid ISO-8601 format, illegal year at index 10)")
- )
- ) &&
- assert(stringify("-1000000000").fromJson[Year])(
- isLeft(
- equalTo("(-1000000000 is not a valid ISO-8601 format, illegal year at index 10)")
- )
- ) &&
- assert(stringify("-0000").fromJson[Year])(
- isLeft(
- equalTo("(-0000 is not a valid ISO-8601 format, illegal year at index 4)")
- )
- ) &&
- assert(stringify("10000").fromJson[Year])(
- isLeft(
- equalTo("(10000 is not a valid ISO-8601 format, illegal year at index 4)")
- )
- )
+ assert(stringify(" " * 10000).fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("2").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("22").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("222").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("X020").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("2X20").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("20X0").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("202X").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+X0000").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+1X000").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+10X00").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+100X0").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+1000X").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+10000X").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+100000X").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+1000000X").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("+1000000000").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("-1000000000").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("-0000").fromJson[Year])(isLeft(containsString("expected a Year"))) &&
+ assert(stringify("10000").fromJson[Year])(isLeft(containsString("expected a Year")))
},
test("YearMonth") {
- assert(stringify("").fromJson[YearMonth])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal year month at index 0)")
- )
- ) &&
- assert(stringify("2020").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020 is not a valid ISO-8601 format, illegal year month at index 0)")
- )
- ) &&
- assert(stringify("2020-0").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020-0 is not a valid ISO-8601 format, illegal year month at index 5)")
- )
- ) &&
- assert(stringify("2020-012").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020-012 is not a valid ISO-8601 format, illegal year month at index 7)")
- )
- ) &&
- assert(stringify("X020-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(X020-01 is not a valid ISO-8601 format, expected '-' or '+' or digit at index 0)")
- )
- ) &&
- assert(stringify("2X20-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(2X20-01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("20X0-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(20X0-01 is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("202X-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(202X-01 is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("2020X01").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020X01 is not a valid ISO-8601 format, expected '-' at index 4)")
- )
- ) &&
- assert(stringify("2020-X1").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020-X1 is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("2020-0X").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020-0X is not a valid ISO-8601 format, expected digit at index 6)")
- )
- ) &&
- assert(stringify("+X0000-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+X0000-01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("+1X000-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+1X000-01 is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("+10X00-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+10X00-01 is not a valid ISO-8601 format, expected digit at index 3)")
- )
- ) &&
- assert(stringify("+100X0-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+100X0-01 is not a valid ISO-8601 format, expected digit at index 4)")
- )
- ) &&
- assert(stringify("+1000X-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+1000X-01 is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("+10000X-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+10000X-01 is not a valid ISO-8601 format, expected '-' or digit at index 6)")
- )
- ) &&
- assert(stringify("+100000X-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+100000X-01 is not a valid ISO-8601 format, expected '-' or digit at index 7)")
- )
- ) &&
- assert(stringify("+1000000X-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+1000000X-01 is not a valid ISO-8601 format, expected '-' or digit at index 8)")
- )
- ) &&
- assert(stringify("+1000000000-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(+1000000000-01 is not a valid ISO-8601 format, expected '-' at index 10)")
- )
- ) &&
- assert(stringify("-1000000000-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(-1000000000-01 is not a valid ISO-8601 format, expected '-' at index 10)")
- )
- ) &&
- assert(stringify("-0000-01").fromJson[YearMonth])(
- isLeft(
- equalTo("(-0000-01 is not a valid ISO-8601 format, illegal year at index 4)")
- )
- ) &&
- assert(stringify("+10000").fromJson[YearMonth])(
- isLeft(
- equalTo("(+10000 is not a valid ISO-8601 format, illegal year month at index 6)")
- )
- ) &&
- assert(stringify("2020-00").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020-00 is not a valid ISO-8601 format, illegal month at index 6)")
- )
- ) &&
- assert(stringify("2020-13").fromJson[YearMonth])(
- isLeft(
- equalTo("(2020-13 is not a valid ISO-8601 format, illegal month at index 6)")
- )
- )
+ assert(stringify(" " * 10000).fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020-0").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020-012").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("X020-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2X20-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("20X0-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("202X-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020X01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020-X1").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020-0X").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+X0000-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+1X000-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+10X00-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+100X0-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+1000X-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+10000X-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+100000X-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+1000000X-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+1000000000-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("-1000000000-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("-0000-01").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("+10000").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020-00").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth"))) &&
+ assert(stringify("2020-13").fromJson[YearMonth])(isLeft(containsString("expected a YearMonth")))
},
test("ZonedDateTime") {
- assert(stringify("").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal zoned date time at index 0)")
- )
- ) &&
- assert(stringify("2020").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020 is not a valid ISO-8601 format, illegal zoned date time at index 0)")
- )
- ) &&
- assert(stringify("2020-0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-0 is not a valid ISO-8601 format, illegal zoned date time at index 5)")
- )
- ) &&
- assert(stringify("2020-01-0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-0 is not a valid ISO-8601 format, illegal zoned date time at index 8)")
- )
- ) &&
+ assert(stringify(" " * 10000).fromJson[ZonedDateTime])(isLeft(containsString("expected a ZonedDateTime"))) &&
+ assert(stringify("").fromJson[ZonedDateTime])(isLeft(containsString("expected a ZonedDateTime"))) &&
+ assert(stringify("2020").fromJson[ZonedDateTime])(isLeft(containsString("expected a ZonedDateTime"))) &&
+ assert(stringify("2020-0").fromJson[ZonedDateTime])(isLeft(containsString("expected a ZonedDateTime"))) &&
+ assert(stringify("2020-01-0").fromJson[ZonedDateTime])(isLeft(containsString("expected a ZonedDateTime"))) &&
assert(stringify("2020-01-01T0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T0 is not a valid ISO-8601 format, illegal zoned date time at index 11)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:0 is not a valid ISO-8601 format, illegal zoned date time at index 14)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("X020-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(X020-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or '+' or digit at index 0)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2X20-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2X20-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 1)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("20X0-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(20X0-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 2)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("202X-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(202X-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 3)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020X01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020X01-01T01:01Z is not a valid ISO-8601 format, expected '-' at index 4)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-X1-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-X1-01T01:01Z is not a valid ISO-8601 format, expected digit at index 5)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-0X-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-0X-01T01:01Z is not a valid ISO-8601 format, expected digit at index 6)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01X01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01X01T01:01Z is not a valid ISO-8601 format, expected '-' at index 7)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-X1T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-X1T01:01Z is not a valid ISO-8601 format, expected digit at index 8)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-0XT01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-0XT01:01Z is not a valid ISO-8601 format, expected digit at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01X01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01X01:01Z is not a valid ISO-8601 format, expected 'T' at index 10)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01TX1:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01TX1:01 is not a valid ISO-8601 format, expected digit at index 11)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T0X:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T0X:01 is not a valid ISO-8601 format, expected digit at index 12)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T24:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T24:01 is not a valid ISO-8601 format, illegal hour at index 12)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01X01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01X01 is not a valid ISO-8601 format, expected ':' at index 13)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:X1").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:X1 is not a valid ISO-8601 format, expected digit at index 14)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:0X").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:0X is not a valid ISO-8601 format, expected digit at index 15)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:60").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:60 is not a valid ISO-8601 format, illegal minute at index 15)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01 is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 16)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01X").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01X is not a valid ISO-8601 format, expected ':' or '+' or '-' or 'Z' at index 16)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:0 is not a valid ISO-8601 format, illegal zoned date time at index 17)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:X1Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:X1Z is not a valid ISO-8601 format, expected digit at index 17)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:0XZ").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:0XZ is not a valid ISO-8601 format, expected digit at index 18)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:60Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:60Z is not a valid ISO-8601 format, illegal second at index 18)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01 is not a valid ISO-8601 format, expected '.' or '+' or '-' or 'Z' at index 19)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:012").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:012 is not a valid ISO-8601 format, expected '.' or '+' or '-' or 'Z' at index 19)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01.").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01. is not a valid ISO-8601 format, expected digit or '+' or '-' or 'Z' at index 20)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01.X").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01.X is not a valid ISO-8601 format, expected digit or '+' or '-' or 'Z' at index 20)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01.123456789X").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01.123456789X is not a valid ISO-8601 format, expected '+' or '-' or 'Z' at index 29)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01ZX").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01ZX is not a valid ISO-8601 format, expected '[' at index 20)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+X1:01:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+X1:01:01 is not a valid ISO-8601 format, expected digit at index 20)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+0 is not a valid ISO-8601 format, illegal zoned date time at index 20)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+0X:01:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+0X:01:01 is not a valid ISO-8601 format, expected digit at index 21)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+19:01:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+19:01:01 is not a valid ISO-8601 format, illegal timezone offset hour at index 21)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01X01:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01X01:01 is not a valid ISO-8601 format, expected '[' at index 22)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:0 is not a valid ISO-8601 format, illegal zoned date time at index 23)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:X1:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:X1:01 is not a valid ISO-8601 format, expected digit at index 23)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:0X:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:0X:01 is not a valid ISO-8601 format, expected digit at index 24)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:60:01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:60:01 is not a valid ISO-8601 format, illegal timezone offset minute at index 24)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01X01").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01X01 is not a valid ISO-8601 format, expected '[' at index 25)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:0").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01:0 is not a valid ISO-8601 format, illegal zoned date time at index 26)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:X1").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:01:X1 is not a valid ISO-8601 format, expected digit at index 26)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:0X").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:01:0X is not a valid ISO-8601 format, expected digit at index 27)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:01X").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01:01+01:01:01X is not a valid ISO-8601 format, expected '[' at index 28)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01:01+01:01:60").fromJson[ZonedDateTime])(
- isLeft(
- equalTo(
- "(2020-01-01T01:01:01+01:01:60 is not a valid ISO-8601 format, illegal timezone offset second at index 27)"
- )
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+X0000-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+X0000-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 1)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+1X000-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+1X000-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 2)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+10X00-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+10X00-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 3)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+100X0-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+100X0-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 4)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+1000X-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+1000X-01-01T01:01Z is not a valid ISO-8601 format, expected digit at index 5)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+10000X-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+10000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 6)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+100000X-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+100000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 7)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+1000000X-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+1000000X-01-01T01:01Z is not a valid ISO-8601 format, expected '-' or digit at index 8)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("+1000000000-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+1000000000-01-01T01:01Z is not a valid ISO-8601 format, expected '-' at index 10)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("-1000000000-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(-1000000000-01-01T01:01Z is not a valid ISO-8601 format, expected '-' at index 10)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("-0000-01-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(-0000-01-01T01:01Z is not a valid ISO-8601 format, illegal year at index 4)")
- )
- ) &&
- assert(stringify("+10000").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(+10000 is not a valid ISO-8601 format, illegal zoned date time at index 6)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
+ assert(stringify("+10000").fromJson[ZonedDateTime])(isLeft(containsString("expected a ZonedDateTime"))) &&
assert(stringify("2020-00-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-00-01T01:01Z is not a valid ISO-8601 format, illegal month at index 6)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-13-01T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-13-01T01:01Z is not a valid ISO-8601 format, illegal month at index 6)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-00T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-00T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-32T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-02-30T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-02-30T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-03-32T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-03-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-04-31T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-04-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-05-32T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-05-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-06-31T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-06-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-07-32T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-07-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-08-32T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-08-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-09-31T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-09-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-10-32T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-10-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-11-31T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-11-31T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-12-32T01:01Z").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-12-32T01:01Z is not a valid ISO-8601 format, illegal day at index 9)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01Z[").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01Z[ is not a valid ISO-8601 format, illegal zoned date time at index 17)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01Z[X]").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01Z[X] is not a valid ISO-8601 format, illegal zoned date time at index 18)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
) &&
assert(stringify("2020-01-01T01:01Z[GMT]X").fromJson[ZonedDateTime])(
- isLeft(
- equalTo("(2020-01-01T01:01Z[GMT]X is not a valid ISO-8601 format, illegal zoned date time at index 22)")
- )
+ isLeft(containsString("expected a ZonedDateTime"))
)
},
test("ZoneId") {
- assert(stringify("America/New York").fromJson[ZoneId])(
- isLeft(equalTo("(America/New York is not a valid ISO-8601 format, illegal zone id at index 0)"))
- ) &&
- assert(stringify("Solar_System/Mars").fromJson[ZoneId])(
- isLeft(equalTo("(Solar_System/Mars is not a valid ISO-8601 format, illegal zone id at index 0)"))
- )
+ assert(stringify(" " * 10000).fromJson[ZoneId])(isLeft(containsString("expected a ZoneId"))) &&
+ assert(stringify("America/New York").fromJson[ZoneId])(isLeft(containsString("expected a ZoneId"))) &&
+ assert(stringify("Solar_System/Mars").fromJson[ZoneId])(isLeft(containsString("expected a ZoneId")))
},
test("ZoneOffset") {
- assert(stringify("").fromJson[ZoneOffset])(
- isLeft(
- equalTo("( is not a valid ISO-8601 format, illegal zone offset at index 0)")
- )
- ) &&
- assert(stringify("X").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(X is not a valid ISO-8601 format, expected '+' or '-' or 'Z' at index 0)")
- )
- ) &&
- assert(stringify("+X1:01:01").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+X1:01:01 is not a valid ISO-8601 format, expected digit at index 1)")
- )
- ) &&
- assert(stringify("+0").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+0 is not a valid ISO-8601 format, illegal zone offset at index 1)")
- )
- ) &&
- assert(stringify("+0X:01:01").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+0X:01:01 is not a valid ISO-8601 format, expected digit at index 2)")
- )
- ) &&
- assert(stringify("+19:01:01").fromJson[ZoneOffset])(
- isLeft(
- equalTo(
- "(+19:01:01 is not a valid ISO-8601 format, illegal timezone offset hour at index 2)"
- )
- )
- ) &&
- assert(stringify("+01X01:01").fromJson[ZoneOffset])(
- isLeft(
- equalTo(
- "(+01X01:01 is not a valid ISO-8601 format, illegal zone offset at index 4)"
- )
- )
- ) &&
- assert(stringify("+01:0").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+01:0 is not a valid ISO-8601 format, illegal zone offset at index 4)")
- )
- ) &&
- assert(stringify("+01:X1:01").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+01:X1:01 is not a valid ISO-8601 format, expected digit at index 4)")
- )
- ) &&
- assert(stringify("+01:0X:01").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+01:0X:01 is not a valid ISO-8601 format, expected digit at index 5)")
- )
- ) &&
- assert(stringify("+01:60:01").fromJson[ZoneOffset])(
- isLeft(
- equalTo(
- "(+01:60:01 is not a valid ISO-8601 format, illegal timezone offset minute at index 5)"
- )
- )
- ) &&
- assert(stringify("+01:01X01").fromJson[ZoneOffset])(
- isLeft(
- equalTo(
- "(+01:01X01 is not a valid ISO-8601 format, illegal zone offset at index 7)"
- )
- )
- ) &&
- assert(stringify("+01:01:0").fromJson[ZoneOffset])(
- isLeft(
- equalTo(
- "(+01:01:0 is not a valid ISO-8601 format, illegal zone offset at index 7)"
- )
- )
- ) &&
- assert(stringify("+01:01:X1").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+01:01:X1 is not a valid ISO-8601 format, expected digit at index 7)")
- )
- ) &&
- assert(stringify("+01:01:0X").fromJson[ZoneOffset])(
- isLeft(
- equalTo("(+01:01:0X is not a valid ISO-8601 format, expected digit at index 8)")
- )
- ) &&
- assert(stringify("+01:01:60").fromJson[ZoneOffset])(
- isLeft(
- equalTo(
- "(+01:01:60 is not a valid ISO-8601 format, illegal timezone offset second at index 8)"
- )
- )
- )
+ assert(stringify(" " * 10000).fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("X").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+X1:01:01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+0").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+0X:01:01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+19:01:01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01X01:01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01X").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:0").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:X1:01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:0X:01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:60:01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:01X").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:01X01").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:01:0").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:01:X1").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:01:0X").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset"))) &&
+ assert(stringify("+01:01:60").fromJson[ZoneOffset])(isLeft(containsString("expected a ZoneOffset")))
}
)
)
diff --git a/zio-json/shared/src/test/scala/zio/json/RoundTripSpec.scala b/zio-json/shared/src/test/scala/zio/json/RoundTripSpec.scala
index e13b653c6..3e62faf98 100644
--- a/zio-json/shared/src/test/scala/zio/json/RoundTripSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/RoundTripSpec.scala
@@ -1,7 +1,6 @@
-package testzio.json
+package zio.json
-import testzio.json.Gens._
-import zio.json._
+import zio.json.Gens._
import zio.json.ast.Json
import zio.test.Assertion._
import zio.test.TestAspect._
@@ -31,6 +30,9 @@ object RoundTripSpec extends ZIOSpecDefault {
test("bigInts") {
check(genBigInteger)(assertRoundtrips[java.math.BigInteger])
} @@ jvm(samples(10000)),
+ test("bigDecimals") {
+ check(genBigDecimal)(assertRoundtrips[java.math.BigDecimal])
+ } @@ jvm(samples(10000)),
test("floats") {
// NaN / Infinity is tested manually, because of == semantics
check(Gen.float.filter(java.lang.Float.isFinite))(assertRoundtrips[Float])
diff --git a/zio-json/shared/src/test/scala/zio/json/ast/JsonSpec.scala b/zio-json/shared/src/test/scala/zio/json/ast/JsonSpec.scala
index ab50390b2..6322cdb88 100644
--- a/zio-json/shared/src/test/scala/zio/json/ast/JsonSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/ast/JsonSpec.scala
@@ -1,12 +1,64 @@
package zio.json.ast
+import zio.Chunk
+import zio.json._
import zio.test.Assertion._
import zio.test._
+import java.math.BigInteger
+
object JsonSpec extends ZIOSpecDefault {
val spec: Spec[Environment, Any] =
suite("Json")(
+ suite("apply")(
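+        // Json.Num accepts every primitive numeric type as well as BigInt, BigInteger and
+        // BigDecimal; each overload below is checked against its expected string rendering.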
+ test("Num()") {
+ assertTrue(
+ Json.Num(0).toString == "0",
+ Json.Num(0.0).toString == "0.0",
+ Json.Num(1.0).toString == "1.0",
+ Json.Num(-0.0).toString == "0.0",
+ Json.Num(-1.0).toString == "-1.0",
+ Json.Num(7: Byte).toString == "7",
+ Json.Num(777: Short).toString == "777",
+ Json.Num(123456789).toString == "123456789",
+ Json.Num(1.2345678f).toString == "1.2345678",
+ Json.Num(1.2345678901234567).toString == "1.2345678901234567",
+ Json.Num(1234567890123456789L).toString == "1234567890123456789",
+ Json.Num(BigInteger.valueOf(1234567890123456789L)).toString == "1234567890123456789",
+ Json.Num(new BigInteger("12345678901234567890")).toString == "12345678901234567890",
+ Json.Num(BigInt(1234567890123456789L)).toString == "1234567890123456789",
+ Json.Num(BigInt("12345678901234567890")).toString == "12345678901234567890",
+ Json.Num(BigDecimal(1234567890123456789L)).toString == "1234567890123456789",
+ Json.Num(BigDecimal("12345678901234567890")).toString == "12345678901234567890"
+ )
+ },
+ test("Bool()") {
+ assertTrue(
+ Json.Bool.True eq Json.Bool(true),
+ Json.Bool.False eq Json.Bool(false)
+ )
+ },
+ test("()") {
+ assertTrue(
+ Json.Obj.empty eq Json(),
+ Json.Obj.empty eq Json.Obj(),
+ Json.Arr.empty eq Json.Arr()
+ )
+ },
+ test("(Chunk.empty)") {
+ assertTrue(
+ Json.Obj.empty eq Json.Obj(Chunk.empty),
+ Json.Arr.empty eq Json.Arr(Chunk.empty)
+ )
+ },
+ test("Obj()") {
+ assertTrue(
+ Json.Obj("key", Json.Str("value")).toString == """{"key":"value"}""",
+ Json.Obj("key", Json.Str("value")) == Json.Obj("key" -> Json.Str("value"))
+ )
+ }
+ ),
suite("delete")(
suite("scalar")(
test("success") {
@@ -27,7 +79,7 @@ object JsonSpec extends ZIOSpecDefault {
isRight(
equalTo(
Json.Obj(
- "id" -> Json.Num(8500),
+ "id" -> Json.Num(8500),
"user" -> Json.Obj(
"id" -> Json.Num(6200),
"name" -> Json.Str("Twitter API")
@@ -45,7 +97,7 @@ object JsonSpec extends ZIOSpecDefault {
isRight(
equalTo(
Json.Obj(
- "id" -> Json.Num(8500),
+ "id" -> Json.Num(8500),
"user" -> Json.Obj(
"name" -> Json.Str("Twitter API")
),
@@ -73,7 +125,7 @@ object JsonSpec extends ZIOSpecDefault {
val str: Json = Json.Str("hello")
val bool: Json = Json.Bool(true)
val arr: Json = Json.Arr(nul, num, str)
- val obj: Json = Json.Obj(
+ val obj: Json = Json.Obj(
"nul" -> nul,
"num" -> num,
"str" -> str,
@@ -86,6 +138,8 @@ object JsonSpec extends ZIOSpecDefault {
case fst :: snd :: Nil => fst != snd
case _ => false
})
+ assertTrue(Json.Obj.empty == Json.Obj(Chunk.empty))
+ assertTrue(Json.Arr.empty == Json.Arr(Chunk.empty))
},
test("object order does not matter for equality") {
val obj1 = Json.Obj(
@@ -145,6 +199,7 @@ object JsonSpec extends ZIOSpecDefault {
)
assertTrue(obj1.hashCode == obj2.hashCode)
+ assertTrue(Json.Obj.empty.hashCode == Json.Obj(Chunk.empty).hashCode)
}
),
suite("foldUp")(
@@ -171,7 +226,7 @@ object JsonSpec extends ZIOSpecDefault {
val obj =
Json.Obj(
"one" -> Json.Obj(
- "two" -> Json.Bool(true),
+ "two" -> Json.Bool(true),
"three" -> Json.Obj(
"four" -> Json.Null,
"five" -> Json.Obj(
@@ -243,6 +298,39 @@ object JsonSpec extends ZIOSpecDefault {
assert(tweet.get(combined))(
isRight(equalTo(Json.Str("twitter")))
)
+ },
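+      // `>>>` applies the left-hand cursor first and then the right-hand cursor to its result;
+      // the tests below use composite cursors on the right-hand side.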
+ test(">>>, array, filterType (second operand of >>> is complex)") {
+ val downEntities = JsonCursor.field("entities")
+ val downHashtag =
+ JsonCursor.isObject >>> JsonCursor.field("hashtags") >>> JsonCursor.isArray >>> JsonCursor.element(0)
+
+ val combined = downEntities >>> downHashtag
+
+ assert(tweet.get(combined))(
+ isRight(equalTo(Json.Str("twitter")))
+ )
+ },
+ test(">>>, combination of some methods of JsonCursor (second operand of >>> is complex)") {
+ val posts: Json = """{"posts": [{"id": 0, "title": "foo"}]}""".fromJson[Json].toOption.get
+
+ val downPosts = JsonCursor.field("posts")
+ val downTitle = JsonCursor.isArray >>> JsonCursor.element(0) >>> JsonCursor.isObject >>>
+ JsonCursor.field("title") >>> JsonCursor.isString
+ val combined = downPosts >>> downTitle
+
+ assert(posts.get(combined))(
+ isRight(equalTo(Json.Str("foo")))
+ )
+ },
+ test(">>>, identity") {
+ val obj = Json.Obj("a" -> Json.Num(1))
+
+ val fieldA = JsonCursor.field("a")
+ val identity = JsonCursor.identity
+
+ val num = obj.get(fieldA >>> identity)
+
+ assert(num)(isRight(equalTo(Json.Num(1))))
}
),
suite("intersect")(
@@ -271,7 +359,7 @@ object JsonSpec extends ZIOSpecDefault {
test("object, deep") {
val intersected = tweet.intersect(
Json.Obj(
- "id" -> Json.Num(8501),
+ "id" -> Json.Num(8501),
"user" -> Json.Obj(
"id" -> Json.Num(6200),
"name" -> Json.Str("Twitter API")
@@ -395,7 +483,7 @@ object JsonSpec extends ZIOSpecDefault {
assert(merged)(
equalTo(
Json.Obj(
- "id" -> Json.Num(8500),
+ "id" -> Json.Num(8500),
"user" -> Json.Obj(
"id" -> Json.Num(6200),
"name" -> Json.Str("Twitter API"),
@@ -451,7 +539,7 @@ object JsonSpec extends ZIOSpecDefault {
isRight(
equalTo(
Json.Obj(
- "id" -> Json.Num(8500),
+ "id" -> Json.Num(8500),
"entities" -> Json.Obj(
"id" -> Json.Num(6200),
"name" -> Json.Str("Twitter API")
@@ -497,7 +585,7 @@ object JsonSpec extends ZIOSpecDefault {
isRight(
equalTo(
Json.Obj(
- "id" -> Json.Num(8500),
+ "id" -> Json.Num(8500),
"user" -> Json.Obj(
"id" -> Json.Num(6200),
"name" -> Json.Str("Twitter API")
@@ -516,7 +604,7 @@ object JsonSpec extends ZIOSpecDefault {
isRight(
equalTo(
Json.Obj(
- "id" -> Json.Num(8500),
+ "id" -> Json.Num(8500),
"user" -> Json.Obj(
"id" -> Json.Num(6201),
"name" -> Json.Str("Twitter API")
@@ -542,7 +630,7 @@ object JsonSpec extends ZIOSpecDefault {
lazy val tweet: Json.Obj =
Json.Obj(
- "id" -> Json.Num(8500),
+ "id" -> Json.Num(8500),
"user" -> Json.Obj(
"id" -> Json.Num(6200),
"name" -> Json.Str("Twitter API")
diff --git a/zio-json/shared/src/test/scala/zio/json/internal/FieldEncoderHelperSpec.scala b/zio-json/shared/src/test/scala/zio/json/internal/FieldEncoderHelperSpec.scala
new file mode 100644
index 000000000..8dc33fe79
--- /dev/null
+++ b/zio-json/shared/src/test/scala/zio/json/internal/FieldEncoderHelperSpec.scala
@@ -0,0 +1,92 @@
+package zio.json
+package internal
+
+import zio.test._
+
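+// These specs exercise FieldEncoder.skip, which reports whether a field's value should be
+// skipped during encoding, depending on the withExplicitNulls / withExplicitEmptyCollections flags.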
+object FieldEncoderSpec extends ZIOSpecDefault {
+ val spec = suite("FieldEncoder")(
+ suite("encodeOrDefault")(
+ suite("OptionEncoder")(
+        test("should skip None by default when withExplicitNulls is false") {
+ val helper = FieldEncoder(
+ 1,
+ "test",
+ JsonEncoder.option(JsonEncoder.int),
+ withExplicitNulls = false,
+ withExplicitEmptyCollections = false
+ )
+ assertTrue(helper.skip(None))
+ },
+ test("should encode None when withExplicitNulls is true") {
+ val helper = FieldEncoder(
+ 1,
+ "test",
+ JsonEncoder.option(JsonEncoder.int),
+ withExplicitNulls = true,
+ withExplicitEmptyCollections = false
+ )
+ assertTrue(!helper.skip(None))
+ }
+ ),
+ suite("CollectionEncoder")(
+ test("should encode empty collections when withExplicitEmptyCollections is true") {
+ val helper = FieldEncoder(
+ 1,
+ "test",
+ implicitly[JsonEncoder[List[Int]]],
+ withExplicitNulls = false,
+ withExplicitEmptyCollections = true
+ )
+ assertTrue(!helper.skip(Nil))
+ },
+ test("should not encode empty collections when withExplicitEmptyCollections is false") {
+ val helper = FieldEncoder(
+ 1,
+ "test",
+ implicitly[JsonEncoder[List[Int]]],
+ withExplicitNulls = false,
+ withExplicitEmptyCollections = false
+ )
+ assertTrue(helper.skip(Nil))
+ }
+ ),
+ suite("for a case class")(
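+        // a derived case-class encoder is not an option or collection encoder, so these
+        // specs expect skip to stay false regardless of the two flags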
+ test("should encode case classes with empty collections when withExplicitEmptyCollections is true") {
+ case class Test(list: List[Int], option: Option[Int])
+ val helper = FieldEncoder(
+ 1,
+ "test",
+ DeriveJsonEncoder.gen[Test],
+ withExplicitNulls = false,
+ withExplicitEmptyCollections = true
+ )
+ assertTrue(!helper.skip(Test(Nil, None)))
+ },
+        test("should encode case classes with empty collections even when withExplicitEmptyCollections is false") {
+ case class Test(list: List[Int], option: Option[Int])
+ val helper = FieldEncoder(
+ 1,
+ "test",
+ DeriveJsonEncoder.gen[Test],
+ withExplicitNulls = false,
+ withExplicitEmptyCollections = false
+ )
+ assertTrue(!helper.skip(Test(Nil, None)))
+ },
+ test(
+ "should encode case classes with empty options when withExplicitEmptyCollections is false, even when withExplicitNulls is true"
+ ) {
+ case class Test(list: List[Int], option: Option[Int])
+ val helper = FieldEncoder(
+ 1,
+ "test",
+ DeriveJsonEncoder.gen[Test],
+ withExplicitNulls = true,
+ withExplicitEmptyCollections = false
+ )
+ assertTrue(!helper.skip(Test(Nil, None)))
+ }
+ )
+ )
+ )
+}
diff --git a/zio-json/shared/src/test/scala/zio/json/internal/SafeNumbersSpec.scala b/zio-json/shared/src/test/scala/zio/json/internal/SafeNumbersSpec.scala
new file mode 100644
index 000000000..673ff71a6
--- /dev/null
+++ b/zio-json/shared/src/test/scala/zio/json/internal/SafeNumbersSpec.scala
@@ -0,0 +1,427 @@
+package zio.json.internal
+
+import zio.ZIO
+import zio.json.Gens._
+import zio.test.Assertion._
+import zio.test.TestAspect.jvmOnly
+import zio.test._
+
+object SafeNumbersSpec extends ZIOSpecDefault {
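+  // SafeNumbers is expected to parse numeric strings without throwing; results come back as
+  // specialised option-like values (e.g. DoubleSome / DoubleNone rather than Option[Double]),
+  // presumably to avoid boxing primitives on the parsing hot path.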
+ val spec =
+ suite("SafeNumbers")(
+ suite("BigDecimal")(
+ test("valid") {
+ check(genBigDecimal)(x => assert(SafeNumbers.bigDecimal(x.toString))(isSome(equalTo(x))))
+ },
+ test("invalid edge cases") {
+ val invalidBigDecimalEdgeCases = List(
+ "N",
+ "Inf",
+ "-NaN",
+ "+NaN",
+ "e1",
+ "1.1.1",
+ "1 ",
+ "NaN",
+ "Infinity",
+ "+Infinity",
+ "-Infinity",
+ "1eO",
+ "1e+2147483648",
+ "1e+3147483648",
+ "9" * 99,
+ "0." + "9" * 99
+ ).map(s => SafeNumbers.bigDecimal(s))
+
+ assert(invalidBigDecimalEdgeCases)(forall(isNone))
+ },
+ test("valid edge cases") {
+          val validBigDecimalEdgeCases = List(
+ ".0",
+ "-.0",
+ "0",
+ "0.0",
+ "-0.0", // zeroes
+ "0000.1",
+ "0.00001",
+ "000.00001000" // various trailing zeros, should be preserved
+ )
+
+          check(Gen.fromIterable(validBigDecimalEdgeCases)) { s =>
+ assert(SafeNumbers.bigDecimal(s).get.compareTo(new java.math.BigDecimal(s)))(equalTo(0))
+ }
+ },
+ test("invalid (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.bigDecimal(s))(isNone))
+ }
+ ),
+ suite("BigInteger")(
+ test("valid edge cases") {
+ val inputs = List(
+ "0",
+ "0123",
+ "-123",
+ "-9223372036854775807",
+ "9223372036854775806",
+ "-9223372036854775809",
+ "9223372036854775808"
+ )
+
+ check(Gen.fromIterable(inputs)) { s =>
+ assert(SafeNumbers.bigInteger(s))(
+ isSome(
+ equalTo(new java.math.BigInteger(s))
+ )
+ )
+ }
+ },
+ test("invalid edge cases") {
+ val inputs = List("0e+1", "01E-1", "0.1", "", "1 ")
+
+ check(Gen.fromIterable(inputs))(s => assert(SafeNumbers.bigInteger(s))(isNone))
+ },
+ test("valid") {
+ check(genBigInteger)(x => assert(SafeNumbers.bigInteger(x.toString, 2048))(isSome(equalTo(x))))
+ },
+ test("invalid (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.bigInteger(s))(isNone))
+ }
+ ),
+ suite("BigInt")(
+ test("valid edge cases") {
+ val inputs = List(
+ "0",
+ "0123",
+ "-123",
+ "-9223372036854775807",
+ "9223372036854775806",
+ "-9223372036854775809",
+ "9223372036854775808"
+ )
+
+ check(Gen.fromIterable(inputs)) { s =>
+ assert(SafeNumbers.bigInt(s))(
+ isSome(
+ equalTo(BigInt(s))
+ )
+ )
+ }
+ },
+ test("invalid edge cases") {
+ val inputs = List("0e+1", "01E-1", "0.1", "", "1 ")
+
+ check(Gen.fromIterable(inputs))(s => assert(SafeNumbers.bigInt(s))(isNone))
+ },
+ test("valid") {
+ check(genBigInteger)(x => assert(SafeNumbers.bigInt(x.toString, 2048))(isSome(equalTo(BigInt(x)))))
+ },
+ test("invalid (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.bigInt(s))(isNone))
+ }
+ ),
+ suite("Byte")(
+ test("valid") {
+ check(Gen.byte(Byte.MinValue, Byte.MaxValue)) { x =>
+ val r = SafeNumbers.byte(x.toString)
+ assert(r)(equalTo(ByteSome(x))) && assert(r.isEmpty)(equalTo(false))
+ }
+ },
+ test("invalid (numbers)") {
+ check(Gen.int.filter(x => x < Byte.MinValue || x > Byte.MaxValue)) { x =>
+ assert(SafeNumbers.byte(x.toString))(equalTo(ByteNone))
+ }
+ },
+ test("invalid (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.byte(s).isEmpty)(equalTo(true)))
+ },
+ test("ByteNone") {
+ ZIO.attempt(ByteNone.value).flip.map(error => assertTrue(error.isInstanceOf[NoSuchElementException]))
+ }
+ ),
+ suite("Double")(
+ test("valid") {
+ check(Gen.double.filterNot(_.isNaN)) { x =>
+ val r = SafeNumbers.double(x.toString)
+ assert(r)(equalTo(DoubleSome(x))) && assert(r.isEmpty)(equalTo(false))
+ }
+ },
+ test("valid (from Int)") {
+ check(Gen.int)(x => assert(SafeNumbers.double(x.toString))(equalTo(DoubleSome(x.toDouble))))
+ },
+ test("valid (from Long)") {
+ check(Gen.long)(x => assert(SafeNumbers.double(x.toString))(equalTo(DoubleSome(x.toDouble))))
+ },
+ test("valid (from BigDecimal)") {
+ check(genBigDecimal)(x => assert(SafeNumbers.double(x.toString))(equalTo(DoubleSome(x.doubleValue))))
+ },
+ test("invalid edge cases") {
+ val inputs = List(
+ "N",
+ "Inf",
+ "Info",
+ "-NaN",
+ "+NaN",
+ "e1",
+ "1.1.1",
+ "1 ",
+ "1eO",
+ "1e+2147483648",
+ "1e+3147483648",
+ "9" * 99,
+ "0." + "9" * 99
+ )
+
+ check(Gen.fromIterable(inputs))(s => assert(SafeNumbers.double(s))(equalTo(DoubleNone)))
+ },
+ test("valid edge cases") {
+ val inputs = List(
+ ".0",
+ "-.0",
+ "0",
+ "0.0",
+ "-0.0", // zeroes
+ "0000.1",
+ "0.00001",
+ "0.0e-12",
+ "1.1e-12",
+ "1.1e-1234",
+ "1.1e+1234",
+ "000.00001000", // trailing zeros
+ "NaN",
+ "92233720368547758070", // overflows a Long significand
+ "Infinity",
+ "+Infinity",
+ "-Infinity",
+ "503599627370496E+13", // fast path
+ "503599627370496E+23", // fast path with slop
+ "3.976210887433566E-281", // rounds if a naive scaling is used
+ "9007199254740993.0", // round-down, halfway
+ "18014398509481986.0",
+ "9223372036854776832.0",
+ "9007199254740995.0", // round-up, halfway
+ "18014398509481990.0",
+ "9223372036854778880.0",
+ "9223372036854776833.0", // round-up, above halfway
+ "36028797018963967.0", // 2^n - 1 integer regression
+ "2.2250738585072014E-308",
+ "2.2250738585072013E-308",
+ "2.2250738585072012E-308",
+ "2.2250738585072011E-308"
+ )
+
+ check(Gen.fromIterable(inputs)) { s =>
+ // better to do the comparison on strings to deal with NaNs
+ assert(SafeNumbers.double(s).toString)(
+ equalTo(DoubleSome(s.toDouble).toString)
+ )
+ }
+ },
+ test("valid magic doubles") {
+ assert(SafeNumbers.double("NaN"))(not(equalTo(DoubleNone))) &&
+ assert(SafeNumbers.double("Infinity"))(not(equalTo(DoubleNone))) &&
+ assert(SafeNumbers.double("+Infinity"))(not(equalTo(DoubleNone))) &&
+ assert(SafeNumbers.double("-Infinity"))(not(equalTo(DoubleNone)))
+ },
+ test("invalid doubles (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.double(s).isEmpty)(equalTo(true)))
+ },
+ test("DoubleNone") {
+ ZIO.attempt(DoubleNone.value).flip.map(error => assertTrue(error.isInstanceOf[NoSuchElementException]))
+ }
+ ),
+ suite("Float")(
+ test("valid") {
+ check(Gen.float.filterNot(_.isNaN)) { x =>
+ val r = SafeNumbers.float(x.toString)
+ assert(r)(equalTo(FloatSome(x))) && assert(r.isEmpty)(equalTo(false))
+ }
+ },
+ test("large mantissa") {
+ // https://github.com/zio/zio-json/issues/221
+ assert(SafeNumbers.float("1.199999988079071"))(equalTo(FloatSome(1.1999999f)))
+ } @@ jvmOnly,
+ test("valid (from Int)") {
+ check(Gen.int)(x => assert(SafeNumbers.float(x.toString))(equalTo(FloatSome(x.toFloat))))
+ },
+ test("valid (from Long)") {
+ check(Gen.long)(x => assert(SafeNumbers.float(x.toString))(equalTo(FloatSome(x.toFloat))))
+ },
+ test("invalid edge cases") {
+ val inputs = List(
+ "N",
+ "Inf",
+ "Info",
+ "-NaN",
+ "+NaN",
+ "e1",
+ "1.1.1",
+ "1eO",
+ "1e+2147483648",
+ "1e+3147483648",
+ "9" * 99,
+ "0." + "9" * 99
+ )
+
+ check(Gen.fromIterable(inputs))(s => assert(SafeNumbers.float(s))(equalTo(FloatNone)))
+ },
+ test("valid edge cases") {
+ val inputs = List(
+ ".0",
+ "-.0",
+ "0",
+ "0.0",
+ "-0.0", // zeroes
+ "0000.1",
+ "0.00001",
+ "0.0e-12",
+ "1.1e-12",
+ "1.1e-1234",
+ "1.1e+1234",
+ "000.00001000", // trailing zeros
+ "NaN",
+ "92233720368547758070", // overflows a Long significand
+ "Infinity",
+ "+Infinity",
+ "-Infinity",
+ "16777217.0", // round-down, halfway
+ "33554434.0",
+ "17179870208.0",
+ "16777219.0", // round-up, halfway
+ "33554438.0",
+ "17179872256.0",
+ "33554435.0", // round-up, above halfway
+ "17179870209.0",
+ "37930954282500097", // fast path with `toFloat`
+ "48696272630054913",
+ // TODO: uncomment after release of Scala Native 0.5.7
+ // "1.00000017881393432617187499", // check exactly halfway, round-up at halfway
+ // "1.000000178813934326171875",
+ // "1.00000017881393432617187501",
+ "36028797018963967.0", // 2^n - 1 integer regression
+ "1.17549435E-38",
+ "1.17549434E-38",
+ "1.17549433E-38",
+ "1.17549432E-38",
+ "1.17549431E-38",
+ "1.17549430E-38",
+ "1.17549429E-38",
+ "1.17549428E-38"
+ )
+
+ check(Gen.fromIterable(inputs)) { s =>
+ // better to do the comparison on strings to deal with NaNs
+ assert(SafeNumbers.float(s).toString)(
+ equalTo(FloatSome(s.toFloat).toString)
+ )
+ }
+ },
+ test("valid (from Double)") {
+ check(Gen.double.filterNot(_.isNaN)) { x =>
+ assert(SafeNumbers.float(x.toString))(equalTo(FloatSome(x.toFloat)))
+ }
+ },
+ test("valid (from BigDecimal)") {
+ check(genBigDecimal)(i => assert(SafeNumbers.float(i.toString))(equalTo(FloatSome(i.floatValue))))
+ },
+ test("invalid float (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.float(s).isEmpty)(equalTo(true)))
+ },
+ test("FloatNone") {
+ ZIO.attempt(FloatNone.value).flip.map(error => assertTrue(error.isInstanceOf[NoSuchElementException]))
+ }
+ ),
+ suite("Int")(
+ test("valid edge cases") {
+ val input = List("00", "01", "0000001", "-2147483648", "2147483647")
+
+ check(Gen.fromIterable(input))(x => assert(SafeNumbers.int(x))(equalTo(IntSome(x.toInt))))
+ },
+ test("valid") {
+ check(Gen.int) { x =>
+ val r = SafeNumbers.int(x.toString)
+ assert(r)(equalTo(IntSome(x))) && assert(r.isEmpty)(equalTo(false))
+ }
+ },
+ test("invalid (edge cases)") {
+ val input = List(
+ "1e3",
+ "1E-2",
+ "0.1",
+ "",
+ "1 ",
+ "-2147483649",
+ "2147483648"
+ )
+
+ check(Gen.fromIterable(input))(x => assert(SafeNumbers.int(x))(equalTo(IntNone)))
+ },
+ test("invalid (out of range)") {
+ check(Gen.long.filter(i => i < Int.MinValue || i > Int.MaxValue))(d =>
+ assert(SafeNumbers.int(d.toString))(equalTo(IntNone))
+ )
+ },
+ test("invalid (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.int(s).isEmpty)(equalTo(true)))
+ },
+ test("IntNone") {
+ ZIO.attempt(IntNone.value).flip.map(error => assertTrue(error.isInstanceOf[NoSuchElementException]))
+ }
+ ),
+ suite("Long")(
+ test("valid edge cases") {
+ val input = List("00", "01", "0000001", "-9223372036854775808", "9223372036854775807")
+
+ check(Gen.fromIterable(input))(x => assert(SafeNumbers.long(x))(equalTo(LongSome(x.toLong))))
+ },
+ test("invalid (edge cases)") {
+ val input = List(
+ "1e3foo",
+ "1E-2",
+ "0.1",
+ "",
+ "1 ",
+ "-9223372036854775809",
+ "9223372036854775808"
+ )
+
+ check(Gen.fromIterable(input))(x => assert(SafeNumbers.long(x))(equalTo(LongNone)))
+ },
+ test("valid") {
+ check(Gen.long) { x =>
+ val r = SafeNumbers.long(x.toString)
+ assert(r)(equalTo(LongSome(x))) && assert(r.isEmpty)(equalTo(false))
+ }
+ },
+ test("invalid (out of range)") {
+ val outOfRange = genBigInteger.filter(_.bitLength > 63)
+
+ check(outOfRange)(x => assert(SafeNumbers.long(x.toString))(equalTo(LongNone)))
+ },
+ test("invalid (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.long(s).isEmpty)(equalTo(true)))
+ },
+ test("LongNone") {
+ ZIO.attempt(LongNone.value).flip.map(error => assertTrue(error.isInstanceOf[NoSuchElementException]))
+ }
+ ),
+ suite("Short")(
+ test("valid") {
+ check(Gen.short) { x =>
+ val r = SafeNumbers.short(x.toString)
+ assert(r)(equalTo(ShortSome(x))) && assert(r.isEmpty)(equalTo(false))
+ }
+ },
+ test("invalid (out of range)") {
+ check(Gen.int.filter(i => i < Short.MinValue || i > Short.MaxValue))(d =>
+ assert(SafeNumbers.short(d.toString))(equalTo(ShortNone))
+ )
+ },
+ test("invalid (text)") {
+ check(genAlphaLowerString)(s => assert(SafeNumbers.short(s).isEmpty)(equalTo(true)))
+ },
+ test("ShortNone") {
+ ZIO.attempt(ShortNone.value).flip.map(error => assertTrue(error.isInstanceOf[NoSuchElementException]))
+ }
+ )
+ )
+}
diff --git a/zio-json/jvm/src/test/scala/zio/json/internal/StringMatrixSpec.scala b/zio-json/shared/src/test/scala/zio/json/internal/StringMatrixSpec.scala
similarity index 54%
rename from zio-json/jvm/src/test/scala/zio/json/internal/StringMatrixSpec.scala
rename to zio-json/shared/src/test/scala/zio/json/internal/StringMatrixSpec.scala
index 8ae865a76..243d70550 100644
--- a/zio-json/jvm/src/test/scala/zio/json/internal/StringMatrixSpec.scala
+++ b/zio-json/shared/src/test/scala/zio/json/internal/StringMatrixSpec.scala
@@ -1,52 +1,54 @@
-package testzio.json.internal
+package zio.json.internal
-import zio.json.internal._
import zio.test.Assertion._
+import zio.test.TestAspect._
import zio.test._
object StringMatrixSpec extends ZIOSpecDefault {
val spec: Spec[Environment, Any] = suite("StringMatrix")(
test("basic positive succeeds") {
- val names = List("a", "b")
- val aliases = List("c" -> 0, "d" -> 1)
+ val names = Array("\uD83D\uDE00" /* a surrogate pair for the grinning face */, "a", "b")
+ val aliases = Array("c" -> 0, "d" -> 1)
val asserts =
- names.map(s => matcher(names, aliases, s).contains(s)) ++
- aliases.map(a => matcher(names, aliases, a._1).contains(a._1))
+ (names.map(s => matcher(names, aliases, s).contains(s)) ++
+ aliases.map(a => matcher(names, aliases, a._1).contains(a._1))).toVector
assert(asserts)(forall(isTrue))
},
test("positive succeeds") {
// Watch out: TestStrings were passed
check(genTestStrings) { xs =>
- val asserts = xs.map(s => matcher(xs, List.empty, s).contains(s))
+ val asserts = xs.map(s => matcher(xs, Array.empty, s).contains(s)).toVector
assert(asserts)(forall(isTrue))
}
},
test("negative fails") {
- check(genTestStrings.filterNot(_.startsWith("wibble")))(xs => assert(matcher(xs, List.empty, "wibble"))(isEmpty))
+ check(genTestStrings.filter(_.forall(s => !s.startsWith("wibble"))))(xs =>
+ assert(matcher(xs, Array.empty, "wibble").toVector)(isEmpty)
+ )
},
test("substring fails") {
- check(genTestStrings.filter(_.length > 1))(xs => assert(matcher(xs, List.empty, xs.mkString))(isEmpty))
+ check(genTestStrings.filter(_.length > 1))(xs => assert(matcher(xs, Array.empty, xs.mkString).toVector)(isEmpty))
},
test("trivial") {
- check(genNonEmptyString)(s => assert(matcher(List(s), List.empty, s))(equalTo(List(s))))
+ check(genNonEmptyString)(s => assert(matcher(Array(s), Array.empty, s).toVector)(equalTo(Vector(s))))
},
test("exact match is a substring") {
assert(
matcher(
- List("retweeted_status", "retweeted"),
- List.empty,
+ Array("retweeted_status", "retweeted"),
+ Array.empty,
"retweeted"
- )
- )(equalTo(List("retweeted")))
+ ).toVector
+ )(equalTo(Vector("retweeted")))
},
test("first resolves to field index") {
check(genTestStrings) { xs =>
- val m = new StringMatrix(xs.toArray)
+ val m = new StringMatrix(xs)
val asserts = xs.indices.map { i =>
val test = xs(i)
- var bs = test.zipWithIndex.foldLeft(m.initial) { case (bs, (c, i)) =>
- m.update(bs, i, c.toInt)
+ var bs = test.zipWithIndex.foldLeft(m.initial) { case (bs, (c, i)) =>
+ m.update(bs, i, c)
}
bs = m.exact(bs, test.length)
m.first(bs) == i
@@ -58,8 +60,8 @@ object StringMatrixSpec extends ZIOSpecDefault {
// Watch out: TestStrings were passed
check(genTestStringsAndAliases) { case (xs, aliases) =>
val asserts =
- xs.map(s => matcher(xs, List.empty, s).contains(s)) ++
- aliases.map(alias => matcher(xs, aliases, alias._1).contains(alias._1))
+ (xs.map(s => matcher(xs, Array.empty, s).contains(s)) ++
+ aliases.map(alias => matcher(xs, aliases, alias._1).contains(alias._1))).toVector
assert(asserts)(forall(isTrue))
}
@@ -70,37 +72,37 @@ object StringMatrixSpec extends ZIOSpecDefault {
xs.exists(_.startsWith("wibble")) || aliases.exists(_._1.startsWith("wibble"))
}
) { case (xs, aliases) =>
- assert(matcher(xs, aliases, "wibble"))(isEmpty)
+ assert(matcher(xs, aliases, "wibble").toVector)(isEmpty)
}
},
test("alias substring fails") {
check(
genTestStringsAndAliases.filter { case (xs, aliases) => xs.length + aliases.length > 1 }
) { case (xs, aliases) =>
- assert(matcher(xs, aliases, xs.mkString + aliases.map(_._1).mkString))(isEmpty)
+ assert(matcher(xs, aliases, xs.mkString + aliases.map(_._1).mkString).toVector)(isEmpty)
}
},
test("alias trivial") {
check(genNonEmptyString.filterNot(_.startsWith("wibble")))(s =>
- assert(matcher(List("wibble"), List(s -> 0), s))(equalTo(List(s)))
+ assert(matcher(Array("wibble"), Array(s -> 0), s).toVector)(equalTo(Vector(s)))
)
},
test("alias exact match is a substring") {
assert(
matcher(
- List("wibble"),
- List("retweeted_status" -> 0, "retweeted" -> 0),
+ Array("wibble"),
+ Array("retweeted_status" -> 0, "retweeted" -> 0),
"retweeted"
- )
- )(equalTo(List("retweeted")))
+ ).toVector
+ )(equalTo(Vector("retweeted")))
},
test("alias first resolves to aliased field index") {
check(genTestStringsAndAliases) { case (xs, aliases) =>
- val m = new StringMatrix(xs.toArray, aliases.toArray)
+ val m = new StringMatrix(xs, aliases)
val asserts = aliases.indices.map { i =>
val test = aliases(i)._1
- var bs = test.zipWithIndex.foldLeft(m.initial) { case (bs, (c, i)) =>
- m.update(bs, i, c.toInt)
+ var bs = test.zipWithIndex.foldLeft(m.initial) { case (bs, (c, i)) =>
+ m.update(bs, i, c)
}
bs = m.exact(bs, test.length)
m.first(bs) == aliases(i)._2
@@ -108,43 +110,46 @@ object StringMatrixSpec extends ZIOSpecDefault {
assert(asserts)(forall(isTrue))
}
}
- )
+ ) @@ jvm(samples(100)) @@ js(samples(10)) @@ native(samples(10))
val genNonEmptyString =
Gen.alphaNumericString.filter(_.nonEmpty)
val genTestStrings =
for {
- n <- Gen.int(1, 63)
+ n <- Gen.int(1, 64)
xs <- Gen.setOfN(n)(genNonEmptyString)
- } yield xs.toList
+ } yield xs.toArray
val genTestStringsAndAliases =
for {
- xsn <- Gen.int(1, 63)
+ xsn <- Gen.int(1, 64)
xs <- Gen.setOfN(xsn)(genNonEmptyString)
- an <- Gen.int(0, 63 - xsn)
- aliasF <- Gen.setOfN(an)(genNonEmptyString.filter(a => !xs.contains(a))).map(_.toList)
- aliasN <- Gen.listOfN(an)(Gen.int(0, xsn - 1))
- } yield (xs.toList, aliasF zip aliasN)
+ an <- Gen.int(0, 64 - xsn)
+ aliasF <- Gen.setOfN(an)(genNonEmptyString.filter(a => !xs.contains(a))).map(_.toArray)
+ aliasN <- Gen.listOfN(an)(Gen.int(0, xsn - 1)).map(_.toArray)
+ } yield (xs.toArray, aliasF zip aliasN)
- private def matcher(xs: List[String], aliases: List[(String, Int)], test: String): List[String] = {
- val m = new StringMatrix(xs.toArray, aliases.toArray)
- var bs = test.zipWithIndex.foldLeft(m.initial) { case (bs, (c, i)) =>
- m.update(bs, i, c.toInt)
+ private def matcher(xs: Array[String], aliases: Array[(String, Int)], test: String): Array[String] = {
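+    // Drive the StringMatrix one character at a time, require an exact-length match, and then
+    // collect every field or alias name whose bit is still set in the resulting bitset.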
+ val m = new StringMatrix(xs, aliases)
+ var bs = test.foldLeft(m.initial) {
+ var i = 0
+ (bs, c) =>
+ val nm = m.update(bs, i, c)
+ i += 1
+ nm
}
bs = m.exact(bs, test.length)
matches(xs ++ aliases.map(_._1), bs)
}
- private def matches(xsAndAliases: List[String], bitset: Long): List[String] = {
- var hits: List[String] = Nil
- var i = 0
+ private def matches(xsAndAliases: Array[String], bitset: Long): Array[String] = {
+ val hits = Array.newBuilder[String]
+ var i = 0
while (i < xsAndAliases.length) {
- if (((bitset >>> i) & 1L) == 1L)
- hits = xsAndAliases(i) :: hits
+ if (((bitset >>> i) & 1L) != 0) hits += xsAndAliases(i)
i += 1
}
- hits
+ hits.result()
}
}