From 6a43338a9b49828e7ce0ff49f3d0045c770323df Mon Sep 17 00:00:00 2001
From: Nikita Volkov
Date: Sat, 9 Dec 2023 14:13:52 +0300
Subject: [PATCH] Modernize

---
 .github/workflows/check.yaml                  |  30 +++
 .github/workflows/integrate.yaml              |  57 ------
 .../workflows/on-push-to-master-or-pr.yaml    |  17 ++
 .github/workflows/on-push-to-release.yaml     |  32 ++++
 README.md                                     |   9 +-
 bench/Main.hs                                 |  50 ++---
 bench/Main/Aeson.hs                           | 175 ++++++++++++------
 bench/Main/BufferBuilder.hs                   |  57 ------
 cabal.project                                 |   1 +
 demo/Main.hs                                  |   1 +
 jsonifier.cabal                               |  28 ++-
 library/Jsonifier.hs                          |  12 +-
 library/Jsonifier/Ffi.hs                      |   1 +
 library/Jsonifier/Prelude.hs                  |   6 +-
 test/Main.hs                                  |  17 +-
 test/Main/Util/HedgehogGens.hs                |   3 +
 16 files changed, 259 insertions(+), 237 deletions(-)
 create mode 100644 .github/workflows/check.yaml
 delete mode 100644 .github/workflows/integrate.yaml
 create mode 100644 .github/workflows/on-push-to-master-or-pr.yaml
 create mode 100644 .github/workflows/on-push-to-release.yaml
 delete mode 100644 bench/Main/BufferBuilder.hs
 create mode 100644 cabal.project

diff --git a/.github/workflows/check.yaml b/.github/workflows/check.yaml
new file mode 100644
index 0000000..94d3b99
--- /dev/null
+++ b/.github/workflows/check.yaml
@@ -0,0 +1,30 @@
+name: Compile, test and check the docs
+
+on:
+  workflow_call:
+
+jobs:
+
+  check:
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - ghc: 8.8.1
+            ghc-options: ""
+            ignore-haddock: true
+            ignore-cabal-check: true
+          - ghc: latest
+            ignore-cabal-check: true
+
+    runs-on: ubuntu-latest
+
+    steps:
+
+      - uses: nikita-volkov/build-and-test-cabal-package.github-action@v1
+        with:
+          ghc: ${{matrix.ghc}}
+          ghc-options: ${{matrix.ghc-options}}
+          ignore-haddock: ${{matrix.ignore-haddock}}
+          ignore-cabal-check: ${{matrix.ignore-cabal-check}}
diff --git a/.github/workflows/integrate.yaml b/.github/workflows/integrate.yaml
deleted file mode 100644
index 2dea82c..0000000
--- a/.github/workflows/integrate.yaml
+++ /dev/null
@@ -1,57 +0,0 @@
-on:
-  push:
-    branches:
-      - master
-      - dev
-
-jobs:
-
-  check-formatting:
-
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: mrkkrp/ormolu-action@v7
-
-  build:
-
-    strategy:
-      matrix:
-        include:
-          - ghc: 8.4.2
-            text: 1.2.5.0
-          - ghc: 9.4.2
-            text: 2.0.1
-
-    runs-on: ubuntu-latest
-
-    steps:
-
-      - uses: actions/checkout@v2
-
-      - name: Setup Haskell
-        uses: haskell/actions/setup@v2
-        with:
-          ghc-version: ${{ matrix.ghc }}
-          cabal-version: 3.8
-
-      - run: |
-          echo "packages: ." > cabal.project
-          echo "constraints:" >> cabal.project
-          echo "  text == ${{ matrix.text }}" >> cabal.project
-      - run: cabal update --enable-tests --enable-benchmarks
-      - run: cabal freeze --enable-tests --enable-benchmarks
-
-      - uses: actions/cache@v2
-        with:
-          path: |
-            ~/.cabal/store
-            dist-newstyle
-          key: ${{ runner.os }}-${{ matrix.ghc }}-${{ hashFiles('cabal.project.freeze') }}
-          restore-keys: |
-            ${{ runner.os }}-${{ matrix.ghc }}-
-
-      - run: cabal build --enable-tests --enable-benchmarks
-      - run: cabal test --enable-tests --enable-benchmarks
-
-      - run: cabal haddock --enable-tests --enable-benchmarks
diff --git a/.github/workflows/on-push-to-master-or-pr.yaml b/.github/workflows/on-push-to-master-or-pr.yaml
new file mode 100644
index 0000000..79c21f5
--- /dev/null
+++ b/.github/workflows/on-push-to-master-or-pr.yaml
@@ -0,0 +1,17 @@
+name: Compile, test and check the docs
+
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+
+jobs:
+
+  format:
+    uses: nikita-volkov/haskell-hackage-lib-github-actions-workflows/.github/workflows/format.yaml@v2
+    secrets: inherit
+
+  check:
+    uses: ./.github/workflows/check.yaml
+    secrets: inherit
diff --git a/.github/workflows/on-push-to-release.yaml b/.github/workflows/on-push-to-release.yaml
new file mode 100644
index 0000000..9024112
--- /dev/null
+++ b/.github/workflows/on-push-to-release.yaml
@@ -0,0 +1,32 @@
+name: Release the lib to Hackage
+
+on:
+  push:
+    branches:
+      - supermajor
+      - major
+      - minor
+      - patch
+
+concurrency:
+  group: release
+  cancel-in-progress: false
+
+jobs:
+
+  format:
+    uses: nikita-volkov/haskell-hackage-lib-github-actions-workflows/.github/workflows/format.yaml@v2
+    secrets: inherit
+
+  check:
+    uses: ./.github/workflows/check.yaml
+    secrets: inherit
+
+  release:
+    needs:
+      - format
+      - check
+    uses: nikita-volkov/haskell-hackage-lib-github-actions-workflows/.github/workflows/release.yaml@v2
+    secrets: inherit
+    with:
+      prefix-tag-with-v: false
diff --git a/README.md b/README.md
index 7289a14..17e9799 100644
--- a/README.md
+++ b/README.md
@@ -14,49 +14,42 @@ becomes less drastic.
 ## Benchmarks
 
 Following are the benchmark results comparing the performance
-of encoding typical documents using this library, "aeson" and "buffer-builder".
+of encoding typical documents using this library and "aeson".
 Every approach is measured on Twitter API data of sizes ranging from roughly 1kB to 60MB.
 "aeson" stands for "aeson" producing a strict bytestring,
 "lazy-aeson" - lazy bytestring,
 "lazy-aeson-untrimmed-32k" - lazy bytestring using an untrimmed builder strategy with allocation of 32k.
-"buffer-builder" is another library providing an alternative JSON encoder.
 
 ```
 1kB/jsonifier                            mean 2.054 μs  ( +- 30.83 ns  )
 1kB/aeson                                mean 6.456 μs  ( +- 126.7 ns  )
 1kB/lazy-aeson                           mean 6.338 μs  ( +- 169.1 ns  )
 1kB/lazy-aeson-untrimmed-32k             mean 6.905 μs  ( +- 280.2 ns  )
-1kB/buffer-builder                       mean 5.550 μs  ( +- 113.2 ns  )
 
 6kB/jsonifier                            mean 12.80 μs  ( +- 196.9 ns  )
 6kB/aeson                                mean 31.28 μs  ( +- 733.2 ns  )
 6kB/lazy-aeson                           mean 30.30 μs  ( +- 229.5 ns  )
 6kB/lazy-aeson-untrimmed-32k             mean 29.17 μs  ( +- 371.3 ns  )
-6kB/buffer-builder                       mean 30.39 μs  ( +- 387.2 ns  )
 
 60kB/jsonifier                           mean 122.9 μs  ( +- 1.492 μs  )
 60kB/aeson                               mean 258.4 μs  ( +- 1.000 μs  )
 60kB/lazy-aeson                          mean 259.4 μs  ( +- 4.494 μs  )
 60kB/lazy-aeson-untrimmed-32k            mean 255.7 μs  ( +- 3.239 μs  )
-60kB/buffer-builder                      mean 309.0 μs  ( +- 3.907 μs  )
 
 600kB/jsonifier                          mean 1.299 ms  ( +- 16.44 μs  )
 600kB/aeson                              mean 3.389 ms  ( +- 106.8 μs  )
 600kB/lazy-aeson                         mean 2.520 ms  ( +- 45.51 μs  )
 600kB/lazy-aeson-untrimmed-32k           mean 2.509 ms  ( +- 30.76 μs  )
-600kB/buffer-builder                     mean 3.012 ms  ( +- 85.22 μs  )
 
 6MB/jsonifier                            mean 20.91 ms  ( +- 821.7 μs  )
 6MB/aeson                                mean 30.74 ms  ( +- 509.4 μs  )
 6MB/lazy-aeson                           mean 24.83 ms  ( +- 184.3 μs  )
 6MB/lazy-aeson-untrimmed-32k             mean 24.93 ms  ( +- 383.2 μs  )
-6MB/buffer-builder                       mean 32.98 ms  ( +- 700.1 μs  )
 
 60MB/jsonifier                           mean 194.8 ms  ( +- 13.93 ms  )
 60MB/aeson                               mean 276.0 ms  ( +- 5.194 ms  )
 60MB/lazy-aeson                          mean 246.9 ms  ( +- 3.122 ms  )
 60MB/lazy-aeson-untrimmed-32k            mean 245.1 ms  ( +- 1.050 ms  )
-60MB/buffer-builder                      mean 312.0 ms  ( +- 4.896 ms  )
 ```
 
 The benchmark suite is bundled with the package.
diff --git a/bench/Main.hs b/bench/Main.hs
index 623b246..205bee2 100644
--- a/bench/Main.hs
+++ b/bench/Main.hs
@@ -1,17 +1,17 @@
 module Main where
 
+import Criterion.Main
 import qualified Data.Aeson
 import qualified Data.ByteString.Char8 as Char8ByteString
 import qualified Data.ByteString.Lazy
-import Gauge.Main
 import qualified Jsonifier
 import qualified Main.Aeson
-import qualified Main.BufferBuilder as BufferBuilder
 import qualified Main.Jsonifier
 import qualified Main.Model as Model
 import qualified Text.Builder as TextBuilder
 import Prelude
 
+main :: IO ()
 main =
   do
     twitter1Data <- load "samples/twitter1.json"
@@ -23,29 +23,28 @@ main =
 
     -- Ensure that encoders are correct
     test "jsonifier" encodeWithJsonifier twitter10Data
-    test "buffer-builder" BufferBuilder.encodeResult twitter10Data
     test "aeson" encodeWithAeson twitter10Data
 
     -- Print out the data sizes of samples
-    TextBuilder.putLnToStdOut $
-      let sampleDataSize =
-            TextBuilder.dataSizeInBytesInDecimal ','
-              . Char8ByteString.length
-              . encodeWithJsonifier
-          sample sampleName sampleData =
-            "- " <> TextBuilder.text sampleName <> ": " <> sampleDataSize sampleData
-       in "Input data sizes report:\n"
-            <> sample "twitter with 1 objects" twitter1Data
-            <> "\n"
-            <> sample "twitter with 10 objects" twitter10Data
-            <> "\n"
-            <> sample "twitter with 100 objects" twitter100Data
-            <> "\n"
-            <> sample "twitter with 1,000 objects" twitter1000Data
-            <> "\n"
-            <> sample "twitter with 10,000 objects" twitter10000Data
-            <> "\n"
-            <> sample "twitter with 100,000 objects" twitter100000Data
+    TextBuilder.putLnToStdOut
+      $ let sampleDataSize =
+              TextBuilder.dataSizeInBytesInDecimal ','
+                . Char8ByteString.length
+                . encodeWithJsonifier
+            sample sampleName sampleData =
+              "- " <> TextBuilder.text sampleName <> ": " <> sampleDataSize sampleData
+         in "Input data sizes report:\n"
+              <> sample "twitter with 1 objects" twitter1Data
+              <> "\n"
+              <> sample "twitter with 10 objects" twitter10Data
+              <> "\n"
+              <> sample "twitter with 100 objects" twitter100Data
+              <> "\n"
+              <> sample "twitter with 1,000 objects" twitter1000Data
+              <> "\n"
+              <> sample "twitter with 10,000 objects" twitter10000Data
+              <> "\n"
+              <> sample "twitter with 100,000 objects" twitter100000Data
 
     let benchInput :: String -> Model.Result -> Benchmark
         benchInput name input =
@@ -54,8 +53,7 @@ main =
             [ bench "jsonifier" (nf encodeWithJsonifier input),
               bench "aeson" (nf encodeWithAeson input),
              bench "lazy-aeson" (nf encodeWithLazyAeson input),
-              bench "lazy-aeson-untrimmed-32k" (nf Main.Aeson.resultToLazyByteStringWithUntrimmedStrategy input),
-              bench "buffer-builder" (nf BufferBuilder.encodeResult input)
+              bench "lazy-aeson-untrimmed-32k" (nf Main.Aeson.resultToLazyByteStringWithUntrimmedStrategy input)
            ]
     in defaultMain
          [ benchInput "1kB" twitter1Data,
@@ -75,6 +73,7 @@ mapResultsOfResult :: ([Model.Story] -> [Model.Story]) -> Model.Result -> Model.
 mapResultsOfResult f a =
   a {Model.results = f (Model.results a)}
 
+test :: (Data.Aeson.FromJSON a, Eq a, MonadFail m) => String -> (a -> ByteString) -> a -> m ()
 test name strictEncoder input =
   let encoding = strictEncoder input
    in case Data.Aeson.eitherDecodeStrict' encoding of
@@ -85,11 +84,14 @@ test name strictEncoder input =
         Left err ->
           fail ("Encoder " <> name <> " failed: " <> err <> ".\nOutput:\n" <> Char8ByteString.unpack encoding)
 
+encodeWithJsonifier :: Model.Result -> ByteString
 encodeWithJsonifier =
   Jsonifier.toByteString . Main.Jsonifier.resultJson
 
+encodeWithAeson :: (Data.Aeson.ToJSON a) => a -> ByteString
 encodeWithAeson =
   Data.ByteString.Lazy.toStrict . Data.Aeson.encode
 
+encodeWithLazyAeson :: (Data.Aeson.ToJSON a) => a -> Data.ByteString.Lazy.ByteString
 encodeWithLazyAeson =
   Data.Aeson.encode
diff --git a/bench/Main/Aeson.hs b/bench/Main/Aeson.hs
index fa393e1..67d6372 100644
--- a/bench/Main/Aeson.hs
+++ b/bench/Main/Aeson.hs
@@ -1,3 +1,5 @@
+{-# OPTIONS_GHC -Wno-orphans #-}
+
 module Main.Aeson where
 
 import Data.Aeson hiding (Result)
@@ -25,8 +27,9 @@ instance ToJSON Metadata where
       ]
 
   toEncoding Metadata {..} =
-    pairs $
-      "result_type" .= result_type
+    pairs
+      $ "result_type"
+      .= result_type
 
 instance FromJSON Metadata where
   parseJSON (Object v) = Metadata <$> v .: "result_type"
@@ -40,15 +43,19 @@ instance ToJSON Geo where
       ]
 
   toEncoding Geo {..} =
-    pairs $
-      "type_" .= type_
-        <> "coordinates" .= coordinates
+    pairs
+      $ "type_"
+      .= type_
+      <> "coordinates"
+      .= coordinates
 
 instance FromJSON Geo where
   parseJSON (Object v) =
     Geo
-      <$> v .: "type_"
-      <*> v .: "coordinates"
+      <$> v
+      .: "type_"
+      <*> v
+      .: "coordinates"
   parseJSON _ = empty
 
 instance ToJSON Story where
@@ -71,39 +78,67 @@ instance ToJSON Story where
       ]
 
   toEncoding Story {..} =
-    pairs $
-      "from_user_id_str" .= from_user_id_str
-        <> "profile_image_url" .= profile_image_url
-        <> "created_at" .= created_at
-        <> "from_user" .= from_user
-        <> "id_str" .= id_str
-        <> "metadata" .= metadata
-        <> "to_user_id" .= to_user_id
-        <> "text" .= text
-        <> "id" .= id
-        <> "from_user_id" .= from_user_id
-        <> "geo" .= geo
-        <> "iso_language_code" .= iso_language_code
-        <> "to_user_id_str" .= to_user_id_str
-        <> "source" .= source
+    pairs
+      $ "from_user_id_str"
+      .= from_user_id_str
+      <> "profile_image_url"
+      .= profile_image_url
+      <> "created_at"
+      .= created_at
+      <> "from_user"
+      .= from_user
+      <> "id_str"
+      .= id_str
+      <> "metadata"
+      .= metadata
+      <> "to_user_id"
+      .= to_user_id
+      <> "text"
+      .= text
+      <> "id"
+      .= id
+      <> "from_user_id"
+      .= from_user_id
+      <> "geo"
+      .= geo
+      <> "iso_language_code"
+      .= iso_language_code
+      <> "to_user_id_str"
+      .= to_user_id_str
+      <> "source"
+      .= source
 
 instance FromJSON Story where
   parseJSON (Object v) =
     Story
-      <$> v .: "from_user_id_str"
-      <*> v .: "profile_image_url"
-      <*> v .: "created_at"
-      <*> v .: "from_user"
-      <*> v .: "id_str"
-      <*> v .: "metadata"
-      <*> v .: "to_user_id"
-      <*> v .: "text"
-      <*> v .: "id"
-      <*> v .: "from_user_id"
-      <*> v .: "geo"
-      <*> v .: "iso_language_code"
-      <*> v .: "to_user_id_str"
-      <*> v .: "source"
+      <$> v
+      .: "from_user_id_str"
+      <*> v
+      .: "profile_image_url"
+      <*> v
+      .: "created_at"
+      <*> v
+      .: "from_user"
+      <*> v
+      .: "id_str"
+      <*> v
+      .: "metadata"
+      <*> v
+      .: "to_user_id"
+      <*> v
+      .: "text"
+      <*> v
+      .: "id"
+      <*> v
+      .: "from_user_id"
+      <*> v
+      .: "geo"
+      <*> v
+      .: "iso_language_code"
+      <*> v
+      .: "to_user_id_str"
+      <*> v
+      .: "source"
   parseJSON _ = empty
 
 instance ToJSON Result where
@@ -123,31 +158,53 @@ instance ToJSON Result where
       ]
 
   toEncoding Result {..} =
-    pairs $
-      "results" .= results
-        <> "max_id" .= max_id
-        <> "since_id" .= since_id
-        <> "refresh_url" .= refresh_url
-        <> "next_page" .= next_page
-        <> "results_per_page" .= results_per_page
-        <> "page" .= page
-        <> "completed_in" .= completed_in
-        <> "since_id_str" .= since_id_str
-        <> "max_id_str" .= max_id_str
-        <> "query" .= query
+    pairs
+      $ "results"
+      .= results
+      <> "max_id"
+      .= max_id
+      <> "since_id"
+      .= since_id
+      <> "refresh_url"
+      .= refresh_url
+      <> "next_page"
+      .= next_page
+      <> "results_per_page"
+      .= results_per_page
+      <> "page"
+      .= page
+      <> "completed_in"
+      .= completed_in
+      <> "since_id_str"
+      .= since_id_str
+      <> "max_id_str"
+      .= max_id_str
+      <> "query"
+      .= query
 
 instance FromJSON Result where
   parseJSON (Object v) =
     Result
-      <$> v .: "results"
-      <*> v .: "max_id"
-      <*> v .: "since_id"
-      <*> v .: "refresh_url"
-      <*> v .: "next_page"
-      <*> v .: "results_per_page"
-      <*> v .: "page"
-      <*> v .: "completed_in"
-      <*> v .: "since_id_str"
-      <*> v .: "max_id_str"
-      <*> v .: "query"
+      <$> v
+      .: "results"
+      <*> v
+      .: "max_id"
+      <*> v
+      .: "since_id"
+      <*> v
+      .: "refresh_url"
+      <*> v
+      .: "next_page"
+      <*> v
+      .: "results_per_page"
+      <*> v
+      .: "page"
+      <*> v
+      .: "completed_in"
+      <*> v
+      .: "since_id_str"
+      <*> v
+      .: "max_id_str"
+      <*> v
+      .: "query"
   parseJSON _ = empty
diff --git a/bench/Main/BufferBuilder.hs b/bench/Main/BufferBuilder.hs
deleted file mode 100644
index a9f7b53..0000000
--- a/bench/Main/BufferBuilder.hs
+++ /dev/null
@@ -1,57 +0,0 @@
-module Main.BufferBuilder where
-
-import Data.BufferBuilder.Json
-import qualified Main.Model as M
-import Prelude
-
-encodeResult :: M.Result -> ByteString
-encodeResult =
-  encodeJson
-
-instance ToJson M.Geo where
-  toJson M.Geo {..} =
-    toJson $
-      "type_"# .=# toJson type_
-        <> "coordinates"# .=# toJson coordinates
-
-instance ToJson (Double, Double) where
-  toJson (a, b) =
-    toJson [toJson a, toJson b]
-
-instance ToJson M.Story where
-  toJson M.Story {..} =
-    toJson $
-      "from_user_id_str"# .=# toJson from_user_id_str
-        <> "profile_image_url"# .=# toJson profile_image_url
-        <> "created_at"# .=# toJson created_at
-        <> "from_user"# .=# toJson from_user
-        <> "id_str"# .=# toJson id_str
-        <> "metadata"# .=# toJson metadata
-        <> "to_user_id"# .=# toJson to_user_id
-        <> "text"# .=# toJson text
-        <> "id"# .=# toJson id
-        <> "from_user_id"# .=# toJson from_user_id
-        <> "geo"# .=# toJson geo
-        <> "iso_language_code"# .=# toJson iso_language_code
-        <> "to_user_id_str"# .=# toJson to_user_id_str
-        <> "source"# .=# toJson source
-
-instance ToJson M.Metadata where
-  toJson M.Metadata {..} =
-    toJson $
-      "result_type"# .=# toJson result_type
-
-instance ToJson M.Result where
-  toJson M.Result {..} =
-    toJson $
-      "results"# .=# toJson results
-        <> "max_id"# .=# toJson max_id
-        <> "since_id"# .=# toJson since_id
-        <> "refresh_url"# .=# toJson refresh_url
-        <> "next_page"# .=# toJson next_page
-        <> "results_per_page"# .=# toJson results_per_page
-        <> "page"# .=# toJson page
-        <> "completed_in"# .=# toJson completed_in
-        <> "since_id_str"# .=# toJson since_id_str
-        <> "max_id_str"# .=# toJson max_id_str
-        <> "query"# .=# toJson query
diff --git a/cabal.project b/cabal.project
new file mode 100644
index 0000000..e6fdbad
--- /dev/null
+++ b/cabal.project
@@ -0,0 +1 @@
+packages: .
diff --git a/demo/Main.hs b/demo/Main.hs
index fc6059f..306c3b7 100644
--- a/demo/Main.hs
+++ b/demo/Main.hs
@@ -9,6 +9,7 @@ import Prelude
 -- Outputs the following:
 --
 -- > {"name":"Metallica","genres":[{"name":"Metal"},{"name":"Rock"},{"name":"Blues"}]}
+main :: IO ()
 main =
   Data.ByteString.Char8.putStrLn (J.toByteString (artistJson metallica))
 
diff --git a/jsonifier.cabal b/jsonifier.cabal
index 778e128..402d37f 100644
--- a/jsonifier.cabal
+++ b/jsonifier.cabal
@@ -88,11 +88,11 @@ library
     cbits/json_encoding.c
 
   build-depends:
-    , base >=4.11 && <5
-    , bytestring >=0.10.10 && <0.12
-    , ptr-poker ^>=0.1.2.2
-    , scientific ^>=0.3.6.2
-    , text >=1 && <3
+    , base >=4.11 && <5
+    , bytestring >=0.10.10 && <0.13
+    , ptr-poker ^>=0.1.2.3
+    , scientific ^>=0.3.6.2
+    , text >=1 && <3
 
 test-suite demo
   import: base-settings
@@ -111,11 +111,11 @@ test-suite test
   main-is: Main.hs
   other-modules: Main.Util.HedgehogGens
   build-depends:
-    , aeson >=2 && <3
-    , hedgehog >=1.0.3 && <2
+    , aeson >=2 && <3
+    , hedgehog >=1.0.3 && <2
     , jsonifier
-    , numeric-limits ^>=0.1
-    , rerebase >=1.10.0.1 && <2
+    , numeric-limits ^>=0.1
+    , rerebase >=1.10.0.1 && <2
 
 benchmark bench
   import: executable-settings
@@ -124,14 +124,12 @@
   main-is: Main.hs
   other-modules:
     Main.Aeson
-    Main.BufferBuilder
     Main.Jsonifier
     Main.Model
 
   build-depends:
-    , aeson >=2 && <3
-    , buffer-builder ^>=0.2.4.7
-    , gauge ^>=0.2.5
+    , aeson >=2 && <3
+    , criterion >=1.6.3 && <2
     , jsonifier
-    , rerebase >=1.10.0.1 && <2
-    , text-builder ^>=0.6.6.1
+    , rerebase >=1.10.0.1 && <2
+    , text-builder ^>=0.6.6.1
diff --git a/library/Jsonifier.hs b/library/Jsonifier.hs
index 3041967..04c6ef5 100644
--- a/library/Jsonifier.hs
+++ b/library/Jsonifier.hs
@@ -36,12 +36,10 @@
 where
 
 import qualified Data.ByteString as ByteString
-import qualified Data.ByteString.Internal as ByteString
 import qualified Jsonifier.Poke as Poke
 import Jsonifier.Prelude hiding (bool, null)
 import qualified Jsonifier.Size as Size
 import qualified Jsonifier.Write as Write
-import PtrPoker.Poke (Poke)
 import qualified PtrPoker.Poke as Poke
 import PtrPoker.Write (Write)
 import qualified PtrPoker.Write as Write
@@ -164,7 +162,7 @@ scientificString =
 -- |
 -- JSON Array literal from a foldable over element literals.
 {-# INLINE array #-}
-array :: Foldable f => f Json -> Json
+array :: (Foldable f) => f Json -> Json
 array foldable =
   write size poke
   where
@@ -176,9 +174,9 @@ array foldable =
     finalize count size =
       Size.array count size
     poke =
-      Poke.Poke $
-        Poke.pokePtr Poke.openingSquareBracket
-          >=> foldr step finalize foldable True
+      Poke.Poke
+        $ Poke.pokePtr Poke.openingSquareBracket
+        >=> foldr step finalize foldable True
       where
         step (Json (Write.Write _ poke)) next first =
          if first
@@ -195,7 +193,7 @@ array foldable =
 -- |
 -- JSON Object literal from a foldable over pairs of key to value literal.
 {-# INLINE object #-}
-object :: Foldable f => f (Text, Json) -> Json
+object :: (Foldable f) => f (Text, Json) -> Json
 object f =
   foldr step finalize f True 0 0 mempty
   where
diff --git a/library/Jsonifier/Ffi.hs b/library/Jsonifier/Ffi.hs
index 53506e0..224011c 100644
--- a/library/Jsonifier/Ffi.hs
+++ b/library/Jsonifier/Ffi.hs
@@ -1,5 +1,6 @@
 {-# LANGUAGE CPP #-}
 {-# LANGUAGE UnliftedFFITypes #-}
+{-# OPTIONS_GHC -Wno-unused-imports #-}
 
 module Jsonifier.Ffi where
 
diff --git a/library/Jsonifier/Prelude.hs b/library/Jsonifier/Prelude.hs
index 7d27959..e025f1c 100644
--- a/library/Jsonifier/Prelude.hs
+++ b/library/Jsonifier/Prelude.hs
@@ -27,7 +27,7 @@ import Data.Either as Exports
 import Data.Fixed as Exports
 import Data.Foldable as Exports hiding (toList)
 import Data.Function as Exports hiding (id, (.))
-import Data.Functor as Exports
+import Data.Functor as Exports hiding (unzip)
 import Data.Functor.Compose as Exports
 import Data.IORef as Exports
 import Data.Int as Exports
@@ -69,12 +69,10 @@ import System.IO.Unsafe as Exports
 import System.Mem as Exports
 import System.Mem.StableName as Exports
 import System.Timeout as Exports
-import Text.ParserCombinators.ReadP as Exports (ReadP, readP_to_S, readS_to_P)
-import Text.ParserCombinators.ReadPrec as Exports (ReadPrec, readP_to_Prec, readPrec_to_P, readPrec_to_S, readS_to_Prec)
 import Text.Printf as Exports (hPrintf, printf)
 import Text.Read as Exports (Read (..), readEither, readMaybe)
 import Unsafe.Coerce as Exports
 import Prelude as Exports hiding (all, and, any, concat, concatMap, elem, fail, foldl, foldl1, foldr, foldr1, id, mapM, mapM_, maximum, minimum, notElem, or, product, sequence, sequence_, sum, (.))
 
-showAsText :: Show a => a -> Text
+showAsText :: (Show a) => a -> Text
 showAsText = show >>> fromString
diff --git a/test/Main.hs b/test/Main.hs
index cc11d4e..f800ca2 100644
--- a/test/Main.hs
+++ b/test/Main.hs
@@ -13,12 +13,15 @@ import qualified Jsonifier as J
 import qualified Main.Util.HedgehogGens as GenExtras
 import Prelude hiding (bool, null)
 
+main :: IO ()
 main =
   defaultMain $ pure $ checkParallel $ $$(discover)
 
+prop_sample :: Property
 prop_sample =
-  withTests 1 $
-    property $ do
+  withTests 1
+    $ property
+    $ do
       sample <- liftIO $ load "samples/twitter100.json"
       A.eitherDecodeStrict' (J.toByteString (aesonJson sample)) === Right sample
   where
@@ -42,9 +45,11 @@ prop_sample =
         A.Object a ->
           J.object (AesonKeyMap.foldMapWithKey (\k -> (: []) . (,) (AesonKey.toText k) . aesonJson) a)
 
+prop_aesonRoundtrip :: Property
 prop_aesonRoundtrip =
-  withTests 9999 $
-    property $ do
+  withTests 9999
+    $ property
+    $ do
       sample <- forAll sampleGen
       let encoding = sampleJsonifier sample
       annotate (Char8ByteString.unpack encoding)
@@ -148,8 +153,8 @@ sampleAeson =
       ObjectSample a -> A.Object (AesonKeyMap.fromList (fmap (bimap AesonKey.fromText sample) a))
   where
     realNumber a =
-      A.Number $
-        if isNaN a || isInfinite a then 0 else (read . show) a
+      A.Number
+        $ if isNaN a || isInfinite a then 0 else (read . show) a
 
 -- |
 -- We have to come down to this trickery due to small differences in
diff --git a/test/Main/Util/HedgehogGens.hs b/test/Main/Util/HedgehogGens.hs
index 7ac97c6..f26a34d 100644
--- a/test/Main/Util/HedgehogGens.hs
+++ b/test/Main/Util/HedgehogGens.hs
@@ -11,14 +11,17 @@ scientific =
   Gen.realFrac_ (Range.linearFrac (-99999999999999) 99999999999999)
     <&> fromRational
 
+realFloat :: (MonadGen m, RealFloat a) => m a
 realFloat =
   Gen.frequency
     [ (99, realRealFloat),
       (1, nonRealRealFloat)
    ]
 
+realRealFloat :: (MonadGen m, RealFloat a) => m a
 realRealFloat =
   Gen.realFloat (Range.exponentialFloat NumericLimits.minValue NumericLimits.maxValue)
 
+nonRealRealFloat :: (MonadGen m, Fractional a) => m a
 nonRealRealFloat =
   Gen.element [0 / 0, 1 / 0, (-1) / 0, -0]