Modernize

nikita-volkov committed Dec 9, 2023
1 parent 93eafd1 commit 6a43338
Showing 16 changed files with 259 additions and 237 deletions.
30 changes: 30 additions & 0 deletions .github/workflows/check.yaml
@@ -0,0 +1,30 @@
name: Compile, test and check the docs

on:
workflow_call:

jobs:

check:

strategy:
fail-fast: false
matrix:
include:
- ghc: 8.8.1
ghc-options: ""
ignore-haddock: true
ignore-cabal-check: true
- ghc: latest
ignore-cabal-check: true

runs-on: ubuntu-latest

steps:

- uses: nikita-volkov/build-and-test-cabal-package.github-action@v1
with:
ghc: ${{matrix.ghc}}
ghc-options: ${{matrix.ghc-options}}
ignore-haddock: ${{matrix.ignore-haddock}}
ignore-cabal-check: ${{matrix.ignore-cabal-check}}
57 changes: 0 additions & 57 deletions .github/workflows/integrate.yaml

This file was deleted.

17 changes: 17 additions & 0 deletions .github/workflows/on-push-to-master-or-pr.yaml
@@ -0,0 +1,17 @@
name: Compile, test and check the docs

on:
push:
branches:
- master
pull_request:

jobs:

format:
uses: nikita-volkov/haskell-hackage-lib-github-actions-workflows/.github/workflows/format.yaml@v2
secrets: inherit

check:
uses: ./.github/workflows/check.yaml
secrets: inherit
32 changes: 32 additions & 0 deletions .github/workflows/on-push-to-release.yaml
@@ -0,0 +1,32 @@
name: Release the lib to Hackage

on:
push:
branches:
- supermajor
- major
- minor
- patch

concurrency:
group: release
cancel-in-progress: false

jobs:

format:
uses: nikita-volkov/haskell-hackage-lib-github-actions-workflows/.github/workflows/format.yaml@v2
secrets: inherit

check:
uses: ./.github/workflows/check.yaml
secrets: inherit

release:
needs:
- format
- check
uses: nikita-volkov/haskell-hackage-lib-github-actions-workflows/.github/workflows/release.yaml@v2
secrets: inherit
with:
prefix-tag-with-v: false
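
For context on the `prefix-tag-with-v` input above: passing it requires the called reusable workflow to declare a matching `workflow_call` input. The sketch below shows how such a declaration typically looks; only the input name comes from this commit, the rest of the workflow is assumed rather than taken from nikita-volkov/haskell-hackage-lib-github-actions-workflows.

```yaml
# Hypothetical interface of the called release workflow.
# Only the "prefix-tag-with-v" name is taken from this commit; the rest is assumed.
name: Release the lib to Hackage

on:
  workflow_call:
    inputs:
      prefix-tag-with-v:
        description: Prefix the release tag with "v" (e.g. "v1.2.3" instead of "1.2.3")
        type: boolean
        required: false
        default: true

jobs:
  release:
    runs-on: ubuntu-latest
    steps:
      - run: echo "prefix-tag-with-v is ${{ inputs.prefix-tag-with-v }}"
```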
9 changes: 1 addition & 8 deletions README.md
@@ -14,49 +14,42 @@ becomes less drastic.
## Benchmarks

Following are the benchmark results comparing the performance
of encoding typical documents using this library, "aeson" and "buffer-builder".
of encoding typical documents using this library and "aeson".
Every approach is measured on Twitter API data of sizes ranging from roughly 1kB to 60MB.
"aeson" stands for "aeson" producing a strict bytestring,
"lazy-aeson" - lazy bytestring,
"lazy-aeson-untrimmed-32k" - lazy bytestring using an untrimmed builder strategy with allocation of 32k.
"buffer-builder" is another library providing an alternative JSON encoder.

```
1kB/jsonifier mean 2.054 μs ( +- 30.83 ns )
1kB/aeson mean 6.456 μs ( +- 126.7 ns )
1kB/lazy-aeson mean 6.338 μs ( +- 169.1 ns )
1kB/lazy-aeson-untrimmed-32k mean 6.905 μs ( +- 280.2 ns )
1kB/buffer-builder mean 5.550 μs ( +- 113.2 ns )
6kB/jsonifier mean 12.80 μs ( +- 196.9 ns )
6kB/aeson mean 31.28 μs ( +- 733.2 ns )
6kB/lazy-aeson mean 30.30 μs ( +- 229.5 ns )
6kB/lazy-aeson-untrimmed-32k mean 29.17 μs ( +- 371.3 ns )
6kB/buffer-builder mean 30.39 μs ( +- 387.2 ns )
60kB/jsonifier mean 122.9 μs ( +- 1.492 μs )
60kB/aeson mean 258.4 μs ( +- 1.000 μs )
60kB/lazy-aeson mean 259.4 μs ( +- 4.494 μs )
60kB/lazy-aeson-untrimmed-32k mean 255.7 μs ( +- 3.239 μs )
60kB/buffer-builder mean 309.0 μs ( +- 3.907 μs )
600kB/jsonifier mean 1.299 ms ( +- 16.44 μs )
600kB/aeson mean 3.389 ms ( +- 106.8 μs )
600kB/lazy-aeson mean 2.520 ms ( +- 45.51 μs )
600kB/lazy-aeson-untrimmed-32k mean 2.509 ms ( +- 30.76 μs )
600kB/buffer-builder mean 3.012 ms ( +- 85.22 μs )
6MB/jsonifier mean 20.91 ms ( +- 821.7 μs )
6MB/aeson mean 30.74 ms ( +- 509.4 μs )
6MB/lazy-aeson mean 24.83 ms ( +- 184.3 μs )
6MB/lazy-aeson-untrimmed-32k mean 24.93 ms ( +- 383.2 μs )
6MB/buffer-builder mean 32.98 ms ( +- 700.1 μs )
60MB/jsonifier mean 194.8 ms ( +- 13.93 ms )
60MB/aeson mean 276.0 ms ( +- 5.194 ms )
60MB/lazy-aeson mean 246.9 ms ( +- 3.122 ms )
60MB/lazy-aeson-untrimmed-32k mean 245.1 ms ( +- 1.050 ms )
60MB/buffer-builder mean 312.0 ms ( +- 4.896 ms )
```

The benchmark suite is bundled with the package.
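
The "lazy-aeson-untrimmed-32k" configuration measured above is implemented in `Main.Aeson.resultToLazyByteStringWithUntrimmedStrategy`, which this diff only references. Below is a minimal sketch of what such an encoder looks like, assuming the standard `bytestring` builder API and 32k chunks; the exact strategy parameters of the benchmarked helper are not shown in this commit.

```haskell
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Encoding as Encoding
import qualified Data.ByteString.Builder.Extra as BuilderExtra
import qualified Data.ByteString.Lazy as LazyByteString

-- Encode to a lazy ByteString using an untrimmed allocation strategy with
-- 32k chunks, instead of aeson's default 'encode'. Illustrative only; the
-- actual Main.Aeson helper may differ in its exact strategy parameters.
encodeUntrimmed32k :: (Aeson.ToJSON a) => a -> LazyByteString.ByteString
encodeUntrimmed32k =
  BuilderExtra.toLazyByteStringWith
    (BuilderExtra.untrimmedStrategy 32768 32768)
    LazyByteString.empty
    . Encoding.fromEncoding
    . Aeson.toEncoding
```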
50 changes: 26 additions & 24 deletions bench/Main.hs
@@ -1,17 +1,17 @@
module Main where

import Criterion.Main
import qualified Data.Aeson
import qualified Data.ByteString.Char8 as Char8ByteString
import qualified Data.ByteString.Lazy
import Gauge.Main
import qualified Jsonifier
import qualified Main.Aeson
import qualified Main.BufferBuilder as BufferBuilder
import qualified Main.Jsonifier
import qualified Main.Model as Model
import qualified Text.Builder as TextBuilder
import Prelude

main :: IO ()
main =
do
twitter1Data <- load "samples/twitter1.json"
@@ -23,29 +23,28 @@ main =

-- Ensure that encoders are correct
test "jsonifier" encodeWithJsonifier twitter10Data
test "buffer-builder" BufferBuilder.encodeResult twitter10Data
test "aeson" encodeWithAeson twitter10Data

-- Print out the data sizes of samples
TextBuilder.putLnToStdOut $
let sampleDataSize =
TextBuilder.dataSizeInBytesInDecimal ','
. Char8ByteString.length
. encodeWithJsonifier
sample sampleName sampleData =
"- " <> TextBuilder.text sampleName <> ": " <> sampleDataSize sampleData
in "Input data sizes report:\n"
<> sample "twitter with 1 objects" twitter1Data
<> "\n"
<> sample "twitter with 10 objects" twitter10Data
<> "\n"
<> sample "twitter with 100 objects" twitter100Data
<> "\n"
<> sample "twitter with 1,000 objects" twitter1000Data
<> "\n"
<> sample "twitter with 10,000 objects" twitter10000Data
<> "\n"
<> sample "twitter with 100,000 objects" twitter100000Data
TextBuilder.putLnToStdOut
$ let sampleDataSize =
TextBuilder.dataSizeInBytesInDecimal ','
. Char8ByteString.length
. encodeWithJsonifier
sample sampleName sampleData =
"- " <> TextBuilder.text sampleName <> ": " <> sampleDataSize sampleData
in "Input data sizes report:\n"
<> sample "twitter with 1 objects" twitter1Data
<> "\n"
<> sample "twitter with 10 objects" twitter10Data
<> "\n"
<> sample "twitter with 100 objects" twitter100Data
<> "\n"
<> sample "twitter with 1,000 objects" twitter1000Data
<> "\n"
<> sample "twitter with 10,000 objects" twitter10000Data
<> "\n"
<> sample "twitter with 100,000 objects" twitter100000Data

let benchInput :: String -> Model.Result -> Benchmark
benchInput name input =
@@ -54,8 +53,7 @@ main =
[ bench "jsonifier" (nf encodeWithJsonifier input),
bench "aeson" (nf encodeWithAeson input),
bench "lazy-aeson" (nf encodeWithLazyAeson input),
bench "lazy-aeson-untrimmed-32k" (nf Main.Aeson.resultToLazyByteStringWithUntrimmedStrategy input),
bench "buffer-builder" (nf BufferBuilder.encodeResult input)
bench "lazy-aeson-untrimmed-32k" (nf Main.Aeson.resultToLazyByteStringWithUntrimmedStrategy input)
]
in defaultMain
[ benchInput "1kB" twitter1Data,
@@ -75,6 +73,7 @@ mapResultsOfResult :: ([Model.Story] -> [Model.Story]) -> Model.Result -> Model.
mapResultsOfResult f a =
a {Model.results = f (Model.results a)}

test :: (Data.Aeson.FromJSON a, Eq a, MonadFail m) => String -> (a -> ByteString) -> a -> m ()
test name strictEncoder input =
let encoding = strictEncoder input
in case Data.Aeson.eitherDecodeStrict' encoding of
@@ -85,11 +84,14 @@ test name strictEncoder input =
Left err ->
fail ("Encoder " <> name <> " failed: " <> err <> ".\nOutput:\n" <> Char8ByteString.unpack encoding)

encodeWithJsonifier :: Model.Result -> ByteString
encodeWithJsonifier =
Jsonifier.toByteString . Main.Jsonifier.resultJson

encodeWithAeson :: (Data.Aeson.ToJSON a) => a -> ByteString
encodeWithAeson =
Data.ByteString.Lazy.toStrict . Data.Aeson.encode

encodeWithLazyAeson :: (Data.Aeson.ToJSON a) => a -> Data.ByteString.Lazy.ByteString
encodeWithLazyAeson =
Data.Aeson.encode
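
For orientation, `Main.Jsonifier.resultJson` (referenced by `encodeWithJsonifier` above but not included in this diff) builds a `Jsonifier.Json` value for the Twitter model. The following is a minimal, self-contained sketch of encoding with jsonifier's public API; the `Person` type and its fields are invented for the illustration and are not part of the benchmark.

```haskell
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.ByteString.Char8 as Char8
import Data.Text (Text)
import qualified Jsonifier

-- Hypothetical record, standing in for the Twitter model used by the benchmark.
data Person = Person {name :: Text, age :: Int}

-- Build a JSON object value with jsonifier's combinators.
personJson :: Person -> Jsonifier.Json
personJson person =
  Jsonifier.object
    [ ("name", Jsonifier.textString (name person)),
      ("age", Jsonifier.intNumber (age person))
    ]

-- Render the value to a strict ByteString and print it.
main :: IO ()
main =
  Char8.putStrLn (Jsonifier.toByteString (personJson (Person "Avdey" 34)))
```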
