
Commit 5fbae30

Code clean up by using Lexer.error (zio#1223)
1 parent: b3759cd

File tree: 8 files changed, +126 −211 lines
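The change is mechanical: every decoder that previously raised a failure by hand with `throw UnsafeJson(JsonError.Message(msg) :: trace)` now calls the shared `Lexer.error(msg, trace)` helper from `zio.json.internal`, and the per-decoder private `error` method in macros.scala is deleted. The helper's own definition is not part of this diff; a minimal sketch of what it presumably looks like, modelled on the private helper removed further down, is:

```scala
// Sketch only, modelled on the private helper deleted from macros.scala below;
// the real definition in zio.json.internal.Lexer is not shown in this commit.
import zio.json.JsonDecoder.{ JsonError, UnsafeJson }

def error(msg: String, trace: List[JsonError]): Nothing =
  throw UnsafeJson(JsonError.Message(msg) :: trace)
```

Declaring the result type as `Nothing` is what lets the call sit in expression position, for example as the entire body of a `case _ =>` branch or of an `if` that must otherwise produce a value.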

zio-json-macros/shared/src/main/scala/zio/json/jsonDerive.scala

+7 −4
@@ -128,10 +128,13 @@ private[json] final class DeriveCodecMacros(val c: blackbox.Context) {
     } else {
       val tparamNames = tparams.map(_.name)
       def mkImplicitParams(prefix: String, typeSymbol: TypeSymbol) =
-        tparamNames.zipWithIndex.map { case (tparamName, i) =>
-          val paramName = TermName(s"$prefix$i")
-          val paramType = tq"$typeSymbol[$tparamName]"
-          q"$paramName: $paramType"
+        tparamNames.map {
+          var i = 0
+          tparamName =>
+            val paramName = TermName(s"$prefix$i")
+            i += 1
+            val paramType = tq"$typeSymbol[$tparamName]"
+            q"$paramName: $paramType"
         }
       val decodeParams = mkImplicitParams("decode", DecoderClass)
       val encodeParams = mkImplicitParams("encode", EncoderClass)
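This hunk is not about error handling: it replaces `zipWithIndex` with a mutable counter captured by the function literal passed to `map`, presumably to avoid allocating an `(element, index)` tuple per type parameter. A standalone sketch of the pattern, with hypothetical names:

```scala
// The block passed to `map` is evaluated once to produce the function literal,
// so the `var` declared before the `=>` is initialized a single time and is
// then captured and mutated by the closure on every element.
val names = List("a", "b", "c")

val indexed: List[String] = names.map {
  var i = 0
  name =>
    val out = s"$name$i"
    i += 1
    out
}
// indexed == List("a0", "b1", "c2")
```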

zio-json/jvm/src/test/scala/zio/json/data/geojson/GeoJSON.scala

+21 −64
@@ -127,7 +127,7 @@ package handrolled {
   implicit lazy val zioJsonJsonDecoder: JsonDecoder[Geometry] =
     new JsonDecoder[Geometry] {
       import zio.json._
-      import JsonDecoder.{ JsonError, UnsafeJson }
+      import JsonDecoder.JsonError
       import internal._

       import scala.annotation._
@@ -157,37 +157,31 @@ package handrolled {
           case Json.Arr(chunk)
               if chunk.length == 2 && chunk(0).isInstanceOf[Json.Num] && chunk(1).isInstanceOf[Json.Num] =>
             (chunk(0).asInstanceOf[Json.Num].value.doubleValue(), chunk(1).asInstanceOf[Json.Num].value.doubleValue())
-          case _ =>
-            throw UnsafeJson(
-              JsonError.Message("expected coordinates") :: trace
-            )
+          case _ => Lexer.error("expected coordinates", trace)
         }
       def coordinates1(
         trace: List[JsonError],
         js: Json.Arr
       ): List[(Double, Double)] =
         js.elements.map {
           case js1: Json.Arr => coordinates0(trace, js1)
-          case _ =>
-            throw UnsafeJson(JsonError.Message("expected list") :: trace)
+          case _ => Lexer.error("expected list", trace)
         }.toList
       def coordinates2(
         trace: List[JsonError],
         js: Json.Arr
       ): List[List[(Double, Double)]] =
         js.elements.map {
           case js1: Json.Arr => coordinates1(trace, js1)
-          case _ =>
-            throw UnsafeJson(JsonError.Message("expected list") :: trace)
+          case _ => Lexer.error("expected list", trace)
         }.toList
       def coordinates3(
         trace: List[JsonError],
         js: Json.Arr
       ): List[List[List[(Double, Double)]]] =
         js.elements.map {
           case js1: Json.Arr => coordinates2(trace, js1)
-          case _ =>
-            throw UnsafeJson(JsonError.Message("expected list") :: trace)
+          case _ => Lexer.error("expected list", trace)
         }.toList

       def unsafeDecode(
@@ -208,40 +208,25 @@ package handrolled {
             val trace_ = spans(field) :: trace
             (field: @switch) match {
               case 0 =>
-                if (subtype != -1)
-                  throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
+                if (subtype != -1) Lexer.error("duplicate", trace_)
                 subtype = Lexer.enumeration(trace_, in, subtypes)
               case 1 =>
-                if (coordinates != null)
-                  throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
+                if (coordinates != null) Lexer.error("duplicate", trace_)
                 coordinates = coordinatesD.unsafeDecode(trace_, in)
               case 2 =>
-                if (geometries != null)
-                  throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+                if (geometries != null) Lexer.error("duplicate", trace_)
                 geometries = geometriesD.unsafeDecode(trace_, in)
             }
           }
           Lexer.nextField(trace, in)
         }) ()

-        if (subtype == -1)
-          throw UnsafeJson(
-            JsonError.Message("missing discriminator") :: trace
-          )
-
+        if (subtype == -1) Lexer.error("missing discriminator", trace)
         if (subtype == 6) {
-          if (geometries == null)
-            throw UnsafeJson(
-              JsonError.Message("missing 'geometries' field") :: trace
-            )
+          if (geometries == null) Lexer.error("missing 'geometries' field", trace)
           else GeometryCollection(geometries)
         }
-
-        if (coordinates == null)
-          throw UnsafeJson(
-            JsonError.Message("missing 'coordinates' field") :: trace
-          )
+        if (coordinates == null) Lexer.error("missing 'coordinates' field", trace)
         val trace_ = spans(1) :: trace
         (subtype: @switch) match {
           case 0 => Point(coordinates0(trace_, coordinates))
@@ -298,7 +277,7 @@ package handrolled {
   implicit lazy val zioJsonJsonDecoder: JsonDecoder[GeoJSON] =
     new JsonDecoder[GeoJSON] {
       import zio.json._
-      import JsonDecoder.{ JsonError, UnsafeJson }
+      import JsonDecoder.JsonError
       import internal._

       import scala.annotation._
@@ -332,52 +311,30 @@ package handrolled {
             val trace_ = spans(field) :: trace
             (field: @switch) match {
               case 0 =>
-                if (subtype != -1)
-                  throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+                if (subtype != -1) Lexer.error("duplicate", trace_)
                 subtype = Lexer.enumeration(trace_, in, subtypes)
               case 1 =>
-                if (properties != null)
-                  throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+                if (properties != null) Lexer.error("duplicate", trace_)
                 properties = propertyD.unsafeDecode(trace_, in)
               case 2 =>
-                if (geometry != null)
-                  throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+                if (geometry != null) Lexer.error("duplicate", trace_)
                 geometry = geometryD.unsafeDecode(trace_, in)
               case 3 =>
-                if (features != null)
-                  throw UnsafeJson(JsonError.Message("duplicate") :: trace_)
-
+                if (features != null) Lexer.error("duplicate", trace_)
                 features = featuresD.unsafeDecode(trace_, in)
             }
           }
           Lexer.nextField(trace, in)
         }) ()

-        if (subtype == -1)
-          // we could infer the type but that would mean accepting invalid data
-          throw UnsafeJson(
-            JsonError.Message("missing required fields") :: trace
-          )
-
+        // we could infer the type but that would mean accepting invalid data
+        if (subtype == -1) Lexer.error("missing required fields", trace)
         if (subtype == 0) {
-          if (properties == null)
-            throw UnsafeJson(
-              JsonError.Message("missing 'properties' field") :: trace
-            )
-          if (geometry == null)
-            throw UnsafeJson(
-              JsonError.Message("missing 'geometry' field") :: trace
-            )
+          if (properties == null) Lexer.error("missing 'properties' field", trace)
+          if (geometry == null) Lexer.error("missing 'geometry' field", trace)
           Feature(properties, geometry)
         } else {
-
-          if (features == null)
-            throw UnsafeJson(
-              JsonError.Message("missing 'features' field") :: trace
-            )
+          if (features == null) Lexer.error("missing 'features' field", trace)
           FeatureCollection(features)
         }
       }
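These hand-rolled benchmark decoders keep their observable behaviour: `Lexer.error` raises the same control-flow failure that the public decode entry points catch and render, so malformed input still comes back as a `Left`. A hedged illustration with an ordinary derived decoder (hypothetical `Foo`, not part of this commit):

```scala
import zio.json._

// Hypothetical type used only to illustrate how "duplicate" and missing-field
// failures surface to callers as Left values; exact message rendering may vary.
final case class Foo(x: Int, y: Int)
object Foo {
  implicit val decoder: JsonDecoder[Foo] = DeriveJsonDecoder.gen[Foo]
}

// """{"x":1,"y":2}""".fromJson[Foo]        // Right(Foo(1,2))
// """{"x":1,"x":2,"y":3}""".fromJson[Foo]  // Left(...) mentioning a duplicate field
```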

zio-json/shared/src/main/scala-2.x/zio/json/macros.scala

+30 −44
@@ -2,7 +2,7 @@ package zio.json

 import magnolia1._
 import zio.Chunk
-import zio.json.JsonDecoder.{ JsonError, UnsafeJson }
+import zio.json.JsonDecoder.JsonError
 import zio.json.ast.Json
 import zio.json.internal.{ Lexer, RetractReader, StringMatrix, Write }

@@ -228,21 +228,24 @@ object DeriveJsonDecoder {
        json match {
          case Json.Obj(_) => ctx.rawConstruct(Nil)
          case Json.Null => ctx.rawConstruct(Nil)
-          case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+          case _ => Lexer.error("Not an object", trace)
        }
      }
    else
      new JsonDecoder[A] {
        val (names, aliases): (Array[String], Array[(String, Int)]) = {
-          val names = Array.ofDim[String](ctx.parameters.size)
+          val names = new Array[String](ctx.parameters.size)
          val aliasesBuilder = Array.newBuilder[(String, Int)]
-          ctx.parameters.zipWithIndex.foreach { case (p, i) =>
-            names(i) = p.annotations.collectFirst { case jsonField(name) => name }
-              .getOrElse(if (transformNames) nameTransform(p.label) else p.label)
-            aliasesBuilder ++= p.annotations.flatMap {
-              case jsonAliases(alias, aliases @ _*) => (alias +: aliases).map(_ -> i)
-              case _ => Seq.empty
-            }
+          ctx.parameters.foreach {
+            var i = 0
+            p =>
+              names(i) = p.annotations.collectFirst { case jsonField(name) => name }
+                .getOrElse(if (transformNames) nameTransform(p.label) else p.label)
+              aliasesBuilder ++= p.annotations.flatMap {
+                case jsonAliases(alias, aliases @ _*) => (alias +: aliases).map(_ -> i)
+                case _ => Seq.empty
+              }
+              i += 1
          }
          val aliases = aliasesBuilder.result()

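This hunk applies the same counter-for-`zipWithIndex` rewrite shown in jsonDerive.scala above, and additionally swaps `Array.ofDim[String](n)` for `new Array[String](n)`. Both produce a null-filled `Array[String]`; the direct constructor simply skips the generic, `ClassTag`-based `ofDim` factory (reading this as a micro-optimisation is an assumption, the commit message does not say). A two-line check:

```scala
val viaOfDim = Array.ofDim[String](3) // generic factory, needs an implicit ClassTag[String]
val direct   = new Array[String](3)   // direct array constructor
assert(viaOfDim.length == direct.length && viaOfDim.forall(_ == null) && direct.forall(_ == null))
```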
@@ -270,9 +273,6 @@ object DeriveJsonDecoder {
        lazy val namesMap: Map[String, Int] =
          (names.zipWithIndex ++ aliases).toMap

-        private[this] def error(message: String, trace: List[JsonError]): Nothing =
-          throw UnsafeJson(JsonError.Message(message) :: trace)
-
        def unsafeDecode(trace: List[JsonError], in: RetractReader): A = {
          Lexer.char(trace, in, '{')

@@ -288,7 +288,7 @@ object DeriveJsonDecoder {
          do {
            val field = Lexer.field(trace, in, matrix)
            if (field != -1) {
-              if (ps(field) != null) error("duplicate", trace)
+              if (ps(field) != null) Lexer.error("duplicate", trace)
              val default = defaults(field)
              ps(field) =
                if (
@@ -298,8 +298,8 @@ object DeriveJsonDecoder {
                  }
                ) tcs(field).unsafeDecode(spans(field) :: trace, in)
                else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default.get
-                else error("expected 'null'", spans(field) :: trace)
-            } else if (no_extra) error("invalid extra field", trace)
+                else Lexer.error("expected 'null'", spans(field) :: trace)
+            } else if (no_extra) Lexer.error("invalid extra field", trace)
            else Lexer.skipValue(trace, in)
          } while (Lexer.nextField(trace, in))
          var i = 0
@@ -322,13 +322,13 @@ object DeriveJsonDecoder {
              for ((key, value) <- fields) {
                namesMap.get(key) match {
                  case Some(field) =>
-                    if (ps(field) != null) error("duplicate", trace)
+                    if (ps(field) != null) Lexer.error("duplicate", trace)
                    ps(field) = {
                      if ((value eq Json.Null) && (defaults(field) ne None)) defaults(field).get
                      else tcs(field).unsafeFromJsonAST(spans(field) :: trace, value)
                    }
                  case _ =>
-                    if (no_extra) error("invalid extra field", trace)
+                    if (no_extra) Lexer.error("invalid extra field", trace)
                }
              }
              var i = 0
@@ -341,7 +341,7 @@ object DeriveJsonDecoder {
                i += 1
              }
              ctx.rawConstruct(new ArraySeq(ps))
-            case _ => error("Not an object", trace)
+            case _ => Lexer.error("Not an object", trace)
          }
        }
      }
@@ -374,14 +374,8 @@ object DeriveJsonDecoder {
            val a = tcs(field).unsafeDecode(trace_, in).asInstanceOf[A]
            Lexer.char(trace, in, '}')
            a
-          } else
-            throw UnsafeJson(
-              JsonError.Message("invalid disambiguator") :: trace
-            )
-        } else
-          throw UnsafeJson(
-            JsonError.Message("expected non-empty object") :: trace
-          )
+          } else Lexer.error("invalid disambiguator", trace)
+        } else Lexer.error("expected non-empty object", trace)
        }

        override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
@@ -391,10 +385,10 @@ object DeriveJsonDecoder {
              namesMap.get(key) match {
                case Some(idx) =>
                  tcs(idx).unsafeFromJsonAST(JsonError.ObjectAccess(key) :: trace, inner).asInstanceOf[A]
-                case None => throw UnsafeJson(JsonError.Message("Invalid disambiguator") :: trace)
+                case None => Lexer.error("Invalid disambiguator", trace)
              }
-            case Json.Obj(_) => throw UnsafeJson(JsonError.Message("Not an object with a single field") :: trace)
-            case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+            case Json.Obj(_) => Lexer.error("Not an object with a single field", trace)
+            case _ => Lexer.error("Not an object", trace)
          }
        }
      else
@@ -410,20 +404,14 @@ object DeriveJsonDecoder {
          do {
            if (Lexer.field(trace, in_, hintmatrix) != -1) {
              val field = Lexer.enumeration(trace, in_, matrix)
-              if (field == -1)
-                throw UnsafeJson(
-                  JsonError.Message(s"invalid disambiguator") :: trace
-                )
+              if (field == -1) Lexer.error("invalid disambiguator", trace)
              in_.rewind()
              val trace_ = spans(field) :: trace
              return tcs(field).unsafeDecode(trace_, in_).asInstanceOf[A]
            } else
              Lexer.skipValue(trace, in_)
          } while (Lexer.nextField(trace, in_))
-
-          throw UnsafeJson(
-            JsonError.Message(s"missing hint '$hintfield'") :: trace
-          )
+          Lexer.error(s"missing hint '$hintfield'", trace)
        }

        override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A =
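For sum types that carry a discriminator field, a missing or unknown hint in the streaming decoder above is now reported through `Lexer.error` as well. A hypothetical sketch using the standard `@jsonDiscriminator` annotation (type names invented for illustration, exact error rendering may vary):

```scala
import zio.json._

@jsonDiscriminator("type")
sealed trait Animal
final case class Dog(name: String) extends Animal
final case class Cat(lives: Int)   extends Animal
object Animal {
  implicit val decoder: JsonDecoder[Animal] = DeriveJsonDecoder.gen[Animal]
}

// """{"type":"Dog","name":"Rex"}""".fromJson[Animal]  // Right(Dog(Rex))
// """{"name":"Rex"}""".fromJson[Animal]               // Left(...) mentioning the missing 'type' hint
```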
@@ -433,14 +421,12 @@ object DeriveJsonDecoder {
              case Some((_, Json.Str(name))) =>
                namesMap.get(name) match {
                  case Some(idx) => tcs(idx).unsafeFromJsonAST(trace, json).asInstanceOf[A]
-                  case None => throw UnsafeJson(JsonError.Message("Invalid disambiguator") :: trace)
+                  case _ => Lexer.error("Invalid disambiguator", trace)
                }
-              case Some(_) =>
-                throw UnsafeJson(JsonError.Message(s"Non-string hint '$hintfield'") :: trace)
-              case None =>
-                throw UnsafeJson(JsonError.Message(s"Missing hint '$hintfield'") :: trace)
+              case Some(_) => Lexer.error(s"Non-string hint '$hintfield'", trace)
+              case _ => Lexer.error(s"Missing hint '$hintfield'", trace)
            }
-          case _ => throw UnsafeJson(JsonError.Message("Not an object") :: trace)
+          case _ => Lexer.error("Not an object", trace)
        }
      }
    }
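Without a discriminator annotation, derived sum-type decoders expect the single-key wrapper object form, and the 'invalid disambiguator', 'expected non-empty object' and 'Not an object' paths touched in the last hunks surface the same way as before. A hedged example with a hypothetical `Shape` hierarchy:

```scala
import zio.json._

sealed trait Shape
final case class Circle(r: Double)    extends Shape
final case class Square(side: Double) extends Shape
object Shape {
  implicit val decoder: JsonDecoder[Shape] = DeriveJsonDecoder.gen[Shape]
}

// """{"Circle":{"r":1.0}}""".fromJson[Shape]  // Right(Circle(1.0))
// """{"Oval":{"r":1.0}}""".fromJson[Shape]    // Left(...) mentioning an invalid disambiguator
// """{}""".fromJson[Shape]                    // Left(...) mentioning an expected non-empty object
```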

0 commit comments
